LLM-Bridge 1.13.1__py3-none-any.whl → 1.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -65,7 +65,7 @@ async def create_openai_client(
65
65
  tools = []
66
66
  reasoning = None
67
67
 
68
- if model not in ["gpt-5-chat-latest", "gpt-5-pro"]:
68
+ if model not in ["gpt-5-pro", "gpt-5.2-pro"]:
69
69
  if code_execution:
70
70
  tools.append(
71
71
  CodeInterpreter(
@@ -73,16 +73,15 @@ async def create_openai_client(
73
73
  container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto")
74
74
  )
75
75
  )
76
- if model not in ["gpt-5-chat-latest"]:
77
- tools.append(
78
- WebSearchToolParam(
79
- type="web_search",
80
- search_context_size="high",
81
- )
76
+ tools.append(
77
+ WebSearchToolParam(
78
+ type="web_search",
79
+ search_context_size="high",
82
80
  )
83
- if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
81
+ )
82
+ if re.match(r"gpt-5.*", model):
84
83
  temperature = 1
85
- if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
84
+ if re.match(r"gpt-5.*", model):
86
85
  if thought:
87
86
  reasoning = Reasoning(
88
87
  effort="high",
@@ -71,6 +71,12 @@
71
71
  "input": 2.5,
72
72
  "output": 15
73
73
  },
74
+ {
75
+ "apiType": "OpenAI",
76
+ "model": "gpt-5.2",
77
+ "input": 1.75,
78
+ "output": 14
79
+ },
74
80
  {
75
81
  "apiType": "OpenAI",
76
82
  "model": "gpt-5.1",
@@ -89,6 +95,12 @@
89
95
  "input": 0.25,
90
96
  "output": 2
91
97
  },
98
+ {
99
+ "apiType": "OpenAI",
100
+ "model": "gpt-5.2-pro",
101
+ "input": 21,
102
+ "output": 168
103
+ },
92
104
  {
93
105
  "apiType": "OpenAI",
94
106
  "model": "gpt-5-pro",
@@ -1,32 +1,26 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.13.1
3
+ Version: 1.14.0
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
7
- Keywords: llm,ai
7
+ License-File: LICENSE
8
+ Keywords: ai,llm
8
9
  Classifier: Framework :: FastAPI
9
10
  Classifier: Programming Language :: Python :: 3
10
11
  Requires-Python: >=3.12
11
- Description-Content-Type: text/markdown
12
- License-File: LICENSE
12
+ Requires-Dist: anthropic==0.75.0
13
+ Requires-Dist: docxlatex>=1.1.1
13
14
  Requires-Dist: fastapi
15
+ Requires-Dist: google-genai==1.46.0
14
16
  Requires-Dist: httpx
15
- Requires-Dist: tenacity
16
17
  Requires-Dist: openai==2.9.0
17
- Requires-Dist: tiktoken==0.11.0
18
- Requires-Dist: google-genai==1.46.0
19
- Requires-Dist: anthropic==0.75.0
20
- Requires-Dist: PyMuPDF
21
- Requires-Dist: docxlatex>=1.1.1
22
18
  Requires-Dist: openpyxl
19
+ Requires-Dist: pymupdf
23
20
  Requires-Dist: python-pptx
24
- Provides-Extra: test
25
- Requires-Dist: pytest; extra == "test"
26
- Requires-Dist: pytest-asyncio; extra == "test"
27
- Requires-Dist: python-dotenv; extra == "test"
28
- Requires-Dist: protobuf; extra == "test"
29
- Dynamic: license-file
21
+ Requires-Dist: tenacity
22
+ Requires-Dist: tiktoken==0.11.0
23
+ Description-Content-Type: text/markdown
30
24
 
31
25
  # LLM Bridge
32
26
 
@@ -71,18 +65,36 @@ The features listed represent the maximum capabilities of each API type supporte
71
65
  pip install --upgrade llm_bridge
72
66
  ```
73
67
 
74
- ## Test
68
+ ## Development
69
+
70
+ ### Python uv
71
+
72
+ 1. Install uv: `powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"`
73
+ 2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python upgrade 3.12`
74
+ 3. Configure requirements:
75
+ ```bash
76
+ uv sync
77
+ ```
78
+
79
+ ### PyCharm Professional
80
+
81
+ 1. Add New Interpreter >> Add Local Interpreter
82
+ - Environment: Select existing
83
+ - Type: uv
84
+ 2. Add New Configuration >> uv run >> script: `./usage/main.py`
85
+
86
+ ### Usage
87
+
88
+ Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
89
+
90
+ ### Test
75
91
 
76
92
  ```bash
77
- pytest
93
+ uv run pytest
78
94
  ```
79
95
 
80
- ## Quick Start
96
+ ### Build
81
97
 
82
- ### Setup
83
-
84
- 1. Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
85
- 2. Install requirements: `pip install -r requirements.txt`
86
- 3. In PyCharm, add a new Python configuration:
87
- - script: `./usage/main.py`
88
- - Paths to ".env" files: `./usage/.env`
98
+ ```bash
99
+ uv build
100
+ ```
@@ -33,7 +33,7 @@ llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScD
33
33
  llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
34
34
  llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=EMXEpNTmhOv_D8pQIPboW3taRX5DnLa1QIGJn0VjO4M,3597
35
35
  llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=6CuacST1bLUGhY-rGH0bm5tu3r9iQQhIweN32TgqLCc,3692
36
- llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=xRnqYal5lBS76tCJKJHVvI0UsLCM49hTlqN6xYmzPco,5028
36
+ llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=zQyDC3beuUeC8czU22U_Xg8VGxvuQxxuaxWgghCknWg,4889
37
37
  llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py,sha256=kQ3RGyg_9vbe7oYzbl11Dzu-tHPY1z2-SBBSgHHwPfM,143
38
38
  llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
39
39
  llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
@@ -46,7 +46,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
46
46
  llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
47
47
  llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
48
48
  llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
49
- llm_bridge/resources/model_prices.json,sha256=Sf-knxXYkM2taRLAStzJbw-Ps5zxG9lMSAzwSB78MVc,2588
49
+ llm_bridge/resources/model_prices.json,sha256=efolDkn3hqR5yT5dyAPqLLF-ozmUiXKSFTxoPSaqFUM,2779
50
50
  llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
51
  llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
52
52
  llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
@@ -56,8 +56,7 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
56
56
  llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
57
57
  llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
58
58
  llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
59
- llm_bridge-1.13.1.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
60
- llm_bridge-1.13.1.dist-info/METADATA,sha256=-UnhTogwrioLevGqr6kyMCyXy4fq1LpkPstEQzUjM4k,3500
61
- llm_bridge-1.13.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
62
- llm_bridge-1.13.1.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
63
- llm_bridge-1.13.1.dist-info/RECORD,,
59
+ llm_bridge-1.14.0.dist-info/METADATA,sha256=xRtP-R5h4d52zvOINH7Tq_rKCQubigcq77Z4sLkcp7c,3594
60
+ llm_bridge-1.14.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
61
+ llm_bridge-1.14.0.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
62
+ llm_bridge-1.14.0.dist-info/RECORD,,
@@ -1,5 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.9.0)
2
+ Generator: hatchling 1.28.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
-
@@ -1 +0,0 @@
1
- llm_bridge