LLM-Bridge 1.14.0 (py3-none-any.whl) → 1.14.0a1 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -65,7 +65,7 @@ async def create_openai_client(
     tools = []
     reasoning = None
 
-    if model not in ["gpt-5-pro", "gpt-5.2-pro"]:
+    if model not in ["gpt-5-chat-latest", "gpt-5-pro"]:
         if code_execution:
             tools.append(
                 CodeInterpreter(
@@ -73,15 +73,16 @@ async def create_openai_client(
                     container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto")
                 )
             )
-        tools.append(
-            WebSearchToolParam(
-                type="web_search",
-                search_context_size="high",
+        if model not in ["gpt-5-chat-latest"]:
+            tools.append(
+                WebSearchToolParam(
+                    type="web_search",
+                    search_context_size="high",
+                )
             )
-        )
-    if re.match(r"gpt-5.*", model):
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         temperature = 1
-    if re.match(r"gpt-5.*", model):
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         if thought:
             reasoning = Reasoning(
                 effort="high",
@@ -71,12 +71,6 @@
     "input": 2.5,
     "output": 15
   },
-  {
-    "apiType": "OpenAI",
-    "model": "gpt-5.2",
-    "input": 1.75,
-    "output": 14
-  },
   {
     "apiType": "OpenAI",
     "model": "gpt-5.1",
@@ -95,12 +89,6 @@
     "input": 0.25,
     "output": 2
   },
-  {
-    "apiType": "OpenAI",
-    "model": "gpt-5.2-pro",
-    "input": 21,
-    "output": 168
-  },
   {
     "apiType": "OpenAI",
     "model": "gpt-5-pro",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.14.0
+Version: 1.14.0a1
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
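The only change to the core METADATA fields is the version bump to a PEP 440 pre-release, which sorts before the final 1.14.0 (pip, for example, skips it unless --pre is passed or the pre-release is pinned explicitly). A quick check with the packaging library:

```python
from packaging.version import Version

# "a1" marks an alpha pre-release, so version ordering places it before 1.14.0.
assert Version("1.14.0a1") < Version("1.14.0")
```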
@@ -76,12 +76,13 @@ pip install --upgrade llm_bridge
 uv sync
 ```
 
-### Pycharm Professional
+### Pycharm
 
-1. Add New Interpreter >> Add Local Interpreter
-   - Environment: Select existing
-   - Type: uv
-2. Add New Configuration >> uv run >> script: `./usage/main.py`
+Add New Configuration >> uv run
+- script: `./usage/main.py`
+- Paths to ".env" files: `./usage/.env`
+
+If uv interpreter is not found, create a new project with uv.
 
 ### Usage
 
@@ -33,7 +33,7 @@ llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScD
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=EMXEpNTmhOv_D8pQIPboW3taRX5DnLa1QIGJn0VjO4M,3597
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=6CuacST1bLUGhY-rGH0bm5tu3r9iQQhIweN32TgqLCc,3692
-llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=zQyDC3beuUeC8czU22U_Xg8VGxvuQxxuaxWgghCknWg,4889
+llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=xRnqYal5lBS76tCJKJHVvI0UsLCM49hTlqN6xYmzPco,5028
 llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py,sha256=kQ3RGyg_9vbe7oYzbl11Dzu-tHPY1z2-SBBSgHHwPfM,143
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
@@ -46,7 +46,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
 llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=efolDkn3hqR5yT5dyAPqLLF-ozmUiXKSFTxoPSaqFUM,2779
+llm_bridge/resources/model_prices.json,sha256=Sf-knxXYkM2taRLAStzJbw-Ps5zxG9lMSAzwSB78MVc,2588
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
@@ -56,7 +56,7 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.14.0.dist-info/METADATA,sha256=xRtP-R5h4d52zvOINH7Tq_rKCQubigcq77Z4sLkcp7c,3594
-llm_bridge-1.14.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-llm_bridge-1.14.0.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
-llm_bridge-1.14.0.dist-info/RECORD,,
+llm_bridge-1.14.0a1.dist-info/METADATA,sha256=tGAXNEfbVunKIrQ5Trp5pIM3VLLMZ0b4dWCDT-BUDHU,3594
+llm_bridge-1.14.0a1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+llm_bridge-1.14.0a1.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.14.0a1.dist-info/RECORD,,