LLM-Bridge 1.12.0a0__py3-none-any.whl → 1.12.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py

@@ -36,9 +36,13 @@ async def create_claude_client(
         messages=claude_messages,
     )
 
+    context_window = 200_000
+    if model in ["claude-sonnet-4-5"]:
+        context_window = 1_000_000
+    max_output = 64_000
     max_tokens = min(
-        32_000,  # Max output: Claude 4.5 64K; Claude 4.1 32K
-        200_000 - input_tokens,  # Context window: Claude Sonnet 4.5 beta: 1M; otherwise 200K
+        max_output,
+        context_window - input_tokens,
     )
     thinking = None
     if thought:
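
The hunk above replaces the hard-coded 32K output cap and 200K context window with per-model values: claude-sonnet-4-5 gets a 1M-token window, other models keep 200K, and the output cap rises to 64K. Below is a minimal sketch of the resulting budget calculation; `compute_max_tokens` is an illustrative wrapper, not a function in the package, with `model` and `input_tokens` standing in for the enclosing function's variables.

```python
# Sketch of the token-budget logic introduced in this hunk.
def compute_max_tokens(model: str, input_tokens: int) -> int:
    context_window = 200_000
    if model in ["claude-sonnet-4-5"]:
        context_window = 1_000_000
    max_output = 64_000
    # Capped by both the model's output limit and the context space
    # remaining after the prompt.
    return min(max_output, context_window - input_tokens)


# A 150K-token prompt against a 200K-context model leaves 50K for output.
assert compute_max_tokens("claude-opus-4-5", 150_000) == 50_000
```
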
@@ -52,12 +56,13 @@ async def create_claude_client(
         "output-128k-2025-02-19",
         "code-execution-2025-08-25",
     ]
-    tools: list[BetaToolUnionParam] = [
+    tools: list[BetaToolUnionParam] = []
+    tools.append(
         BetaWebSearchTool20250305Param(
             type="web_search_20250305",
             name="web_search",
-        ),
-    ]
+        )
+    )
     if code_execution:
         tools.append(
             BetaCodeExecutionTool20250825Param(
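
This hunk changes `tools` from a list literal to incremental `append` calls, so the always-on web-search tool and the optional code-execution tool are added through the same code path. A sketch of the resulting pattern follows, assuming the Beta* param types are importable from `anthropic.types.beta` as in the factory module; the code-execution arguments are outside the visible diff, so they are deliberately left out.

```python
from anthropic.types.beta import (
    BetaToolUnionParam,
    BetaWebSearchTool20250305Param,
)


def build_tools(code_execution: bool) -> list[BetaToolUnionParam]:
    tools: list[BetaToolUnionParam] = []
    # Web search is always attached.
    tools.append(
        BetaWebSearchTool20250305Param(
            type="web_search_20250305",
            name="web_search",
        )
    )
    if code_execution:
        # The factory appends a BetaCodeExecutionTool20250825Param here;
        # its arguments are truncated in the diff, so they are omitted.
        ...
    return tools
```
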

llm_bridge/logic/message_preprocess/message_preprocessor.py

@@ -21,7 +21,7 @@ async def extract_text_files_to_message(message: Message, api_type: str) -> None
         if file_type != "text" and file_type != "application":
             continue
 
-        if sub_type == "pdf" and api_type in ("OpenAI", "OpenAI-Azure", "Gemini-Free", "Gemini-Paid", "Claude"):
+        if sub_type == "pdf" and api_type in ("OpenAI", "OpenAI-Azure", "Gemini-Vertex", "Gemini-Free", "Gemini-Paid", "Claude"):
            continue
 
         filename = get_file_name(file_url)
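
The preprocessor hunk adds "Gemini-Vertex" to the API types for which PDFs bypass text extraction, since those back ends accept the PDF natively. A minimal sketch of the routing decision; `should_extract_text` and `PDF_NATIVE_API_TYPES` are illustrative names, not part of the package.

```python
PDF_NATIVE_API_TYPES = (
    "OpenAI",
    "OpenAI-Azure",
    "Gemini-Vertex",  # added in this release
    "Gemini-Free",
    "Gemini-Paid",
    "Claude",
)


def should_extract_text(file_type: str, sub_type: str, api_type: str) -> bool:
    # Only text/* and application/* files are candidates for extraction.
    if file_type != "text" and file_type != "application":
        return False
    # PDFs are passed through untouched when the API accepts them natively.
    if sub_type == "pdf" and api_type in PDF_NATIVE_API_TYPES:
        return False
    return True
```
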

llm_bridge/resources/model_prices.json

@@ -125,6 +125,12 @@
         "input": 0,
         "output": 0
     },
+    {
+        "apiType": "Claude",
+        "model": "claude-opus-4-5",
+        "input": 5,
+        "output": 25
+    },
     {
         "apiType": "Claude",
         "model": "claude-sonnet-4-5",
@@ -137,12 +143,6 @@
         "input": 1,
         "output": 5
     },
-    {
-        "apiType": "Claude",
-        "model": "claude-opus-4-1",
-        "input": 15,
-        "output": 75
-    },
     {
         "apiType": "Grok",
         "model": "grok-4-latest",
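
In model_prices.json, claude-opus-4-1 (15 / 75) is removed and claude-opus-4-5 (5 / 25) is added. The following hypothetical sketch shows how such a table might be applied; the units are an assumption (USD per million tokens is conventional but not stated in the diff), and `estimate_cost` is not part of the package.

```python
MODEL_PRICES = [
    # Only this row is taken from the diff above; units are assumed.
    {"apiType": "Claude", "model": "claude-opus-4-5", "input": 5, "output": 25},
]


def estimate_cost(model: str, input_tokens: int, output_tokens: int) -> float:
    entry = next(p for p in MODEL_PRICES if p["model"] == model)
    return (
        entry["input"] * input_tokens / 1_000_000
        + entry["output"] * output_tokens / 1_000_000
    )


# 10K prompt tokens + 2K completion tokens on claude-opus-4-5
# -> 0.1, i.e. $0.10 under the assumed units.
print(estimate_cost("claude-opus-4-5", 10_000, 2_000))
```
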

llm_bridge-1.12.0a0.dist-info/METADATA → llm_bridge-1.12.2.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.12.0a0
+Version: 1.12.2
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
@@ -30,7 +30,7 @@ Dynamic: license-file
 
 # LLM Bridge
 
-LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI, OpenAI-Azure, OpenAI-GitHub, Gemini, Claude, and Grok.
+LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI (Native / Azure / GitHub), Gemini (AI Studio / Vertex), Claude, and Grok.
 
 GitHub: [https://github.com/windsnow1025/LLM-Bridge](https://github.com/windsnow1025/LLM-Bridge)
 
@@ -39,30 +39,31 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge
 ## Workflow and Features
 
 1. **Message Preprocessor**: extracts text content from documents (Word, Excel, PPT, Code files, PDFs) which are not natively supported by the target model.
-2. **Chat Client Factory**: create a client for the specific LLM API with model parameters
-    1. **Model Message Converter**: convert general messages to model messages
-    1. **Media Processor**: converts media (Image, Audio, Video, PDF) which are natively supported by the target model into compatible formats.
+2. **Chat Client Factory**: creates a client for the specific LLM API with model parameters
+    1. **Model Message Converter**: converts general messages to model messages
+    1. **Media Processor**: converts general media (Image, Audio, Video, PDF) to model compatible formats.
 3. **Chat Client**: generate stream or non-stream responses
-    1. **Model Thoughts**: captures and formats the model's thinking process
-    2. **Search Citations**: extracts and formats citations from search results
-    3. **Token Counter**: tracks and reports input and output token usage
+    - **Model Thoughts**: captures and formats the model's thinking process
+    - **Code Execution**: auto generate and execute Python code
+    - **Web Search + Citations**: extracts and formats citations from search results
+    - **Token Counter**: tracks and reports input and output token usage
 
-### Model Features
+### Supported Features for API Types
 
 The features listed represent the maximum capabilities of each API type supported by LLM Bridge.
 
 | API Type | Input Format                   | Capabilities                                     | Output Format     |
 |----------|--------------------------------|--------------------------------------------------|-------------------|
-| OpenAI   | Text, Image                    | Thinking, Web Search, Code Execution             | Text              |
+| OpenAI   | Text, Image, PDF               | Thinking, Web Search, Code Execution             | Text              |
 | Gemini   | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
 | Claude   | Text, Image, PDF               | Thinking, Web Search, Code Execution             | Text              |
 | Grok     | Text, Image                    |                                                  | Text              |
 
 #### Planned Features
 
-- OpenAI: Web Search: Citations, Image Output
-- Gemini: Code Execution: Code, Code Output
-- Claude: Code Execution, File Output
+- Structured Output
+- More features for API Types
+- Native support for Grok
 
 ## Installation
 
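
The rewritten README section describes a three-stage pipeline: preprocess messages, build an API-specific client (converting messages and media), then stream or collect the response. The sketch below only mirrors that stage order; every name in it is a placeholder, not LLM-Bridge's actual API, which is documented in the repository.

```python
# Placeholder sketch of the three-stage flow the README describes.
import asyncio
from dataclasses import dataclass


@dataclass
class Chunk:
    text: str


class PlaceholderChatClient:
    """Stands in for stage 3: a client that yields streamed chunks."""

    async def generate_stream(self):
        for piece in ("Hello", ", ", "world"):
            yield Chunk(piece)


async def preprocess(messages):
    # Stage 1: extract text from documents the model cannot read natively.
    return messages


async def create_client(api_type: str, model: str, messages):
    # Stage 2: pick the API-specific client and convert messages/media.
    return PlaceholderChatClient()


async def ask(messages, api_type: str, model: str) -> str:
    prepped = await preprocess(messages)
    client = await create_client(api_type, model, prepped)
    return "".join([c.text async for c in client.generate_stream()])


print(asyncio.run(ask([{"role": "user", "text": "Hi"}], "Claude", "claude-sonnet-4-5")))
```
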

llm_bridge-1.12.0a0.dist-info/RECORD → llm_bridge-1.12.2.dist-info/RECORD

@@ -31,7 +31,7 @@ llm_bridge/logic/chat_generate/chat_client_factory.py,sha256=i2Lefytc2Sc6Ownjq7o
 llm_bridge/logic/chat_generate/chat_message_converter.py,sha256=40VTBOPXg_ocrEZMdt1ObYlm-mhRL35zWzzxv8m2xRc,1538
 llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScDGyJ_kvThApABzSzK0CL0,702
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=8qhKE8H6bEUiCDc8ZYaKuzeAe2ne_6oNGR0Y-FQHJTk,3011
+llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=bdO-4LBSwe1x8_5kamVg6dpRkxGB8_FXgRaaNH53qUs,3059
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=j4V3l8I3G4qmJzK1ZVpCKnUxsSEkPA5nL72MCywxHIg,3394
 llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=EoU5kyccnwOKjGdFi5yTozNVPrq402jRtWPjSmBJs7M,4517
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,9 +43,9 @@ llm_bridge/logic/message_preprocess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ
 llm_bridge/logic/message_preprocess/code_file_extensions.py,sha256=5bsnSKC9PGbl6ZMy80sXfagAbz77pGjt6Z2-qwzUw48,9306
 llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cRsDAfmCynJMdlvBqiCKIT9kbx96kg,2861
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
-llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=ERws57Dsu-f5LpWKqJ_SEP7omNWXeGoJaocX91P6QDQ,1907
+llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=VR4__ip4ytAo62DHn9HeeYdbcx5lWItBnKsm9l3gmY4,1924
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=JUvODxZICUqeD2vDmZCpvATE6nlCZgpU8VmQj9M9LaE,3086
+llm_bridge/resources/model_prices.json,sha256=wCZY1PuI5l8UxeT1Wh7uQAcJ0KijRE4q-McDqtDItwg,3085
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370
@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.12.0a0.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
-llm_bridge-1.12.0a0.dist-info/METADATA,sha256=-OGfwe8cLZCU2LcEhR4X6cE4Ks9-wuzltmcp80y4F3k,3374
-llm_bridge-1.12.0a0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-llm_bridge-1.12.0a0.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
-llm_bridge-1.12.0a0.dist-info/RECORD,,
+llm_bridge-1.12.2.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.12.2.dist-info/METADATA,sha256=j3fjvIbmo_zsT_-UF-IhqZ1OvwLNRiYAruM5jc9Ncq0,3388
+llm_bridge-1.12.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.12.2.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.12.2.dist-info/RECORD,,
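
Each RECORD row pairs a file path with a sha256 digest (urlsafe base64 with the padding stripped, per the wheel spec) and its size in bytes, which is what changed for the two edited modules, the prices file, and the renamed dist-info entries. A sketch of verifying one entry against an unpacked wheel follows; the local path is an assumption.

```python
import base64
import hashlib
from pathlib import Path


def record_hash(path: Path) -> str:
    # RECORD hashes are sha256 digests, urlsafe-base64-encoded with the
    # trailing "=" padding removed.
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()


expected = "sha256=wCZY1PuI5l8UxeT1Wh7uQAcJ0KijRE4q-McDqtDItwg"
path = Path("llm_bridge/resources/model_prices.json")  # assumed unpack location
if path.exists():
    print(record_hash(path) == expected)
```
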