LLM-Bridge 1.12.1.tar.gz → 1.12.2.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llm_bridge-1.12.1 → llm_bridge-1.12.2/LLM_Bridge.egg-info}/PKG-INFO +1 -1
- {llm_bridge-1.12.1/LLM_Bridge.egg-info → llm_bridge-1.12.2}/PKG-INFO +1 -1
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +10 -5
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/resources/model_prices.json +6 -6
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/pyproject.toml +1 -1
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/LICENSE +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/LLM_Bridge.egg-info/requires.txt +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/LLM_Bridge.egg-info/top_level.txt +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/MANIFEST.in +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/README.md +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/chat_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/claude/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/implementations/printing_status.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/model_client/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/model_client/claude_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/model_client/gemini_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/client/model_client/openai_client.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/file_fetch.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/model_prices.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/resources/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/chat_response.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/message.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/model_message/__init__.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/model_message/claude_message.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/model_message/gemini_message.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/model_message/openai_message.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/type/serializer.py +0 -0
- {llm_bridge-1.12.1 → llm_bridge-1.12.2}/setup.cfg +0 -0
{llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py

@@ -36,9 +36,13 @@ async def create_claude_client(
         messages=claude_messages,
     )
 
+    context_window = 200_000
+    if model in ["claude-sonnet-4-5"]:
+        context_window = 1_000_000
+    max_output = 64_000
     max_tokens = min(
-
-
+        max_output,
+        context_window - input_tokens,
     )
     thinking = None
     if thought:

@@ -52,12 +56,13 @@ async def create_claude_client(
             "output-128k-2025-02-19",
             "code-execution-2025-08-25",
         ]
-    tools: list[BetaToolUnionParam] = [
+    tools: list[BetaToolUnionParam] = []
+    tools.append(
         BetaWebSearchTool20250305Param(
             type="web_search_20250305",
             name="web_search",
-        )
-
+        )
+    )
     if code_execution:
         tools.append(
             BetaCodeExecutionTool20250825Param(
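The first hunk makes `max_tokens` the minimum of a 64,000-token output ceiling and whatever remains of the model's context window after the prompt, with `claude-sonnet-4-5` treated as having a 1,000,000-token window instead of the default 200,000. A minimal sketch of that calculation, reusing the `model` and `input_tokens` names visible in the diff (the rest of the client construction is omitted, and this is an illustration rather than the package's own code):

```python
# Sketch of the 1.12.2 token-budget logic from claude_client_factory.py.
# `model` and `input_tokens` mirror the variables shown in the diff.

def compute_max_tokens(model: str, input_tokens: int) -> int:
    """Cap the completion budget by both the per-request output ceiling
    and the tokens still available in the model's context window."""
    context_window = 200_000              # default Claude context window
    if model in ["claude-sonnet-4-5"]:
        context_window = 1_000_000        # extended 1M-token context
    max_output = 64_000                   # per-request output ceiling
    return min(max_output, context_window - input_tokens)


print(compute_max_tokens("claude-sonnet-4-5", 250_000))  # 64000
print(compute_max_tokens("claude-opus-4-5", 150_000))    # 50000
```

The second hunk is purely structural: the web-search tool is appended to an initially empty `tools` list rather than written as a list literal, so it follows the same `tools.append(...)` pattern already used for the optional code-execution tool.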
{llm_bridge-1.12.1 → llm_bridge-1.12.2}/llm_bridge/resources/model_prices.json

@@ -125,6 +125,12 @@
     "input": 0,
     "output": 0
   },
+  {
+    "apiType": "Claude",
+    "model": "claude-opus-4-5",
+    "input": 5,
+    "output": 25
+  },
   {
     "apiType": "Claude",
     "model": "claude-sonnet-4-5",

@@ -137,12 +143,6 @@
     "input": 1,
     "output": 5
   },
-  {
-    "apiType": "Claude",
-    "model": "claude-opus-4-1",
-    "input": 15,
-    "output": 75
-  },
   {
     "apiType": "Grok",
     "model": "grok-4-latest",