LLM-Bridge 1.11.7-py3-none-any.whl → 1.11.9-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +2 -2
- llm_bridge/resources/model_prices.json +12 -0
- {llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/METADATA +1 -1
- {llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/RECORD +7 -7
- {llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/WHEEL +0 -0
- {llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/licenses/LICENSE +0 -0
- {llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/top_level.txt +0 -0
llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py

@@ -36,11 +36,11 @@ async def create_claude_client(

     max_tokens = min(
         32_000,  # Max output: Claude 4.5 64K; Claude 4.1 32K
-        200_000 - input_tokens  # Context window: Claude Sonnet 4.5 beta: 1M; otherwise 200K
+        200_000 - input_tokens,  # Context window: Claude Sonnet 4.5 beta: 1M; otherwise 200K
     )
     thinking = ThinkingConfigEnabledParam(
         type="enabled",
-        budget_tokens=
+        budget_tokens=max(1024, max_tokens // 2),  # Minimum budget tokens: 1024
     )
     temperature = 1
     betas: list[AnthropicBetaParam] = [
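The change above completes the thinking-budget expression: budget_tokens is now half of max_tokens, floored at the 1024-token minimum noted in the comment. As a sanity check, the arithmetic can be traced with a standalone sketch; the helper name and the sample prompt sizes are illustrative only, not llm_bridge code.

```python
# Standalone sketch of the token arithmetic shown in the diff (not llm_bridge code).
# The 32_000 output cap and the 200_000 context window come from the diff;
# the helper name and the sample input_tokens values are made up for illustration.

def sketch_claude_token_budget(input_tokens: int) -> tuple[int, int]:
    max_tokens = min(
        32_000,                   # output cap used by the factory
        200_000 - input_tokens,   # whatever remains of the 200K context window
    )
    budget_tokens = max(1024, max_tokens // 2)  # thinking budget, floored at 1024
    return max_tokens, budget_tokens


# A very long prompt leaves only 1_500 output tokens, so the budget clamps to 1024:
print(sketch_claude_token_budget(198_500))  # (1500, 1024)
# A short prompt hits the 32_000 output cap; the budget is half of that:
print(sketch_claude_token_budget(10_000))   # (32000, 16000)
```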
llm_bridge/resources/model_prices.json

@@ -1,4 +1,10 @@
 [
+  {
+    "apiType": "Gemini-Vertex",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Vertex",
     "model": "gemini-2.5-flash",
@@ -41,6 +47,12 @@
     "input": 0,
     "output": 0
   },
+  {
+    "apiType": "Gemini-Paid",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Paid",
     "model": "gemini-flash-latest",
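Both hunks add the same gemini-3-pro-preview entry (input 4, output 18; the file does not state the units) under the Gemini-Vertex and Gemini-Paid apiType groups. A hypothetical lookup over the bundled JSON could look like the sketch below; the function name is invented, and llm_bridge's actual pricing API is not shown in this diff.

```python
# Hypothetical helper for reading the bundled price list (not llm_bridge's API).
# Only the JSON shape (apiType / model / input / output) is taken from the diff.
import json
from importlib import resources


def find_price(api_type: str, model: str) -> dict | None:
    data = json.loads(
        resources.files("llm_bridge.resources")
        .joinpath("model_prices.json")
        .read_text()
    )
    # Return the first entry matching both keys, or None if the model is unlisted.
    return next(
        (e for e in data if e["apiType"] == api_type and e["model"] == model),
        None,
    )


print(find_price("Gemini-Vertex", "gemini-3-pro-preview"))  # input 4, output 18
print(find_price("Gemini-Paid", "gemini-3-pro-preview"))
```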
{llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/RECORD

@@ -31,7 +31,7 @@ llm_bridge/logic/chat_generate/chat_client_factory.py,sha256=H0rcRHytSfYKz_mwRfJ
 llm_bridge/logic/chat_generate/chat_message_converter.py,sha256=40VTBOPXg_ocrEZMdt1ObYlm-mhRL35zWzzxv8m2xRc,1538
 llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScDGyJ_kvThApABzSzK0CL0,702
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=
+llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=AxXY-Lr1MAZpV1wCtKXfcnq9B72dnhCAEb9K4_zSOCk,2830
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=ms0v1TnVA_JJFKhOkbF_qHeRJEAZ3SH2QOYUi2w_FBI,3614
 llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=Tr4gma6efcX1hs1EzvRPV175ZsbMZEIWnhxWM1CY3K0,4384
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -45,7 +45,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
 llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=ERws57Dsu-f5LpWKqJ_SEP7omNWXeGoJaocX91P6QDQ,1907
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=
+llm_bridge/resources/model_prices.json,sha256=JUvODxZICUqeD2vDmZCpvATE6nlCZgpU8VmQj9M9LaE,3086
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370

@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
+llm_bridge-1.11.9.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.11.9.dist-info/METADATA,sha256=Ig75w004J0JK0I9STn7BbP9WEbb4M_w5xE5xDfdtW-0,7849
+llm_bridge-1.11.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.11.9.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.11.9.dist-info/RECORD,,
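For anyone verifying the updated RECORD entries: per the wheel spec, each line is path, hash, size in bytes, and the sha256= value is the urlsafe-base64 encoding of the file's raw SHA-256 digest with trailing '=' padding stripped. A minimal sketch (the file path is illustrative):

```python
# Recompute a RECORD-style hash for a file from the unpacked wheel.
import base64
import hashlib


def record_hash(path: str) -> str:
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    # Wheel RECORD files use urlsafe base64 without '=' padding.
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# e.g. record_hash("llm_bridge/resources/model_prices.json") should print
# "JUvODxZICUqeD2vDmZCpvATE6nlCZgpU8VmQj9M9LaE" for the 1.11.9 wheel.
print(record_hash("llm_bridge/resources/model_prices.json"))
```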
The remaining files ({llm_bridge-1.11.7.dist-info → llm_bridge-1.11.9.dist-info}/WHEEL, /licenses/LICENSE, and /top_level.txt) have no content changes; only the dist-info directory name changes with the version bump.