LLM-Bridge 1.11.6-py3-none-any.whl → 1.11.8-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llm_bridge/client/implementations/openai/openai_token_couter.py +1 -1
- llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +1 -1
- llm_bridge/resources/model_prices.json +12 -0
- {llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/METADATA +1 -1
- {llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/RECORD +8 -8
- {llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/WHEEL +0 -0
- {llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/licenses/LICENSE +0 -0
- {llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/top_level.txt +0 -0
llm_bridge/client/implementations/openai/openai_token_couter.py

@@ -35,7 +35,7 @@ def count_openai_responses_input_tokens(messages: list[OpenAIResponsesMessage])

 def count_openai_output_tokens(chat_response: ChatResponse) -> int:
     text = chat_response.text
-    file_count = len(chat_response.files)
+    file_count = len(chat_response.files) if chat_response.files else 0

     return num_tokens_from_text(text) + file_count * 1000

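The change above guards against chat_response.files being None, which would make the 1.11.6 code raise a TypeError inside len(). Below is a minimal sketch of the fixed behaviour, assuming a simplified stand-in ChatResponse and tokenizer; the real types live in llm_bridge/type/chat_response.py and the package's own token-counting helpers.

from dataclasses import dataclass
from typing import Optional


@dataclass
class ChatResponse:
    text: str
    files: Optional[list] = None  # may be None when the response carries no attachments


def num_tokens_from_text(text: str) -> int:
    # Stand-in for the package's tokenizer; a rough whitespace count is enough here.
    return len(text.split())


def count_openai_output_tokens(chat_response: ChatResponse) -> int:
    text = chat_response.text
    # 1.11.6 called len(chat_response.files) directly, which fails when files is None.
    file_count = len(chat_response.files) if chat_response.files else 0
    return num_tokens_from_text(text) + file_count * 1000


print(count_openai_output_tokens(ChatResponse(text="hello world")))             # 2
print(count_openai_output_tokens(ChatResponse(text="hello", files=["a.png"])))  # 1001
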
llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py

@@ -74,7 +74,7 @@ async def create_openai_client(
             search_context_size="high",
         )
     )
-    if re.match(r"
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         temperature = 1
     if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         reasoning = Reasoning(

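The rewritten condition pins temperature to 1 for GPT-5 family models while exempting gpt-5-chat-latest (the removed line is shown truncated by the diff viewer). A small sketch of how the check behaves, assuming nothing beyond standard re semantics; needs_fixed_temperature is an illustrative name, not part of the package.

import re


def needs_fixed_temperature(model: str) -> bool:
    # Mirrors the 1.11.8 condition: any gpt-5* model except the chat alias.
    return bool(re.match(r"gpt-5.*", model)) and model != "gpt-5-chat-latest"


for model in ["gpt-5", "gpt-5-mini", "gpt-5-chat-latest", "gpt-4.1"]:
    print(model, needs_fixed_temperature(model))
# gpt-5 True
# gpt-5-mini True
# gpt-5-chat-latest False
# gpt-4.1 False
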
llm_bridge/resources/model_prices.json

@@ -1,4 +1,10 @@
 [
+  {
+    "apiType": "Gemini-Vertex",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Vertex",
     "model": "gemini-2.5-flash",

@@ -41,6 +47,12 @@
     "input": 0,
     "output": 0
   },
+  {
+    "apiType": "Gemini-Paid",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Paid",
     "model": "gemini-flash-latest",

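Both hunks add a gemini-3-pro-preview entry priced at 4 for input and 18 for output, once under Gemini-Vertex and once under Gemini-Paid. Assuming those figures are USD per million tokens (the JSON itself does not state units), a lookup over this table could be sketched as below; load_model_prices and estimate_cost are hypothetical helpers, not part of the published API.

import json
from importlib import resources


def load_model_prices() -> list[dict]:
    # Hypothetical helper: reads the price table bundled inside the wheel.
    with resources.files("llm_bridge.resources").joinpath("model_prices.json").open() as f:
        return json.load(f)


def estimate_cost(api_type: str, model: str, input_tokens: int, output_tokens: int) -> float:
    # Assumes the "input"/"output" fields are USD per million tokens.
    for entry in load_model_prices():
        if entry["apiType"] == api_type and entry["model"] == model:
            return (input_tokens * entry["input"] + output_tokens * entry["output"]) / 1_000_000
    raise KeyError(f"no price entry for {api_type}/{model}")


# e.g. 10k input + 2k output tokens on the new preview model:
# 10_000 * 4 / 1e6 + 2_000 * 18 / 1e6 = 0.076
print(estimate_cost("Gemini-Paid", "gemini-3-pro-preview", 10_000, 2_000))
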
{llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/RECORD

@@ -16,7 +16,7 @@ llm_bridge/client/implementations/gemini/stream_gemini_client.py,sha256=vqPhQdr-
 llm_bridge/client/implementations/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/client/implementations/openai/non_stream_openai_client.py,sha256=aceJm6FF6VdzVRECzJyTY8-aQjCekhhbrMPEcUN24fo,2171
 llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py,sha256=E8bBefhgtGM0qF3WH3VtXWbgls60kWVDWu2UG2SfsXM,4216
-llm_bridge/client/implementations/openai/openai_token_couter.py,sha256=
+llm_bridge/client/implementations/openai/openai_token_couter.py,sha256=ESl3L049NSE6Y1wfrH195ftQIFdr6XjJcmw5gJBeGaA,1472
 llm_bridge/client/implementations/openai/steam_openai_responses_client.py,sha256=HdaIYeJg9o5TjyqMlGUjfsPF2MDoxWF8tOqsqIbNTw8,4100
 llm_bridge/client/implementations/openai/stream_openai_client.py,sha256=Izq4xH9EuLjUCBJsuSr6U4Kj6FN5c7w_oHf9wmQatXE,2988
 llm_bridge/client/model_client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -33,7 +33,7 @@ llm_bridge/logic/chat_generate/media_processor.py,sha256=ZR8G24EHwZZr2T9iFDRmScD
 llm_bridge/logic/chat_generate/model_client_factory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py,sha256=unSrPGhQ4wO4xeMnXOGlCfd6BZE7NNYs6mYVcchXOvc,2800
 llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py,sha256=ms0v1TnVA_JJFKhOkbF_qHeRJEAZ3SH2QOYUi2w_FBI,3614
-llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=
+llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py,sha256=Tr4gma6efcX1hs1EzvRPV175ZsbMZEIWnhxWM1CY3K0,4384
 llm_bridge/logic/chat_generate/model_message_converter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py,sha256=SfDhQXR7L5nCPHS4MIjwgzK_wER7qOUCc8gh-K77kKY,2441
 llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py,sha256=UjhzRX7sBa3-Zv1flMJd8bc8uRWMMav4UdJFhE6nVq4,1527

@@ -45,7 +45,7 @@ llm_bridge/logic/message_preprocess/document_processor.py,sha256=IsVqoFgWNa9i8cR
 llm_bridge/logic/message_preprocess/file_type_checker.py,sha256=nkrVki1a2udCeVqUnfIVi7Wxx8OMKbBuHw3FOlm17uo,1603
 llm_bridge/logic/message_preprocess/message_preprocessor.py,sha256=ERws57Dsu-f5LpWKqJ_SEP7omNWXeGoJaocX91P6QDQ,1907
 llm_bridge/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-llm_bridge/resources/model_prices.json,sha256=
+llm_bridge/resources/model_prices.json,sha256=JUvODxZICUqeD2vDmZCpvATE6nlCZgpU8VmQj9M9LaE,3086
 llm_bridge/type/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 llm_bridge/type/chat_response.py,sha256=zEw-my_I0-7msmlTySdBGE2vWUIPILex0UrUPqTJiYY,754
 llm_bridge/type/message.py,sha256=NyWmSSrciFfvF81aBwAH8qFpo5IpRhh8QXMselbYen8,370

@@ -55,8 +55,8 @@ llm_bridge/type/model_message/claude_message.py,sha256=gYJUTbLUeifQMva3Axarc-VFe
 llm_bridge/type/model_message/gemini_message.py,sha256=mh8pf929g7_NkBzSOwnLXyrwSzTT4yt2FmyX7NZn0sM,4302
 llm_bridge/type/model_message/openai_message.py,sha256=xFaLY-cZoSwNd7E9BSWQjBNcRfCVH11X9s2yxXlctR0,453
 llm_bridge/type/model_message/openai_responses_message.py,sha256=be1q2euA0ybjj4NO6NxOGIRB9eJuXSb4ssUm_bM4Ocs,1529
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
-llm_bridge-1.11.
+llm_bridge-1.11.8.dist-info/licenses/LICENSE,sha256=m6uon-6P_CaiqcBfApMfjG9YRtDxcr40Z52JcqUCEAE,1069
+llm_bridge-1.11.8.dist-info/METADATA,sha256=HLkz73IR-dsQLJLtujoajKwaTixNP1XyJRTtNd8BBaE,7849
+llm_bridge-1.11.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+llm_bridge-1.11.8.dist-info/top_level.txt,sha256=PtxyrgNX1lSa1Ab_qswg0sekSXejG5zrS6b_v3Po05g,11
+llm_bridge-1.11.8.dist-info/RECORD,,

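The sha256= values in RECORD follow the wheel spec: the URL-safe base64 encoding of the file's SHA-256 digest with '=' padding stripped, followed by the file size in bytes. A short sketch for recomputing an entry from an unpacked wheel; the path here is purely illustrative.

import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    # Rebuilds a RECORD line: path,sha256=<urlsafe-b64 digest without padding>,<size>
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# e.g. verify the updated price table inside an unpacked wheel:
print(record_entry("llm_bridge/resources/model_prices.json"))
# expected (per the diff): ...,sha256=JUvODxZICUqeD2vDmZCpvATE6nlCZgpU8VmQj9M9LaE,3086
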
{llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/WHEEL: file without changes
{llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/licenses/LICENSE: file without changes
{llm_bridge-1.11.6.dist-info → llm_bridge-1.11.8.dist-info}/top_level.txt: file without changes