LLM-Bridge 1.11.6.tar.gz → 1.11.8.tar.gz

This diff shows the changes between package versions that have been publicly released to one of the supported registries, as they appear in that registry. It is provided for informational purposes only.
Files changed (68)
  1. {llm_bridge-1.11.6 → llm_bridge-1.11.8/LLM_Bridge.egg-info}/PKG-INFO +1 -1
  2. {llm_bridge-1.11.6/LLM_Bridge.egg-info → llm_bridge-1.11.8}/PKG-INFO +1 -1
  3. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/openai_token_couter.py +1 -1
  4. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +1 -1
  5. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/resources/model_prices.json +12 -0
  6. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/pyproject.toml +1 -1
  7. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/LICENSE +0 -0
  8. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  9. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  10. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/LLM_Bridge.egg-info/requires.txt +0 -0
  11. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/LLM_Bridge.egg-info/top_level.txt +0 -0
  12. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/MANIFEST.in +0 -0
  13. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/README.md +0 -0
  14. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/__init__.py +0 -0
  15. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/__init__.py +0 -0
  16. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/chat_client.py +0 -0
  17. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/__init__.py +0 -0
  18. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  19. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  20. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  21. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  22. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  23. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  24. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  25. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  26. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  27. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  28. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  29. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  30. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  31. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  32. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  33. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/implementations/printing_status.py +0 -0
  34. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/model_client/__init__.py +0 -0
  35. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/model_client/claude_client.py +0 -0
  36. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/model_client/gemini_client.py +0 -0
  37. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/client/model_client/openai_client.py +0 -0
  38. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/__init__.py +0 -0
  39. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  40. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  41. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  42. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  43. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  44. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  45. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  46. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  47. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  48. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  49. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  50. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  51. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/file_fetch.py +0 -0
  52. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  53. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  54. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  55. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  56. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  57. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/logic/model_prices.py +0 -0
  58. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/resources/__init__.py +0 -0
  59. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.11.6 → llm_bridge-1.11.8}/setup.cfg +0 -0
LLM_Bridge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.11.6
+Version: 1.11.8
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.11.6
+Version: 1.11.8
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
llm_bridge/client/implementations/openai/openai_token_couter.py
@@ -35,7 +35,7 @@ def count_openai_responses_input_tokens(messages: list[OpenAIResponsesMessage])
 
 def count_openai_output_tokens(chat_response: ChatResponse) -> int:
     text = chat_response.text
-    file_count = len(chat_response.files)
+    file_count = len(chat_response.files) if chat_response.files else 0
 
     return num_tokens_from_text(text) + file_count * 1000
 
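The guard in count_openai_output_tokens avoids a TypeError when a response carries no files (chat_response.files is None). A minimal, self-contained sketch of the changed logic, assuming files may be None; the stand-in ChatResponse dataclass and the rough num_tokens_from_text stub below are illustrative, not the library's own definitions, while the 1000-tokens-per-file constant is taken from the diff:

# Hypothetical, self-contained illustration of the guarded counting logic;
# ChatResponse here is a stand-in dataclass, not the library's own type.
from dataclasses import dataclass
from typing import Optional

@dataclass
class ChatResponse:
    text: str
    files: Optional[list] = None

def num_tokens_from_text(text: str) -> int:
    # Rough stand-in for the real tokenizer-based helper.
    return max(1, len(text) // 4)

def count_openai_output_tokens(chat_response: ChatResponse) -> int:
    text = chat_response.text
    # Before 1.11.8, len(None) raised TypeError when no files were attached.
    file_count = len(chat_response.files) if chat_response.files else 0
    return num_tokens_from_text(text) + file_count * 1000

print(count_openai_output_tokens(ChatResponse(text="hello world")))  # no files -> no crash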
llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py
@@ -74,7 +74,7 @@ async def create_openai_client(
                 search_context_size="high",
             )
         )
-    if re.match(r"^o\d", model) or (re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest"):
+    if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         temperature = 1
     if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
         reasoning = Reasoning(
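The condition that forces temperature = 1 is narrowed: in 1.11.6 it matched o-series models (^o\d) as well as gpt-5 models other than gpt-5-chat-latest; in 1.11.8 only the gpt-5 branch remains, so o-series models keep the caller's temperature. A small sketch comparing the two gates outside the factory (the helper names are illustrative, not part of the library):

import re

def forces_temperature_old(model: str) -> bool:
    # 1.11.6 behaviour: o-series (o1, o3, ...) or any gpt-5 model except gpt-5-chat-latest.
    return bool(re.match(r"^o\d", model)) or (
        bool(re.match(r"gpt-5.*", model)) and model != "gpt-5-chat-latest"
    )

def forces_temperature_new(model: str) -> bool:
    # 1.11.8 behaviour: only gpt-5 models except gpt-5-chat-latest.
    return bool(re.match(r"gpt-5.*", model)) and model != "gpt-5-chat-latest"

for m in ["o3-mini", "gpt-5", "gpt-5-chat-latest", "gpt-4o"]:
    print(m, forces_temperature_old(m), forces_temperature_new(m))
# o3-mini: True -> False; gpt-5: True -> True; the other two are unaffected in both versions.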
llm_bridge/resources/model_prices.json
@@ -1,4 +1,10 @@
 [
+  {
+    "apiType": "Gemini-Vertex",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Vertex",
     "model": "gemini-2.5-flash",
@@ -41,6 +47,12 @@
     "input": 0,
     "output": 0
   },
+  {
+    "apiType": "Gemini-Paid",
+    "model": "gemini-3-pro-preview",
+    "input": 4,
+    "output": 18
+  },
   {
     "apiType": "Gemini-Paid",
     "model": "gemini-flash-latest",
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "LLM-Bridge"
-version = "1.11.6"
+version = "1.11.8"
 authors = [
     {name = "windsnow1025", email = "windsnow1025@gmail.com"}
 ]