LLM-Bridge 1.11.8.tar.gz → 1.11.10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {llm_bridge-1.11.8 → llm_bridge-1.11.10/LLM_Bridge.egg-info}/PKG-INFO +1 -1
  2. {llm_bridge-1.11.8/LLM_Bridge.egg-info → llm_bridge-1.11.10}/PKG-INFO +1 -1
  3. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +2 -2
  4. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +1 -1
  5. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/pyproject.toml +1 -1
  6. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/LICENSE +0 -0
  7. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  8. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  9. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/LLM_Bridge.egg-info/requires.txt +0 -0
  10. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/LLM_Bridge.egg-info/top_level.txt +0 -0
  11. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/MANIFEST.in +0 -0
  12. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/README.md +0 -0
  13. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/__init__.py +0 -0
  14. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/__init__.py +0 -0
  15. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/chat_client.py +0 -0
  16. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/__init__.py +0 -0
  17. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  18. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  19. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  20. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  21. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  22. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  23. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  24. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  25. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  26. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  27. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  28. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  29. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  30. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  31. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  32. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  33. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/implementations/printing_status.py +0 -0
  34. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/model_client/__init__.py +0 -0
  35. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/model_client/claude_client.py +0 -0
  36. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/model_client/gemini_client.py +0 -0
  37. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/client/model_client/openai_client.py +0 -0
  38. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/__init__.py +0 -0
  39. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  40. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  41. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  42. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  43. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  44. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  45. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  46. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  47. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  48. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  49. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  50. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/file_fetch.py +0 -0
  51. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  52. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  53. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  54. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  55. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  56. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/resources/model_prices.json +0 -0
  59. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.11.8 → llm_bridge-1.11.10}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.11.8
+Version: 1.11.10
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.11.8
+Version: 1.11.10
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
@@ -36,11 +36,11 @@ async def create_claude_client(
 
     max_tokens = min(
         32_000,  # Max output: Claude 4.5 64K; Claude 4.1 32K
-        200_000 - input_tokens  # Context window: Claude Sonnet 4.5 beta: 1M; otherwise 200K
+        200_000 - input_tokens,  # Context window: Claude Sonnet 4.5 beta: 1M; otherwise 200K
     )
     thinking = ThinkingConfigEnabledParam(
         type="enabled",
-        budget_tokens=min(32_000, max_tokens) // 2
+        budget_tokens=max(1024, max_tokens // 2),  # Minimum budget tokens: 1024
     )
     temperature = 1
     betas: list[AnthropicBetaParam] = [
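
The budget_tokens change is easiest to see with a large prompt, where max_tokens collapses toward zero. Below is a minimal sketch of the arithmetic only; the 198_000-token prompt is a hypothetical value, and input_tokens stands in for the prompt token count the factory computes before this block.

    input_tokens = 198_000  # hypothetical large prompt

    max_tokens = min(
        32_000,                  # max output cap
        200_000 - input_tokens,  # remaining context window
    )                            # -> 2_000

    old_budget = min(32_000, max_tokens) // 2  # -> 1_000, below the 1024-token minimum
    new_budget = max(1024, max_tokens // 2)    # -> 1_024, clamped to the minimum

    print(max_tokens, old_budget, new_budget)

With the old formula the thinking budget could fall below the 1024-token floor noted in the new comment; the new formula guarantees it never does.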
@@ -47,7 +47,7 @@ async def create_openai_client(
     else:
         raise HTTPException(status_code=500, detail="API Type not matched")
 
-    if api_type in ("OpenAI", "OpenAI-Azure", "Grok"):
+    if api_type in ("OpenAI", "OpenAI-Azure"):
         use_responses_api = True
     else:
         use_responses_api = False
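
With "Grok" removed from the tuple, Grok requests now take the else branch and are served by the non-Responses client. A simplified sketch of the resulting routing, using only names that appear in the hunk (this is not the full factory):

    def uses_responses_api(api_type: str) -> bool:
        # After 1.11.10, only OpenAI and Azure OpenAI go through the Responses API.
        return api_type in ("OpenAI", "OpenAI-Azure")

    assert uses_responses_api("OpenAI")
    assert uses_responses_api("OpenAI-Azure")
    assert not uses_responses_api("Grok")  # behaviour changed in this release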
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "LLM-Bridge"
-version = "1.11.8"
+version = "1.11.10"
 authors = [
     {name = "windsnow1025", email = "windsnow1025@gmail.com"}
 ]