LLM-Bridge 1.12.5__tar.gz → 1.12.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {llm_bridge-1.12.5 → llm_bridge-1.12.6/LLM_Bridge.egg-info}/PKG-INFO +7 -7
  2. {llm_bridge-1.12.5/LLM_Bridge.egg-info → llm_bridge-1.12.6}/PKG-INFO +7 -7
  3. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/README.md +6 -6
  4. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/chat_client_factory.py +4 -0
  5. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +5 -0
  6. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/pyproject.toml +1 -1
  7. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/LICENSE +0 -0
  8. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  9. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  10. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/LLM_Bridge.egg-info/requires.txt +0 -0
  11. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/LLM_Bridge.egg-info/top_level.txt +0 -0
  12. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/MANIFEST.in +0 -0
  13. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/__init__.py +0 -0
  14. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/__init__.py +0 -0
  15. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/chat_client.py +0 -0
  16. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/__init__.py +0 -0
  17. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  18. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  19. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  20. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  21. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  22. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  23. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  24. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  25. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  26. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  27. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  28. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  29. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  30. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  31. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  32. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  33. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/implementations/printing_status.py +0 -0
  34. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/model_client/__init__.py +0 -0
  35. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/model_client/claude_client.py +0 -0
  36. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/model_client/gemini_client.py +0 -0
  37. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/client/model_client/openai_client.py +0 -0
  38. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/__init__.py +0 -0
  39. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  40. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  41. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  42. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  43. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  44. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
  45. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  46. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  47. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  48. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  49. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  50. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/file_fetch.py +0 -0
  51. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  52. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  53. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  54. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  55. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  56. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/resources/model_prices.json +0 -0
  59. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.12.5 → llm_bridge-1.12.6}/setup.cfg +0 -0

LLM_Bridge.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: LLM-Bridge
- Version: 1.12.5
+ Version: 1.12.6
  Summary: A Bridge for LLMs
  Author-email: windsnow1025 <windsnow1025@gmail.com>
  License-Expression: MIT
@@ -52,12 +52,12 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge

  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.

- | API Type | Input Format | Capabilities | Output Format |
- |----------|--------------------------------|--------------------------------------------------|-------------------|
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
- | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Grok | Text, Image | | Text |
+ | API Type | Input Format | Capabilities | Output Format |
+ |----------|--------------------------------|---------------------------------------------------------------------|-------------------|
+ | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution, Structured Output | Text, Image, File |
+ | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Grok | Text, Image | | Text |

  #### Planned Features

PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: LLM-Bridge
- Version: 1.12.5
+ Version: 1.12.6
  Summary: A Bridge for LLMs
  Author-email: windsnow1025 <windsnow1025@gmail.com>
  License-Expression: MIT
@@ -52,12 +52,12 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge

  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.

- | API Type | Input Format | Capabilities | Output Format |
- |----------|--------------------------------|--------------------------------------------------|-------------------|
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
- | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Grok | Text, Image | | Text |
+ | API Type | Input Format | Capabilities | Output Format |
+ |----------|--------------------------------|---------------------------------------------------------------------|-------------------|
+ | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution, Structured Output | Text, Image, File |
+ | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Grok | Text, Image | | Text |

  #### Planned Features

README.md

@@ -22,12 +22,12 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge

  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.

- | API Type | Input Format | Capabilities | Output Format |
- |----------|--------------------------------|--------------------------------------------------|-------------------|
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
- | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
- | Grok | Text, Image | | Text |
+ | API Type | Input Format | Capabilities | Output Format |
+ |----------|--------------------------------|---------------------------------------------------------------------|-------------------|
+ | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution, Structured Output | Text, Image, File |
+ | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
+ | Grok | Text, Image | | Text |

  #### Planned Features

llm_bridge/logic/chat_generate/chat_client_factory.py

@@ -16,6 +16,7 @@ async def create_chat_client(
  stream: bool,
  thought: bool,
  code_execution: bool,
+ structured_output_schema: dict | None = None,
  ) -> ChatClient:
  if api_type == 'OpenAI':
  return await create_openai_client(
@@ -74,6 +75,7 @@ async def create_chat_client(
  stream=stream,
  thought=thought,
  code_execution=code_execution,
+ structured_output_schema=structured_output_schema,
  )
  elif api_type == 'Gemini-Paid':
  return await create_gemini_client(
@@ -85,6 +87,7 @@ async def create_chat_client(
  stream=stream,
  thought=thought,
  code_execution=code_execution,
+ structured_output_schema=structured_output_schema,
  )
  elif api_type == 'Gemini-Vertex':
  return await create_gemini_client(
@@ -96,6 +99,7 @@ async def create_chat_client(
  thought=thought,
  code_execution=code_execution,
  vertexai=True,
+ structured_output_schema=structured_output_schema,
  )
  elif api_type == 'Claude':
  return await create_claude_client(
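
Taken together, these hunks thread a caller-supplied JSON Schema from `create_chat_client` down to the Gemini factories only; the OpenAI and Claude branches are unchanged in this release. A hypothetical usage sketch follows: only `api_type`, `stream`, `thought`, `code_execution`, and `structured_output_schema` are confirmed by the hunks above, so the remaining keyword arguments and the model name are assumptions for illustration.

```python
# Hypothetical sketch of calling the updated factory. Only api_type, stream,
# thought, code_execution and structured_output_schema are confirmed by the
# diff above; model, api_key and messages are assumed argument names.
import asyncio

from llm_bridge.logic.chat_generate.chat_client_factory import create_chat_client

# JSON Schema the Gemini response should conform to.
REVIEW_SCHEMA = {
    "type": "object",
    "properties": {
        "sentiment": {"type": "string", "enum": ["positive", "negative", "neutral"]},
        "summary": {"type": "string"},
    },
    "required": ["sentiment", "summary"],
}


async def main() -> None:
    client = await create_chat_client(
        api_type="Gemini-Paid",                  # branch name confirmed by the diff
        model="gemini-2.5-flash",                # assumed model name
        api_key="...",                           # assumed parameter
        messages=[],                             # assumed parameter
        stream=False,
        thought=False,
        code_execution=False,
        structured_output_schema=REVIEW_SCHEMA,  # new in 1.12.6
    )
    # client is a ChatClient; its generation methods are not shown in this diff.


asyncio.run(main())
```
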
llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py

@@ -18,6 +18,7 @@ async def create_gemini_client(
  stream: bool,
  thought: bool,
  code_execution: bool,
+ structured_output_schema: dict | None = None,
  ):
  client = genai.Client(
  vertexai=vertexai,
@@ -86,6 +87,10 @@ async def create_gemini_client(
  response_modalities=response_modalities,
  )

+ if structured_output_schema is not None:
+     config.response_mime_type = "application/json"
+     config.response_json_schema = structured_output_schema
+
  gemini_messages = await convert_messages_to_gemini(messages)

  if stream:
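
The new branch asks the google-genai SDK for JSON output constrained by the supplied schema. Below is a minimal standalone sketch of that pattern outside LLM-Bridge, assuming a recent google-genai release that accepts `response_json_schema` on `GenerateContentConfig`; the model name, prompt, and API-key handling are illustrative only.

```python
# Minimal sketch of Gemini structured output with the google-genai SDK,
# mirroring what the new factory branch configures. Model name and prompt
# are illustrative; this is not LLM-Bridge code.
from google import genai
from google.genai import types

client = genai.Client(api_key="YOUR_GEMINI_API_KEY")

schema = {
    "type": "object",
    "properties": {
        "city": {"type": "string"},
        "population": {"type": "integer"},
    },
    "required": ["city", "population"],
}

config = types.GenerateContentConfig(
    response_mime_type="application/json",  # request a JSON response
    response_json_schema=schema,            # constrain it to the schema
)

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Give me the largest city in Japan and its population.",
    config=config,
)
print(response.text)  # a JSON string conforming to `schema`
```
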
pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "LLM-Bridge"
- version = "1.12.5"
+ version = "1.12.6"
  authors = [
  {name = "windsnow1025", email = "windsnow1025@gmail.com"}
  ]