LLM-Bridge 1.12.0__tar.gz → 1.12.0a0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0/LLM_Bridge.egg-info}/PKG-INFO +13 -14
  2. {llm_bridge-1.12.0/LLM_Bridge.egg-info → llm_bridge-1.12.0a0}/PKG-INFO +13 -14
  3. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/README.md +12 -13
  4. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/pyproject.toml +1 -1
  5. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/LICENSE +0 -0
  6. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
  7. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
  8. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/LLM_Bridge.egg-info/requires.txt +0 -0
  9. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/LLM_Bridge.egg-info/top_level.txt +0 -0
  10. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/MANIFEST.in +0 -0
  11. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/__init__.py +0 -0
  12. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/__init__.py +0 -0
  13. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/chat_client.py +0 -0
  14. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/__init__.py +0 -0
  15. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  16. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  17. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  18. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  19. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  20. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  21. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  22. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  23. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  24. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  25. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  26. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  27. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  28. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  29. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  30. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  31. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/implementations/printing_status.py +0 -0
  32. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/model_client/__init__.py +0 -0
  33. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/model_client/claude_client.py +0 -0
  34. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/model_client/gemini_client.py +0 -0
  35. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/client/model_client/openai_client.py +0 -0
  36. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/__init__.py +0 -0
  37. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  38. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  39. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  40. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  41. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  42. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  43. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  44. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
  45. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  46. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  47. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  48. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  49. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  50. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/file_fetch.py +0 -0
  51. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  52. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  53. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  54. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  55. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  56. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/logic/model_prices.py +0 -0
  57. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/resources/__init__.py +0 -0
  58. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/resources/model_prices.json +0 -0
  59. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/__init__.py +0 -0
  60. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/chat_response.py +0 -0
  61. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/message.py +0 -0
  62. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/model_message/__init__.py +0 -0
  63. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/model_message/claude_message.py +0 -0
  64. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/model_message/gemini_message.py +0 -0
  65. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/model_message/openai_message.py +0 -0
  66. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  67. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/llm_bridge/type/serializer.py +0 -0
  68. {llm_bridge-1.12.0 → llm_bridge-1.12.0a0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.12.0
3
+ Version: 1.12.0a0
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -30,7 +30,7 @@ Dynamic: license-file
30
30
 
31
31
  # LLM Bridge
32
32
 
33
- LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI (Native / Azure / GitHub), Gemini (AI Studio / Vertex), Claude, and Grok.
33
+ LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI, OpenAI-Azure, OpenAI-GitHub, Gemini, Claude, and Grok.
34
34
 
35
35
  GitHub: [https://github.com/windsnow1025/LLM-Bridge](https://github.com/windsnow1025/LLM-Bridge)
36
36
 
@@ -39,31 +39,30 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge
39
39
  ## Workflow and Features
40
40
 
41
41
  1. **Message Preprocessor**: extracts text content from documents (Word, Excel, PPT, Code files, PDFs) which are not natively supported by the target model.
42
- 2. **Chat Client Factory**: creates a client for the specific LLM API with model parameters
43
- 1. **Model Message Converter**: converts general messages to model messages
44
- 1. **Media Processor**: converts general media (Image, Audio, Video, PDF) to model compatible formats.
42
+ 2. **Chat Client Factory**: create a client for the specific LLM API with model parameters
43
+ 1. **Model Message Converter**: convert general messages to model messages
44
+ 1. **Media Processor**: converts media (Image, Audio, Video, PDF) which are natively supported by the target model into compatible formats.
45
45
  3. **Chat Client**: generate stream or non-stream responses
46
- - **Model Thoughts**: captures and formats the model's thinking process
47
- - **Code Execution**: auto generate and execute Python code
48
- - **Web Search + Citations**: extracts and formats citations from search results
49
- - **Token Counter**: tracks and reports input and output token usage
46
+ 1. **Model Thoughts**: captures and formats the model's thinking process
47
+ 2. **Search Citations**: extracts and formats citations from search results
48
+ 3. **Token Counter**: tracks and reports input and output token usage
50
49
 
51
- ### Supported Features for API Types
50
+ ### Model Features
52
51
 
53
52
  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.
54
53
 
55
54
  | API Type | Input Format | Capabilities | Output Format |
56
55
  |----------|--------------------------------|--------------------------------------------------|-------------------|
57
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
56
+ | OpenAI | Text, Image | Thinking, Web Search, Code Execution | Text |
58
57
  | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
59
58
  | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
60
59
  | Grok | Text, Image | | Text |
61
60
 
62
61
  #### Planned Features
63
62
 
64
- - Structured Output
65
- - More features for API Types
66
- - Native support for Grok
63
+ - OpenAI: Web Search: Citations, Image Output
64
+ - Gemini: Code Execution: Code, Code Output
65
+ - Claude: Code Execution, File Output
67
66
 
68
67
  ## Installation
69
68
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.12.0
3
+ Version: 1.12.0a0
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
@@ -30,7 +30,7 @@ Dynamic: license-file
30
30
 
31
31
  # LLM Bridge
32
32
 
33
- LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI (Native / Azure / GitHub), Gemini (AI Studio / Vertex), Claude, and Grok.
33
+ LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI, OpenAI-Azure, OpenAI-GitHub, Gemini, Claude, and Grok.
34
34
 
35
35
  GitHub: [https://github.com/windsnow1025/LLM-Bridge](https://github.com/windsnow1025/LLM-Bridge)
36
36
 
@@ -39,31 +39,30 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge
39
39
  ## Workflow and Features
40
40
 
41
41
  1. **Message Preprocessor**: extracts text content from documents (Word, Excel, PPT, Code files, PDFs) which are not natively supported by the target model.
42
- 2. **Chat Client Factory**: creates a client for the specific LLM API with model parameters
43
- 1. **Model Message Converter**: converts general messages to model messages
44
- 1. **Media Processor**: converts general media (Image, Audio, Video, PDF) to model compatible formats.
42
+ 2. **Chat Client Factory**: create a client for the specific LLM API with model parameters
43
+ 1. **Model Message Converter**: convert general messages to model messages
44
+ 1. **Media Processor**: converts media (Image, Audio, Video, PDF) which are natively supported by the target model into compatible formats.
45
45
  3. **Chat Client**: generate stream or non-stream responses
46
- - **Model Thoughts**: captures and formats the model's thinking process
47
- - **Code Execution**: auto generate and execute Python code
48
- - **Web Search + Citations**: extracts and formats citations from search results
49
- - **Token Counter**: tracks and reports input and output token usage
46
+ 1. **Model Thoughts**: captures and formats the model's thinking process
47
+ 2. **Search Citations**: extracts and formats citations from search results
48
+ 3. **Token Counter**: tracks and reports input and output token usage
50
49
 
51
- ### Supported Features for API Types
50
+ ### Model Features
52
51
 
53
52
  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.
54
53
 
55
54
  | API Type | Input Format | Capabilities | Output Format |
56
55
  |----------|--------------------------------|--------------------------------------------------|-------------------|
57
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
56
+ | OpenAI | Text, Image | Thinking, Web Search, Code Execution | Text |
58
57
  | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
59
58
  | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
60
59
  | Grok | Text, Image | | Text |
61
60
 
62
61
  #### Planned Features
63
62
 
64
- - Structured Output
65
- - More features for API Types
66
- - Native support for Grok
63
+ - OpenAI: Web Search: Citations, Image Output
64
+ - Gemini: Code Execution: Code, Code Output
65
+ - Claude: Code Execution, File Output
67
66
 
68
67
  ## Installation
69
68
 
@@ -1,6 +1,6 @@
1
1
  # LLM Bridge
2
2
 
3
- LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI (Native / Azure / GitHub), Gemini (AI Studio / Vertex), Claude, and Grok.
3
+ LLM Bridge is a unified Python interface for interacting with LLMs, including OpenAI, OpenAI-Azure, OpenAI-GitHub, Gemini, Claude, and Grok.
4
4
 
5
5
  GitHub: [https://github.com/windsnow1025/LLM-Bridge](https://github.com/windsnow1025/LLM-Bridge)
6
6
 
@@ -9,31 +9,30 @@ PyPI: [https://pypi.org/project/LLM-Bridge/](https://pypi.org/project/LLM-Bridge
9
9
  ## Workflow and Features
10
10
 
11
11
  1. **Message Preprocessor**: extracts text content from documents (Word, Excel, PPT, Code files, PDFs) which are not natively supported by the target model.
12
- 2. **Chat Client Factory**: creates a client for the specific LLM API with model parameters
13
- 1. **Model Message Converter**: converts general messages to model messages
14
- 1. **Media Processor**: converts general media (Image, Audio, Video, PDF) to model compatible formats.
12
+ 2. **Chat Client Factory**: create a client for the specific LLM API with model parameters
13
+ 1. **Model Message Converter**: convert general messages to model messages
14
+ 1. **Media Processor**: converts media (Image, Audio, Video, PDF) which are natively supported by the target model into compatible formats.
15
15
  3. **Chat Client**: generate stream or non-stream responses
16
- - **Model Thoughts**: captures and formats the model's thinking process
17
- - **Code Execution**: auto generate and execute Python code
18
- - **Web Search + Citations**: extracts and formats citations from search results
19
- - **Token Counter**: tracks and reports input and output token usage
16
+ 1. **Model Thoughts**: captures and formats the model's thinking process
17
+ 2. **Search Citations**: extracts and formats citations from search results
18
+ 3. **Token Counter**: tracks and reports input and output token usage
20
19
 
21
- ### Supported Features for API Types
20
+ ### Model Features
22
21
 
23
22
  The features listed represent the maximum capabilities of each API type supported by LLM Bridge.
24
23
 
25
24
  | API Type | Input Format | Capabilities | Output Format |
26
25
  |----------|--------------------------------|--------------------------------------------------|-------------------|
27
- | OpenAI | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
26
+ | OpenAI | Text, Image | Thinking, Web Search, Code Execution | Text |
28
27
  | Gemini | Text, Image, Video, Audio, PDF | Thinking, Web Search + Citations, Code Execution | Text, Image, File |
29
28
  | Claude | Text, Image, PDF | Thinking, Web Search, Code Execution | Text |
30
29
  | Grok | Text, Image | | Text |
31
30
 
32
31
  #### Planned Features
33
32
 
34
- - Structured Output
35
- - More features for API Types
36
- - Native support for Grok
33
+ - OpenAI: Web Search: Citations, Image Output
34
+ - Gemini: Code Execution: Code, Code Output
35
+ - Claude: Code Execution, File Output
37
36
 
38
37
  ## Installation
39
38
 
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "LLM-Bridge"
7
- version = "1.12.0"
7
+ version = "1.12.0-alpha.0"
8
8
  authors = [
9
9
  {name = "windsnow1025", email = "windsnow1025@gmail.com"}
10
10
  ]
File without changes
File without changes
File without changes