LLM-Bridge 1.13.1.tar.gz → 1.14.0a0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0/LLM_Bridge.egg-info}/PKG-INFO +29 -15
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/LLM_Bridge.egg-info/requires.txt +0 -6
- {llm_bridge-1.13.1/LLM_Bridge.egg-info → llm_bridge-1.14.0a0}/PKG-INFO +29 -15
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/README.md +28 -9
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/pyproject.toml +7 -8
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/LICENSE +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/LLM_Bridge.egg-info/SOURCES.txt +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/LLM_Bridge.egg-info/dependency_links.txt +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/LLM_Bridge.egg-info/top_level.txt +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/MANIFEST.in +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/chat_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/claude/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/implementations/printing_status.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/model_client/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/model_client/claude_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/model_client/gemini_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/client/model_client/openai_client.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/file_fetch.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/logic/model_prices.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/resources/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/resources/model_prices.json +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/chat_response.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/message.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/model_message/__init__.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/model_message/claude_message.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/model_message/gemini_message.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/model_message/openai_message.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/llm_bridge/type/serializer.py +0 -0
- {llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/setup.cfg +0 -0
{llm_bridge-1.13.1 → llm_bridge-1.14.0a0/LLM_Bridge.egg-info}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: LLM-Bridge
-Version: 1.13.1
+Version: 1.14.0a0
 Summary: A Bridge for LLMs
 Author-email: windsnow1025 <windsnow1025@gmail.com>
 License-Expression: MIT
@@ -21,11 +21,6 @@ Requires-Dist: PyMuPDF
 Requires-Dist: docxlatex>=1.1.1
 Requires-Dist: openpyxl
 Requires-Dist: python-pptx
-Provides-Extra: test
-Requires-Dist: pytest; extra == "test"
-Requires-Dist: pytest-asyncio; extra == "test"
-Requires-Dist: python-dotenv; extra == "test"
-Requires-Dist: protobuf; extra == "test"
 Dynamic: license-file
 
 # LLM Bridge
@@ -71,18 +66,37 @@ The features listed represent the maximum capabilities of each API type supporte
 pip install --upgrade llm_bridge
 ```
 
-##
+## Development
 
-
-pytest
-```
+### Python uv
 
-
+1. Install uv: `powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"`
+2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python install 3.12`
+3. Configure requirements:
+```bash
+uv sync
+```
 
-###
+### Pycharm
 
-
-2. Install requirements: `pip install -r requirements.txt`
-3. In PyCharm, add a new Python configuration:
+Add New Configuration >> uv run
 - script: `./usage/main.py`
 - Paths to ".env" files: `./usage/.env`
+
+If uv interpreter is not found, create a new project with uv.
+
+### Usage
+
+Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
+
+### Test
+
+```bash
+uv run pytest
+```
+
+### Build
+
+```bash
+uv build
+```
````
{llm_bridge-1.13.1/LLM_Bridge.egg-info → llm_bridge-1.14.0a0}/PKG-INFO

The same +29 -15 changes as the LLM_Bridge.egg-info/PKG-INFO diff above: Version bumped from 1.13.1 to 1.14.0a0, the five test-extra Provides-Extra/Requires-Dist lines removed, and the embedded README's Test section replaced by the new Development section (Python uv, Pycharm, Usage, Test, Build).
{llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/README.md

````diff
@@ -41,18 +41,37 @@ The features listed represent the maximum capabilities of each API type supporte
 pip install --upgrade llm_bridge
 ```
 
-##
+## Development
 
-
-pytest
-```
+### Python uv
 
-
+1. Install uv: `powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"`
+2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python install 3.12`
+3. Configure requirements:
+```bash
+uv sync
+```
 
-###
+### Pycharm
 
-
-2. Install requirements: `pip install -r requirements.txt`
-3. In PyCharm, add a new Python configuration:
+Add New Configuration >> uv run
 - script: `./usage/main.py`
 - Paths to ".env" files: `./usage/.env`
+
+If uv interpreter is not found, create a new project with uv.
+
+### Usage
+
+Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
+
+### Test
+
+```bash
+uv run pytest
+```
+
+### Build
+
+```bash
+uv build
+```
````
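The Usage step above points at `./usage/.env.example`, whose contents are not included in this diff. Purely as an illustration, with every variable name assumed rather than taken from the package, a filled-in `./usage/.env` for the three provider SDKs in the dependency list might look like:

```bash
# Hypothetical .env sketch -- key names are assumptions, not copied from
# LLM-Bridge's .env.example (that file is not part of this diff).
OPENAI_API_KEY=sk-...
ANTHROPIC_API_KEY=sk-ant-...
GEMINI_API_KEY=...
```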
{llm_bridge-1.13.1 → llm_bridge-1.14.0a0}/pyproject.toml

````diff
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "LLM-Bridge"
-version = "1.13.1"
+version = "1.14.0-alpha.0"
 authors = [
 {name = "windsnow1025", email = "windsnow1025@gmail.com"}
 ]
@@ -23,20 +23,19 @@ dependencies = [
 "tenacity",
 "openai==2.9.0",
 "tiktoken==0.11.0",
-"google-genai==1.46.0",
+"google-genai==1.46.0", # google.genai
 "anthropic==0.75.0",
-"PyMuPDF",
+"PyMuPDF", # fitz
 "docxlatex>=1.1.1",
 "openpyxl",
-"python-pptx",
+"python-pptx", # pptx
 ]
 
-[
-
+[dependency-groups]
+dev = [
 "pytest",
 "pytest-asyncio",
-"python-dotenv",
-"protobuf"
+"python-dotenv", #dotenv
 ]
 
 [tool.setuptools.packages.find]
````
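In pyproject.toml the optional `test` extra (`Provides-Extra: test` in the old metadata) is replaced by a PEP 735 `[dependency-groups]` `dev` group, which is why the extra-related `Requires-Dist` lines drop out of PKG-INFO above; setuptools also normalizes the new `1.14.0-alpha.0` version string to `1.14.0a0` per PEP 440, matching the PKG-INFO version. A minimal sketch of how such a dev group is consumed with uv, relying on uv's documented defaults rather than anything stated in this diff:

```bash
# uv installs the `dev` dependency group by default, so pytest,
# pytest-asyncio and python-dotenv land in the project environment.
uv sync

# Run the test suite inside that environment (matches the README's Test step).
uv run pytest

# Skip the dev group for a production-like install.
uv sync --no-dev
```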