LLM-Bridge 1.15.3.tar.gz → 1.15.4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/.gitignore +71 -15
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/PKG-INFO +1 -1
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/pyproject.toml +1 -1
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/uv.lock +1 -1
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/.gitattributes +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/.github/workflows/python-publish.yml +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/LICENSE +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/MANIFEST.in +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/README.md +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/chat_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/claude/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/openai_responses_response_handler.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/implementations/printing_status.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/model_client/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/model_client/claude_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/model_client/gemini_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/client/model_client/openai_client.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/file_fetch.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/logic/model_prices.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/resources/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/resources/model_prices.json +11 -11
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/chat_response.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/message.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/model_message/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/model_message/claude_message.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/model_message/gemini_message.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/model_message/openai_message.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/type/serializer.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/tests/__init__.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/tests/chat_client_factory_test.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/tests/message_preprocessor_test.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/usage/.env.example +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/usage/main.py +0 -0
- {llm_bridge-1.15.3 → llm_bridge-1.15.4}/usage/workflow.py +0 -0
{llm_bridge-1.15.3 → llm_bridge-1.15.4}/.gitignore CHANGED

@@ -1,6 +1,6 @@
 # Byte-compiled / optimized / DLL files
 __pycache__/
-*.py[cod]
+*.py[codz]
 *$py.class

 # C extensions
@@ -27,8 +27,8 @@ share/python-wheels/
 MANIFEST

 # PyInstaller
-#
-#
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
 *.manifest
 *.spec

@@ -46,7 +46,7 @@ htmlcov/
 nosetests.xml
 coverage.xml
 *.cover
-*.py,cover
+*.py.cover
 .hypothesis/
 .pytest_cache/
 cover/
@@ -92,22 +92,37 @@ ipython_config.py
 # However, in case of collaboration, if having platform-specific dependencies or dependencies
 # having no cross-platform support, pipenv may install dependencies that don't work, or not
 # install all needed dependencies.
-#Pipfile.lock
+# Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# uv.lock

 # poetry
 # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
 # This is especially recommended for binary packages to ensure reproducibility, and is more
 # commonly ignored for libraries.
 # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
-#poetry.lock
+# poetry.lock
+# poetry.toml

 # pdm
 # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
-#pdm.
-# pdm
-#
-#
-.pdm
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+# pdm.lock
+# pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+# pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi

 # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
 __pypackages__/
@@ -116,11 +131,25 @@ __pypackages__/
 celerybeat-schedule
 celerybeat.pid

+# Redis
+*.rdb
+*.aof
+*.pid
+
+# RabbitMQ
+mnesia/
+rabbitmq/
+rabbitmq-data/
+
+# ActiveMQ
+activemq-data/
+
 # SageMath parsed files
 *.sage.py

 # Environments
 .env
+.envrc
 .venv
 env/
 venv/
@@ -153,8 +182,35 @@ dmypy.json
 cython_debug/

 # PyCharm
-#
-#
-#
-#
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
+
+# Streamlit
+.streamlit/secrets.toml
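The one pattern change in the first hunk, *.py[cod] → *.py[codz], widens the byte-code character class. As a quick illustration (a sketch using Python's standard fnmatch, whose bracket classes behave like git's for this simple pattern), the new class also catches zipapp archives while leaving plain .py sources tracked:

```python
import fnmatch

# "*.py[codz]" matches exactly one trailing character from the class: c, o, d, or z.
pattern = "*.py[codz]"
candidates = ["module.pyc", "module.pyo", "ext.pyd", "app.pyz", "script.py"]
for name in candidates:
    print(f"{name}: {'ignored' if fnmatch.fnmatch(name, pattern) else 'kept'}")
# module.pyc/.pyo/.pyd and app.pyz match; script.py does not, because the
# pattern requires one extra character from [codz] after ".py".
```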
{llm_bridge-1.15.3 → llm_bridge-1.15.4}/llm_bridge/resources/model_prices.json CHANGED

@@ -1,46 +1,46 @@
 [
   {
-    "apiType": "Gemini-Paid",
+    "apiType": "Gemini-Vertex",
     "model": "gemini-3-pro-preview",
     "input": 4,
     "output": 18
   },
   {
-    "apiType": "Gemini-Paid",
+    "apiType": "Gemini-Vertex",
     "model": "gemini-3-flash-preview",
     "input": 1,
     "output": 3
   },
   {
-    "apiType": "Gemini-Paid",
+    "apiType": "Gemini-Vertex",
     "model": "gemini-3-pro-image-preview",
     "input": 2,
     "output": 120
   },
   {
     "apiType": "Gemini-Paid",
-    "model": "gemini-flash-latest",
-    "input": 1,
-    "output": 2.5
-  },
-  {
-    "apiType": "Gemini-Vertex",
     "model": "gemini-3-pro-preview",
     "input": 4,
     "output": 18
   },
   {
-    "apiType": "Gemini-Vertex",
+    "apiType": "Gemini-Paid",
     "model": "gemini-3-flash-preview",
     "input": 1,
     "output": 3
   },
   {
-    "apiType": "Gemini-Vertex",
+    "apiType": "Gemini-Paid",
     "model": "gemini-3-pro-image-preview",
     "input": 2,
     "output": 120
   },
+  {
+    "apiType": "Gemini-Paid",
+    "model": "gemini-flash-latest",
+    "input": 1,
+    "output": 2.5
+  },
   {
     "apiType": "Gemini-Free",
     "model": "gemini-3-flash-preview",
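The model_prices.json change reorders the Gemini entries (the Vertex block now comes first, and gemini-flash-latest moves to the end of the paid block) without touching any rates. Each entry pairs an apiType and model with input/output prices; the unit is not stated in this diff, so USD per million tokens is an assumption below. A minimal sketch of how such a table could be queried, using hypothetical helper names rather than llm_bridge's actual API (the real lookup lives in llm_bridge/logic/model_prices.py and is not shown here):

```python
import json
from pathlib import Path

# Hypothetical helpers for illustration only.
def load_prices(path: Path) -> list[dict]:
    return json.loads(path.read_text(encoding="utf-8"))

def find_price(prices: list[dict], api_type: str, model: str) -> dict | None:
    # Linear scan: the same model can appear under several apiType tiers
    # (Gemini-Vertex, Gemini-Paid, Gemini-Free), so both fields are matched.
    for entry in prices:
        if entry["apiType"] == api_type and entry["model"] == model:
            return entry
    return None

prices = load_prices(Path("llm_bridge/resources/model_prices.json"))
entry = find_price(prices, "Gemini-Paid", "gemini-flash-latest")
if entry:
    # "input"/"output" are assumed to be USD per million tokens.
    cost = 120_000 / 1_000_000 * entry["input"] + 4_000 / 1_000_000 * entry["output"]
    print(f"Estimated cost for 120k input / 4k output tokens: ${cost:.4f}")
```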