LLM-Bridge 1.13.1__tar.gz → 1.14.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79) hide show
  1. llm_bridge-1.14.0/.gitattributes +2 -0
  2. llm_bridge-1.14.0/.github/workflows/python-publish.yml +32 -0
  3. llm_bridge-1.14.0/.gitignore +160 -0
  4. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/PKG-INFO +38 -26
  5. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/README.md +28 -10
  6. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_client_factory/openai_client_factory.py +8 -9
  7. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/resources/model_prices.json +12 -0
  8. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/pyproject.toml +22 -27
  9. llm_bridge-1.14.0/tests/__init__.py +0 -0
  10. llm_bridge-1.14.0/tests/chat_client_factory_test.py +20 -0
  11. llm_bridge-1.14.0/tests/message_preprocessor_test.py +26 -0
  12. llm_bridge-1.14.0/usage/.env.example +9 -0
  13. llm_bridge-1.14.0/usage/main.py +226 -0
  14. llm_bridge-1.14.0/usage/workflow.py +34 -0
  15. llm_bridge-1.14.0/uv.lock +1025 -0
  16. llm_bridge-1.13.1/LLM_Bridge.egg-info/PKG-INFO +0 -88
  17. llm_bridge-1.13.1/LLM_Bridge.egg-info/SOURCES.txt +0 -67
  18. llm_bridge-1.13.1/LLM_Bridge.egg-info/dependency_links.txt +0 -1
  19. llm_bridge-1.13.1/LLM_Bridge.egg-info/requires.txt +0 -17
  20. llm_bridge-1.13.1/LLM_Bridge.egg-info/top_level.txt +0 -1
  21. llm_bridge-1.13.1/setup.cfg +0 -4
  22. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/LICENSE +0 -0
  23. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/MANIFEST.in +0 -0
  24. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/__init__.py +0 -0
  25. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/__init__.py +0 -0
  26. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/chat_client.py +0 -0
  27. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/__init__.py +0 -0
  28. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/claude/__init__.py +0 -0
  29. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/claude/claude_response_handler.py +0 -0
  30. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/claude/claude_token_counter.py +0 -0
  31. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/claude/non_stream_claude_client.py +0 -0
  32. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/claude/stream_claude_client.py +0 -0
  33. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/gemini/__init__.py +0 -0
  34. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/gemini/gemini_response_handler.py +0 -0
  35. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/gemini/gemini_token_counter.py +0 -0
  36. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/gemini/non_stream_gemini_client.py +0 -0
  37. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/gemini/stream_gemini_client.py +0 -0
  38. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/__init__.py +0 -0
  39. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/non_stream_openai_client.py +0 -0
  40. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/non_stream_openai_responses_client.py +0 -0
  41. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/openai_token_couter.py +0 -0
  42. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/steam_openai_responses_client.py +0 -0
  43. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/openai/stream_openai_client.py +0 -0
  44. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/implementations/printing_status.py +0 -0
  45. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/model_client/__init__.py +0 -0
  46. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/model_client/claude_client.py +0 -0
  47. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/model_client/gemini_client.py +0 -0
  48. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/client/model_client/openai_client.py +0 -0
  49. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/__init__.py +0 -0
  50. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/__init__.py +0 -0
  51. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/chat_client_factory.py +0 -0
  52. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/chat_message_converter.py +0 -0
  53. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/media_processor.py +0 -0
  54. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_client_factory/__init__.py +0 -0
  55. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_client_factory/claude_client_factory.py +0 -0
  56. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_client_factory/gemini_client_factory.py +0 -0
  57. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_client_factory/schema_converter.py +0 -0
  58. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_message_converter/__init__.py +0 -0
  59. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_message_converter/claude_message_converter.py +0 -0
  60. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_message_converter/gemini_message_converter.py +0 -0
  61. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_message_converter/openai_message_converter.py +0 -0
  62. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/chat_generate/model_message_converter/openai_responses_message_converter.py +0 -0
  63. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/file_fetch.py +0 -0
  64. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/message_preprocess/__init__.py +0 -0
  65. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/message_preprocess/code_file_extensions.py +0 -0
  66. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/message_preprocess/document_processor.py +0 -0
  67. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/message_preprocess/file_type_checker.py +0 -0
  68. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/message_preprocess/message_preprocessor.py +0 -0
  69. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/logic/model_prices.py +0 -0
  70. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/resources/__init__.py +0 -0
  71. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/__init__.py +0 -0
  72. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/chat_response.py +0 -0
  73. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/message.py +0 -0
  74. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/model_message/__init__.py +0 -0
  75. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/model_message/claude_message.py +0 -0
  76. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/model_message/gemini_message.py +0 -0
  77. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/model_message/openai_message.py +0 -0
  78. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/model_message/openai_responses_message.py +0 -0
  79. {llm_bridge-1.13.1 → llm_bridge-1.14.0}/llm_bridge/type/serializer.py +0 -0
@@ -0,0 +1,2 @@
1
+ # Auto detect text files and perform LF normalization
2
+ * text=auto
@@ -0,0 +1,32 @@
1
+ name: "Publish"
2
+
3
+ on:
4
+ push:
5
+ tags:
6
+ # Publish on any tag starting with a `v`, e.g., v0.1.0
7
+ - v*
8
+
9
+ jobs:
10
+ run:
11
+ runs-on: ubuntu-latest
12
+ environment:
13
+ name: pypi
14
+ permissions:
15
+ id-token: write
16
+ contents: read
17
+ steps:
18
+ - name: Checkout
19
+ uses: actions/checkout@v5
20
+ - name: Install uv
21
+ uses: astral-sh/setup-uv@v7
22
+ - name: Install Python 3.12
23
+ run: uv python install 3.12
24
+ - name: Build
25
+ run: uv build
26
+ # Check that basic features work and we didn't forget to include crucial files
27
+ # - name: Smoke test (wheel)
28
+ # run: uv run --isolated --no-project --with dist/*.whl tests/smoke_test.py
29
+ # - name: Smoke test (source distribution)
30
+ # run: uv run --isolated --no-project --with dist/*.tar.gz tests/smoke_test.py
31
+ - name: Publish
32
+ run: uv publish
@@ -0,0 +1,160 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ .idea/
@@ -1,32 +1,26 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLM-Bridge
3
- Version: 1.13.1
3
+ Version: 1.14.0
4
4
  Summary: A Bridge for LLMs
5
5
  Author-email: windsnow1025 <windsnow1025@gmail.com>
6
6
  License-Expression: MIT
7
- Keywords: llm,ai
7
+ License-File: LICENSE
8
+ Keywords: ai,llm
8
9
  Classifier: Framework :: FastAPI
9
10
  Classifier: Programming Language :: Python :: 3
10
11
  Requires-Python: >=3.12
11
- Description-Content-Type: text/markdown
12
- License-File: LICENSE
12
+ Requires-Dist: anthropic==0.75.0
13
+ Requires-Dist: docxlatex>=1.1.1
13
14
  Requires-Dist: fastapi
15
+ Requires-Dist: google-genai==1.46.0
14
16
  Requires-Dist: httpx
15
- Requires-Dist: tenacity
16
17
  Requires-Dist: openai==2.9.0
17
- Requires-Dist: tiktoken==0.11.0
18
- Requires-Dist: google-genai==1.46.0
19
- Requires-Dist: anthropic==0.75.0
20
- Requires-Dist: PyMuPDF
21
- Requires-Dist: docxlatex>=1.1.1
22
18
  Requires-Dist: openpyxl
19
+ Requires-Dist: pymupdf
23
20
  Requires-Dist: python-pptx
24
- Provides-Extra: test
25
- Requires-Dist: pytest; extra == "test"
26
- Requires-Dist: pytest-asyncio; extra == "test"
27
- Requires-Dist: python-dotenv; extra == "test"
28
- Requires-Dist: protobuf; extra == "test"
29
- Dynamic: license-file
21
+ Requires-Dist: tenacity
22
+ Requires-Dist: tiktoken==0.11.0
23
+ Description-Content-Type: text/markdown
30
24
 
31
25
  # LLM Bridge
32
26
 
@@ -71,18 +65,36 @@ The features listed represent the maximum capabilities of each API type supporte
71
65
  pip install --upgrade llm_bridge
72
66
  ```
73
67
 
74
- ## Test
68
+ ## Development
69
+
70
+ ### Python uv
71
+
72
+ 1. Install uv: `powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"`
73
+ 2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python upgrade 3.12`
74
+ 3. Configure requirements:
75
+ ```bash
76
+ uv sync
77
+ ```
78
+
79
+ ### PyCharm Professional
80
+
81
+ 1. Add New Interpreter >> Add Local Interpreter
82
+ - Environment: Select existing
83
+ - Type: uv
84
+ 2. Add New Configuration >> uv run >> script: `./usage/main.py`
85
+
86
+ ### Usage
87
+
88
+ Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
89
+
90
+ ### Test
75
91
 
76
92
  ```bash
77
- pytest
93
+ uv run pytest
78
94
  ```
79
95
 
80
- ## Quick Start
96
+ ### Build
81
97
 
82
- ### Setup
83
-
84
- 1. Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
85
- 2. Install requirements: `pip install -r requirements.txt`
86
- 3. In PyCharm, add a new Python configuration:
87
- - script: `./usage/main.py`
88
- - Paths to ".env" files: `./usage/.env`
98
+ ```bash
99
+ uv build
100
+ ```
@@ -41,18 +41,36 @@ The features listed represent the maximum capabilities of each API type supporte
41
41
  pip install --upgrade llm_bridge
42
42
  ```
43
43
 
44
- ## Test
44
+ ## Development
45
+
46
+ ### Python uv
47
+
48
+ 1. Install uv: `powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"`
49
+ 2. Install Python in uv: `uv python install 3.12`; upgrade Python in uv: `uv python upgrade 3.12`
50
+ 3. Configure requirements:
51
+ ```bash
52
+ uv sync
53
+ ```
54
+
55
+ ### PyCharm Professional
56
+
57
+ 1. Add New Interpreter >> Add Local Interpreter
58
+ - Environment: Select existing
59
+ - Type: uv
60
+ 2. Add New Configuration >> uv run >> script: `./usage/main.py`
61
+
62
+ ### Usage
63
+
64
+ Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
65
+
66
+ ### Test
45
67
 
46
68
  ```bash
47
- pytest
69
+ uv run pytest
48
70
  ```
49
71
 
50
- ## Quick Start
51
-
52
- ### Setup
72
+ ### Build
53
73
 
54
- 1. Copy `./usage/.env.example` and rename it to `./usage/.env`, then fill in the environment variables.
55
- 2. Install requirements: `pip install -r requirements.txt`
56
- 3. In PyCharm, add a new Python configuration:
57
- - script: `./usage/main.py`
58
- - Paths to ".env" files: `./usage/.env`
74
+ ```bash
75
+ uv build
76
+ ```
@@ -65,7 +65,7 @@ async def create_openai_client(
65
65
  tools = []
66
66
  reasoning = None
67
67
 
68
- if model not in ["gpt-5-chat-latest", "gpt-5-pro"]:
68
+ if model not in ["gpt-5-pro", "gpt-5.2-pro"]:
69
69
  if code_execution:
70
70
  tools.append(
71
71
  CodeInterpreter(
@@ -73,16 +73,15 @@ async def create_openai_client(
73
73
  container=CodeInterpreterContainerCodeInterpreterToolAuto(type="auto")
74
74
  )
75
75
  )
76
- if model not in ["gpt-5-chat-latest"]:
77
- tools.append(
78
- WebSearchToolParam(
79
- type="web_search",
80
- search_context_size="high",
81
- )
76
+ tools.append(
77
+ WebSearchToolParam(
78
+ type="web_search",
79
+ search_context_size="high",
82
80
  )
83
- if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
81
+ )
82
+ if re.match(r"gpt-5.*", model):
84
83
  temperature = 1
85
- if re.match(r"gpt-5.*", model) and model != "gpt-5-chat-latest":
84
+ if re.match(r"gpt-5.*", model):
86
85
  if thought:
87
86
  reasoning = Reasoning(
88
87
  effort="high",
@@ -71,6 +71,12 @@
71
71
  "input": 2.5,
72
72
  "output": 15
73
73
  },
74
+ {
75
+ "apiType": "OpenAI",
76
+ "model": "gpt-5.2",
77
+ "input": 1.75,
78
+ "output": 14
79
+ },
74
80
  {
75
81
  "apiType": "OpenAI",
76
82
  "model": "gpt-5.1",
@@ -89,6 +95,12 @@
89
95
  "input": 0.25,
90
96
  "output": 2
91
97
  },
98
+ {
99
+ "apiType": "OpenAI",
100
+ "model": "gpt-5.2-pro",
101
+ "input": 21,
102
+ "output": 168
103
+ },
92
104
  {
93
105
  "apiType": "OpenAI",
94
106
  "model": "gpt-5-pro",
@@ -1,47 +1,42 @@
1
1
  [build-system]
2
- requires = ["setuptools"]
3
- build-backend = "setuptools.build_meta"
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "LLM-Bridge"
7
- version = "1.13.1"
8
- authors = [
9
- {name = "windsnow1025", email = "windsnow1025@gmail.com"}
10
- ]
11
- description = "A Bridge for LLMs"
12
- readme = "README.md"
13
- requires-python = ">=3.12"
14
- keywords = ["llm", "ai"]
15
- license = "MIT"
16
- classifiers = [
17
- "Framework :: FastAPI",
18
- "Programming Language :: Python :: 3",
19
- ]
7
+ version = "1.14.0"
20
8
  dependencies = [
21
9
  "fastapi",
22
10
  "httpx",
23
11
  "tenacity",
24
12
  "openai==2.9.0",
25
13
  "tiktoken==0.11.0",
26
- "google-genai==1.46.0",
14
+ "google-genai==1.46.0", # google.genai
27
15
  "anthropic==0.75.0",
28
- "PyMuPDF",
16
+ "PyMuPDF", # fitz
29
17
  "docxlatex>=1.1.1",
30
18
  "openpyxl",
31
- "python-pptx",
19
+ "python-pptx", # pptx
20
+ ]
21
+ requires-python = ">=3.12"
22
+ authors = [
23
+ {name = "windsnow1025", email = "windsnow1025@gmail.com"}
24
+ ]
25
+ description = "A Bridge for LLMs"
26
+ readme = "README.md"
27
+ license = "MIT"
28
+ keywords = ["llm", "ai"]
29
+ classifiers = [
30
+ "Framework :: FastAPI",
31
+ "Programming Language :: Python :: 3",
32
32
  ]
33
33
 
34
- [project.optional-dependencies]
35
- test = [
34
+ [dependency-groups]
35
+ dev = [
36
36
  "pytest",
37
37
  "pytest-asyncio",
38
- "python-dotenv",
39
- "protobuf"
38
+ "python-dotenv", #dotenv
40
39
  ]
41
40
 
42
- [tool.setuptools.packages.find]
43
- where = ["."]
44
- include = ["llm_bridge*"]
45
-
46
41
  [tool.pytest.ini_options]
47
- asyncio_default_fixture_loop_scope = "function"
42
+ asyncio_default_fixture_loop_scope = "function"
File without changes
@@ -0,0 +1,20 @@
1
+ import pytest
2
+
3
+ from llm_bridge.type.message import Message, Role, Content, ContentType
4
+
5
+
6
+ @pytest.fixture
7
+ def sample_messages():
8
+ return [
9
+ Message(role=Role.System, contents=[
10
+ Content(type=ContentType.Text, data="You are a helpful assistant.")
11
+ ]),
12
+ Message(role=Role.User, contents=[
13
+ Content(type=ContentType.Text, data="Hello")
14
+ ])
15
+ ]
16
+
17
+
18
+ @pytest.mark.asyncio
19
+ async def test_placeholder():
20
+ assert True
@@ -0,0 +1,26 @@
1
+ import pytest
2
+
3
+ from llm_bridge.logic.message_preprocess.message_preprocessor import extract_system_messages
4
+ from llm_bridge.type.message import Message, Role, Content, ContentType
5
+
6
+
7
+ @pytest.fixture
8
+ def sample_messages():
9
+ return [
10
+ Message(role=Role.System, contents=[
11
+ Content(type=ContentType.Text, data="You are a helpful assistant.")
12
+ ]),
13
+ Message(role=Role.User, contents=[
14
+ Content(type=ContentType.Text, data="Hello")
15
+ ])
16
+ ]
17
+
18
+ def test_extract_system_messages(sample_messages):
19
+ extracted_text = extract_system_messages(sample_messages)
20
+
21
+ assert extracted_text == "You are a helpful assistant.\n"
22
+
23
+ assert len(sample_messages) == 1
24
+ assert sample_messages[0].role == Role.User
25
+ assert sample_messages[0].contents[0].type == ContentType.Text
26
+ assert sample_messages[0].contents[0].data == "Hello"
@@ -0,0 +1,9 @@
1
+ OPENAI_API_KEY=
2
+ GEMINI_FREE_API_KEY=
3
+ GEMINI_PAID_API_KEY=
4
+ GEMINI_VERTEX_API_KEY=
5
+ ANTHROPIC_API_KEY=
6
+ AZURE_API_KEY=
7
+ AZURE_API_BASE=
8
+ GITHUB_API_KEY=
9
+ XAI_API_KEY=