iflow-mcp_sjquant-llm-bridge-mcp-0.1.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,8 @@
+ .vscode
+ .venv
+ .env
+ .cursorignore
+ .gitignore
+ .git
+ .github
+ uv.lock
@@ -0,0 +1,16 @@
+ .git
+ Dockerfile
+ .DS_Store
+ .gitignore
+ .dockerignore
+ .python-version
+ **/.vscode
+ **/__pycache__
+ **/.env
+ **/*.env.*
+ **/*.pyc
+ .venv/
+ .pytest_cache/
+ .ruff_cache/
+ .mypy_cache/
+ .cache/
@@ -0,0 +1,72 @@
+ name: Automated Release Process
+
+ on:
+   push:
+     branches:
+       - main
+
+ jobs:
+   publish:
+     runs-on: ubuntu-latest
+     permissions:
+       contents: write
+       id-token: write
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+
+       - name: Install uv
+         uses: astral-sh/setup-uv@v5
+         with:
+           enable-cache: true
+           python-version: "3.11"
+
+       - name: Determine Version Change
+         id: version_check
+         run: |
+           VERSION="v$(uvx --from=toml-cli toml get --toml-path=pyproject.toml project.version)"
+           echo "Current version: $VERSION"
+
+           LATEST_RELEASE=$(curl -s -H "Authorization: token ${{ github.token }}" \
+             https://api.github.com/repos/${{ github.repository }}/releases/latest | jq -r '.tag_name')
+           echo "Latest release version: $LATEST_RELEASE"
+
+           if [ "$VERSION" != "$LATEST_RELEASE" ]; then
+             echo "Version has changed."
+             echo "version_changed=true" >> $GITHUB_OUTPUT
+             echo "new_version=$VERSION" >> $GITHUB_OUTPUT
+           else
+             echo "No version change detected."
+             echo "version_changed=false" >> $GITHUB_OUTPUT
+           fi
+
+       - name: Create Release
+         if: steps.version_check.outputs.version_changed == 'true'
+         uses: softprops/action-gh-release@v2
+         with:
+           tag_name: ${{ steps.version_check.outputs.new_version }}
+           generate_release_notes: True
+
+       - name: mint API token
+         id: mint-token
+         run: |
+           # retrieve the ambient OIDC token
+           resp=$(curl -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \
+             "$ACTIONS_ID_TOKEN_REQUEST_URL&audience=pypi")
+           oidc_token=$(jq -r '.value' <<< "${resp}")
+
+           # exchange the OIDC token for an API token
+           resp=$(curl -X POST https://pypi.org/_/oidc/mint-token -d "{\"token\": \"${oidc_token}\"}")
+           api_token=$(jq -r '.token' <<< "${resp}")
+
+           # mask the newly minted API token, so that we don't accidentally leak it
+           echo "::add-mask::${api_token}"
+
+           # see the next step in the workflow for an example of using this step output
+           echo "api-token=${api_token}" >> "${GITHUB_OUTPUT}"
+
+       - name: Build and publish to PyPI
+         if: steps.version_check.outputs.version_changed == 'true'
+         run: |
+           uv build
+           uv publish --token ${{ steps.mint-token.outputs.api-token }}
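
For reference, the workflow above publishes only when the version in `pyproject.toml` differs from the latest GitHub release tag. A minimal sketch of reproducing that check locally (assuming `uv`, `curl`, and `jq` are installed, and using the repository slug from the badges in the README below):

```bash
# Read the project version the same way the workflow does
VERSION="v$(uvx --from=toml-cli toml get --toml-path=pyproject.toml project.version)"

# Compare against the latest published GitHub release tag
LATEST=$(curl -s https://api.github.com/repos/sjquant/llm-bridge-mcp/releases/latest | jq -r '.tag_name')

if [ "$VERSION" != "$LATEST" ]; then
  echo "A push to main would create release $VERSION"
else
  echo "No release would be created"
fi
```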
@@ -0,0 +1,174 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # UV
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ #uv.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ #pdm.lock
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+ # in version control.
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+ .pdm.toml
+ .pdm-python
+ .pdm-build/
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ #.idea/
+
+ # Ruff stuff:
+ .ruff_cache/
+
+ # PyPI configuration file
+ .pypirc
@@ -0,0 +1,6 @@
+ {
+   "editor.formatOnSave": true,
+   "python.languageServer": "Pylance",
+   "python.analysis.typeCheckingMode": "strict",
+   "ruff.importStrategy": "fromEnvironment"
+ }
@@ -0,0 +1,26 @@
+ # Use UV's official Python 3.11 Alpine image which comes with UV pre-installed
+ FROM ghcr.io/astral-sh/uv:python3.11-alpine
+
+ # Install build dependencies
+ RUN apk add --no-cache gcc musl-dev linux-headers
+
+ # Set the working directory
+ WORKDIR /app
+
+ # Use system python without virtual environment
+ ENV UV_PROJECT_ENVIRONMENT=/usr/local
+
+ # Install dependencies first (without the project)
+ RUN --mount=type=cache,target=/root/.cache/uv \
+     --mount=type=bind,source=uv.lock,target=uv.lock \
+     --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+     uv sync --frozen --no-install-project
+
+ # Copy the project into the image
+ COPY . .
+
+ # Sync the project
+ RUN --mount=type=cache,target=/root/.cache/uv \
+     uv sync --frozen
+
+ CMD ["llm-bridge-mcp"]
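
As a usage sketch (not part of the packaged files; the image tag is illustrative), the Dockerfile above can be built and run over stdio like this:

```bash
# Build the image from the repository root
docker build -t llm-bridge-mcp .

# Run the MCP server over stdio, supplying provider keys from a local .env file
docker run -i --rm --env-file .env llm-bridge-mcp
```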
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Seonu Jang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,158 @@
+ Metadata-Version: 2.4
+ Name: iflow-mcp_sjquant-llm-bridge-mcp
+ Version: 0.1.2
+ Summary: A simple MCP server that provides a unified interface to various LLM providers using Pydantic AI
+ License-File: LICENSE
+ Requires-Python: >=3.11
+ Requires-Dist: mcp[cli]>=0.1.0
+ Requires-Dist: pydantic-ai>=0.0.39
+ Requires-Dist: pydantic>=2.10.6
+ Requires-Dist: python-dotenv>=1.0.0
+ Description-Content-Type: text/markdown
+
+ # LLM Bridge MCP
+ [![smithery badge](https://smithery.ai/badge/@sjquant/llm-bridge-mcp)](https://smithery.ai/server/@sjquant/llm-bridge-mcp)
+
+ LLM Bridge MCP allows AI agents to interact with multiple large language models through a standardized interface. It leverages the Model Context Protocol (MCP) to provide seamless access to different LLM providers, making it easy to switch between models or use multiple models in the same application.
+
+ <a href="https://glama.ai/mcp/servers/@sjquant/llm-bridge-mcp">
+   <img width="380" height="200" src="https://glama.ai/mcp/servers/@sjquant/llm-bridge-mcp/badge" alt="LLM Bridge MCP server" />
+ </a>
+
+ ## Features
+
+ - Unified interface to multiple LLM providers:
+   - OpenAI (GPT models)
+   - Anthropic (Claude models)
+   - Google (Gemini models)
+   - DeepSeek
+   - ...
+ - Built with Pydantic AI for type safety and validation
+ - Supports customizable parameters like temperature and max tokens
+ - Provides usage tracking and metrics
+
+ ## Tools
+
+ The server implements the following tool:
+
+ ```python
+ run_llm(
+     prompt: str,
+     model_name: KnownModelName = "openai:gpt-4o-mini",
+     temperature: float = 0.7,
+     max_tokens: int = 8192,
+     system_prompt: str = "",
+ ) -> LLMResponse
+ ```
+
+ - `prompt`: The text prompt to send to the LLM
+ - `model_name`: Specific model to use (default: "openai:gpt-4o-mini")
+ - `temperature`: Controls randomness (0.0 to 1.0)
+ - `max_tokens`: Maximum number of tokens to generate
+ - `system_prompt`: Optional system prompt to guide the model's behavior
+
+ ## Installation
+
+ ### Installing via Smithery
+
+ To install llm-bridge-mcp for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@sjquant/llm-bridge-mcp):
+
+ ```bash
+ npx -y @smithery/cli install @sjquant/llm-bridge-mcp --client claude
+ ```
+
+ ### Manual Installation
+
+ 1. Clone the repository:
+
+    ```bash
+    git clone https://github.com/yourusername/llm-bridge-mcp.git
+    cd llm-bridge-mcp
+    ```
+
+ 2. Install [uv](https://github.com/astral-sh/uv) (if not already installed):
+
+    ```bash
+    # On macOS
+    brew install uv
+
+    # On Linux
+    curl -LsSf https://astral.sh/uv/install.sh | sh
+
+    # On Windows
+    powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
+    ```
+
+ ## Configuration
+
+ Create a `.env` file in the root directory with your API keys:
+
+ ```
+ OPENAI_API_KEY=your_openai_api_key
+ ANTHROPIC_API_KEY=your_anthropic_api_key
+ GOOGLE_API_KEY=your_google_api_key
+ DEEPSEEK_API_KEY=your_deepseek_api_key
+ ```
+
+ ## Usage
+
+ ### Using with Claude Desktop or Cursor
+
+ Add a server entry to your Claude Desktop configuration file or `.cursor/mcp.json`:
+
+ ```json
+ "mcpServers": {
+   "llm-bridge": {
+     "command": "uvx",
+     "args": [
+       "llm-bridge-mcp"
+     ],
+     "env": {
+       "OPENAI_API_KEY": "your_openai_api_key",
+       "ANTHROPIC_API_KEY": "your_anthropic_api_key",
+       "GOOGLE_API_KEY": "your_google_api_key",
+       "DEEPSEEK_API_KEY": "your_deepseek_api_key"
+     }
+   }
+ }
+ ```
+
+ ### Troubleshooting
+
+ #### Common Issues
+
+ ##### 1. "spawn uvx ENOENT" Error
+
+ This error occurs when the system cannot find the `uvx` executable in your PATH. To resolve this:
+
+ **Solution: Use the full path to uvx**
+
+ Find the full path to your uvx executable:
+
+ ```bash
+ # On macOS/Linux
+ which uvx
+
+ # On Windows
+ where.exe uvx
+ ```
+
+ Then update your MCP server configuration to use the full path:
+
+ ```json
+ "mcpServers": {
+   "llm-bridge": {
+     "command": "/full/path/to/uvx", // Replace with your actual path
+     "args": [
+       "llm-bridge-mcp"
+     ],
+     "env": {
+       // ... your environment variables
+     }
+   }
+ }
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
@@ -0,0 +1,146 @@
+ # LLM Bridge MCP
+ [![smithery badge](https://smithery.ai/badge/@sjquant/llm-bridge-mcp)](https://smithery.ai/server/@sjquant/llm-bridge-mcp)
+
+ LLM Bridge MCP allows AI agents to interact with multiple large language models through a standardized interface. It leverages the Model Context Protocol (MCP) to provide seamless access to different LLM providers, making it easy to switch between models or use multiple models in the same application.
+
+ <a href="https://glama.ai/mcp/servers/@sjquant/llm-bridge-mcp">
+   <img width="380" height="200" src="https://glama.ai/mcp/servers/@sjquant/llm-bridge-mcp/badge" alt="LLM Bridge MCP server" />
+ </a>
+
+ ## Features
+
+ - Unified interface to multiple LLM providers:
+   - OpenAI (GPT models)
+   - Anthropic (Claude models)
+   - Google (Gemini models)
+   - DeepSeek
+   - ...
+ - Built with Pydantic AI for type safety and validation
+ - Supports customizable parameters like temperature and max tokens
+ - Provides usage tracking and metrics
+
+ ## Tools
+
+ The server implements the following tool:
+
+ ```python
+ run_llm(
+     prompt: str,
+     model_name: KnownModelName = "openai:gpt-4o-mini",
+     temperature: float = 0.7,
+     max_tokens: int = 8192,
+     system_prompt: str = "",
+ ) -> LLMResponse
+ ```
+
+ - `prompt`: The text prompt to send to the LLM
+ - `model_name`: Specific model to use (default: "openai:gpt-4o-mini")
+ - `temperature`: Controls randomness (0.0 to 1.0)
+ - `max_tokens`: Maximum number of tokens to generate
+ - `system_prompt`: Optional system prompt to guide the model's behavior
+
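+ As a minimal, hypothetical client-side sketch (assuming the official `mcp` Python SDK, which this package already depends on; the tool name and arguments follow the signature above):
+
+ ```python
+ import asyncio
+
+ from mcp import ClientSession, StdioServerParameters
+ from mcp.client.stdio import stdio_client
+
+ # Launch the server the same way the Claude Desktop / Cursor config below does
+ server = StdioServerParameters(
+     command="uvx",
+     args=["llm-bridge-mcp"],
+     env={"OPENAI_API_KEY": "your_openai_api_key"},
+ )
+
+ async def main() -> None:
+     async with stdio_client(server) as (read, write):
+         async with ClientSession(read, write) as session:
+             await session.initialize()
+             result = await session.call_tool(
+                 "run_llm",
+                 {"prompt": "Say hello", "model_name": "openai:gpt-4o-mini"},
+             )
+             print(result.content)
+
+ asyncio.run(main())
+ ```
+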
+ ## Installation
+
+ ### Installing via Smithery
+
+ To install llm-bridge-mcp for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@sjquant/llm-bridge-mcp):
+
+ ```bash
+ npx -y @smithery/cli install @sjquant/llm-bridge-mcp --client claude
+ ```
+
+ ### Manual Installation
+
+ 1. Clone the repository:
+
+    ```bash
+    git clone https://github.com/yourusername/llm-bridge-mcp.git
+    cd llm-bridge-mcp
+    ```
+
+ 2. Install [uv](https://github.com/astral-sh/uv) (if not already installed):
+
+    ```bash
+    # On macOS
+    brew install uv
+
+    # On Linux
+    curl -LsSf https://astral.sh/uv/install.sh | sh
+
+    # On Windows
+    powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
+    ```
+
+ ## Configuration
+
+ Create a `.env` file in the root directory with your API keys:
+
+ ```
+ OPENAI_API_KEY=your_openai_api_key
+ ANTHROPIC_API_KEY=your_anthropic_api_key
+ GOOGLE_API_KEY=your_google_api_key
+ DEEPSEEK_API_KEY=your_deepseek_api_key
+ ```
+
+ ## Usage
+
+ ### Using with Claude Desktop or Cursor
+
+ Add a server entry to your Claude Desktop configuration file or `.cursor/mcp.json`:
+
+ ```json
+ "mcpServers": {
+   "llm-bridge": {
+     "command": "uvx",
+     "args": [
+       "llm-bridge-mcp"
+     ],
+     "env": {
+       "OPENAI_API_KEY": "your_openai_api_key",
+       "ANTHROPIC_API_KEY": "your_anthropic_api_key",
+       "GOOGLE_API_KEY": "your_google_api_key",
+       "DEEPSEEK_API_KEY": "your_deepseek_api_key"
+     }
+   }
+ }
+ ```
+
+ ### Troubleshooting
+
+ #### Common Issues
+
+ ##### 1. "spawn uvx ENOENT" Error
+
+ This error occurs when the system cannot find the `uvx` executable in your PATH. To resolve this:
+
+ **Solution: Use the full path to uvx**
+
+ Find the full path to your uvx executable:
+
+ ```bash
+ # On macOS/Linux
+ which uvx
+
+ # On Windows
+ where.exe uvx
+ ```
+
+ Then update your MCP server configuration to use the full path:
+
+ ```json
+ "mcpServers": {
+   "llm-bridge": {
+     "command": "/full/path/to/uvx", // Replace with your actual path
+     "args": [
+       "llm-bridge-mcp"
+     ],
+     "env": {
+       // ... your environment variables
+     }
+   }
+ }
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
@@ -0,0 +1 @@
+ python
@@ -0,0 +1,3 @@
+ from .main import main
+
+ __all__ = ["main"]