mhub_proxy 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mhub_proxy-1.0.0/PKG-INFO +107 -0
- mhub_proxy-1.0.0/README.md +97 -0
- mhub_proxy-1.0.0/mhub_proxy/__init__.py +3 -0
- mhub_proxy-1.0.0/mhub_proxy/app.py +20 -0
- mhub_proxy-1.0.0/mhub_proxy/auth.py +41 -0
- mhub_proxy-1.0.0/mhub_proxy/cli.py +67 -0
- mhub_proxy-1.0.0/mhub_proxy/config.py +35 -0
- mhub_proxy-1.0.0/mhub_proxy/proxy.py +85 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/PKG-INFO +107 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/SOURCES.txt +14 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/dependency_links.txt +1 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/entry_points.txt +2 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/requires.txt +3 -0
- mhub_proxy-1.0.0/mhub_proxy.egg-info/top_level.txt +1 -0
- mhub_proxy-1.0.0/pyproject.toml +27 -0
- mhub_proxy-1.0.0/setup.cfg +4 -0
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mhub_proxy
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: OpenAI-compatible local proxy for enterprise LLM endpoints
|
|
5
|
+
Requires-Python: >=3.10
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: fastapi>=0.133.1
|
|
8
|
+
Requires-Dist: httpx>=0.28.1
|
|
9
|
+
Requires-Dist: uvicorn>=0.41.0
|
|
10
|
+
|
|
11
|
+
# mhub_proxy
|
|
12
|
+
|
|
13
|
+
`mhub_proxy` is a local OpenAI-compatible proxy that lets CLI tools (for example, Opencode) send OpenAI-style requests to SecureGPT.
|
|
14
|
+
|
|
15
|
+
## Requirements
|
|
16
|
+
|
|
17
|
+
`uv` is required to install and run the proxy.
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
Or, using `pip`:
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
pip install uv
|
|
27
|
+
```
|
|
28
|
+
## Install `mhub_proxy`
|
|
29
|
+
|
|
30
|
+
Install as a `uv` tool:
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
uv tool install mhub_proxy
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Required environment variables
|
|
37
|
+
|
|
38
|
+
Before starting the proxy, define the variables used by `mhub_proxy/config.py`:
|
|
39
|
+
|
|
40
|
+
- `MHUB_API_URL` : URL of the Model Hub API
|
|
41
|
+
- `MHUB_CLIENT_ID` : Client ID for the Model Hub API
|
|
42
|
+
- `MHUB_CLIENT_SECRET` : Client Secret for the Model Hub API
|
|
43
|
+
- `ONEACCOUNT_LOGIN_URL` : URL of the OneAccount login API
|
|
44
|
+
|
|
45
|
+
Optional:
|
|
46
|
+
|
|
47
|
+
- `MHUB_PROXY_HOST` (default: `127.0.0.1`)
|
|
48
|
+
- `MHUB_PROXY_PORT` (default: `8123`)
|
|
49
|
+
- `MHUB_PROXY_LOG_LEVEL` (default: `info`)
|
|
50
|
+
|
|
51
|
+
## Run the proxy
|
|
52
|
+
|
|
53
|
+
Start the proxy with:
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
uvx mhub_proxy
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
By default, the proxy runs on `http://127.0.0.1:8123`.
|
|
60
|
+
|
|
61
|
+
## Use with Opencode
|
|
62
|
+
|
|
63
|
+
1. Add an `opencode.json` file to the root of your project.
|
|
64
|
+
2. Use the configuration in this repository as your example (`/opencode.json`).
|
|
65
|
+
|
|
66
|
+
**IMPORTANT: Notice how the API version is included in the model ID, e.g. `gpt-5.1-2025-11-13@2025-01-01-preview`.**
|
|
67
|
+
This is required for the proxy to correctly route requests to the Model Hub API.
|
|
68
|
+
|
|
69
|
+
3. Optionally: Install the OpenCode plugin (see below) in your project (`uvx mhub_proxy --install-plugin`).
|
|
70
|
+
4. Start Opencode from a terminal **inside your project directory**.
|
|
71
|
+
|
|
72
|
+
The included `opencode.json` points Opencode to the local proxy URL:
|
|
73
|
+
|
|
74
|
+
```json
|
|
75
|
+
"baseURL": "http://127.0.0.1:8123/openai"
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
When Opencode asks for an API key, you can enter any value.
|
|
79
|
+
|
|
80
|
+
Reference: [OpenCode plugin docs](https://opencode.ai/docs/plugins/#create-a-plugin)
|
|
81
|
+
|
|
82
|
+
## Install the OpenCode plugin
|
|
83
|
+
|
|
84
|
+
A plugin is included to automatically start the proxy when Opencode starts (if not started already).
|
|
85
|
+
Optionally, you may install the bundled plugin into your project:
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
uvx mhub_proxy --install-plugin
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
This copies plugin files into:
|
|
92
|
+
|
|
93
|
+
```text
|
|
94
|
+
<project_root>/.opencode/plugins
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
You can also install into a specific project root:
|
|
98
|
+
|
|
99
|
+
```bash
|
|
100
|
+
uvx mhub_proxy --install-plugin --project-root /path/to/project
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## Notes
|
|
104
|
+
|
|
105
|
+
- The proxy expects OpenAI chat-completions style traffic from your CLI tool.
|
|
106
|
+
- If startup fails, first verify that all required environment variables are set.
|
|
107
|
+
- The installed OpenCode plugin checks proxy health at session start and will launch `uvx mhub_proxy` in the background when needed.
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
# mhub_proxy
|
|
2
|
+
|
|
3
|
+
`mhub_proxy` is a local OpenAI-compatible proxy that lets CLI tools (for example, Opencode) send OpenAI-style requests to SecureGPT.
|
|
4
|
+
|
|
5
|
+
## Requirements
|
|
6
|
+
|
|
7
|
+
`uv` is required to install and run the proxy.
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
Or, using `pip`:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
pip install uv
|
|
17
|
+
```
|
|
18
|
+
## Install `mhub_proxy`
|
|
19
|
+
|
|
20
|
+
Install as a `uv` tool:
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
uv tool install mhub_proxy
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Required environment variables
|
|
27
|
+
|
|
28
|
+
Before starting the proxy, define the variables used by `mhub_proxy/config.py`:
|
|
29
|
+
|
|
30
|
+
- `MHUB_API_URL` : URL of the Model Hub API
|
|
31
|
+
- `MHUB_CLIENT_ID` : Client ID for the Model Hub API
|
|
32
|
+
- `MHUB_CLIENT_SECRET` : Client Secret for the Model Hub API
|
|
33
|
+
- `ONEACCOUNT_LOGIN_URL` : URL of the OneAccount login API
|
|
34
|
+
|
|
35
|
+
Optional:
|
|
36
|
+
|
|
37
|
+
- `MHUB_PROXY_HOST` (default: `127.0.0.1`)
|
|
38
|
+
- `MHUB_PROXY_PORT` (default: `8123`)
|
|
39
|
+
- `MHUB_PROXY_LOG_LEVEL` (default: `info`)
|
|
40
|
+
|
|
41
|
+
## Run the proxy
|
|
42
|
+
|
|
43
|
+
Start the proxy with:
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
uvx mhub_proxy
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
By default, the proxy runs on `http://127.0.0.1:8123`.
|
|
50
|
+
|
|
51
|
+
## Use with Opencode
|
|
52
|
+
|
|
53
|
+
1. Add an `opencode.json` file to the root of your project.
|
|
54
|
+
2. Use the configuration in this repository as your example (`/opencode.json`).
|
|
55
|
+
|
|
56
|
+
**IMPORTANT: Notice how the API version is included in the model ID, e.g. `gpt-5.1-2025-11-13@2025-01-01-preview`.**
|
|
57
|
+
This is required for the proxy to correctly route requests to the Model Hub API.
|
|
58
|
+
|
|
59
|
+
3. Optionally: Install the OpenCode plugin (see below) in your project (`uvx mhub_proxy --install-plugin`).
|
|
60
|
+
4. Start Opencode from a terminal **inside your project directory**.
|
|
61
|
+
|
|
62
|
+
The included `opencode.json` points Opencode to the local proxy URL:
|
|
63
|
+
|
|
64
|
+
```json
|
|
65
|
+
"baseURL": "http://127.0.0.1:8123/openai"
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
When Opencode asks for an API key, you can enter any value.
|
|
69
|
+
|
|
70
|
+
Reference: [OpenCode plugin docs](https://opencode.ai/docs/plugins/#create-a-plugin)
|
|
71
|
+
|
|
72
|
+
## Install the OpenCode plugin
|
|
73
|
+
|
|
74
|
+
A plugin is included to automatically start the proxy when Opencode starts (if not started already).
|
|
75
|
+
Optionally, you may install the bundled plugin into your project:
|
|
76
|
+
|
|
77
|
+
```bash
|
|
78
|
+
uvx mhub_proxy --install-plugin
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
This copies plugin files into:
|
|
82
|
+
|
|
83
|
+
```text
|
|
84
|
+
<project_root>/.opencode/plugins
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
You can also install into a specific project root:
|
|
88
|
+
|
|
89
|
+
```bash
|
|
90
|
+
uvx mhub_proxy --install-plugin --project-root /path/to/project
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
## Notes
|
|
94
|
+
|
|
95
|
+
- The proxy expects OpenAI chat-completions style traffic from your CLI tool.
|
|
96
|
+
- If startup fails, first verify that all required environment variables are set.
|
|
97
|
+
- The installed OpenCode plugin checks proxy health at session start and will launch `uvx mhub_proxy` in the background when needed.
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from contextlib import asynccontextmanager
|
|
3
|
+
|
|
4
|
+
from fastapi import FastAPI
|
|
5
|
+
|
|
6
|
+
from .proxy import router as proxy_router
|
|
7
|
+
|
|
8
|
+
@asynccontextmanager
async def lifespan(app_instance: FastAPI):
    """Initialize shared auth-token cache state when the app starts.

    The cached header/expiry fields are read and written by
    ``mhub_proxy.auth._authenticate`` to avoid re-authenticating on every
    request.

    NOTE(review): ``token_lock`` is created here but auth.py does not
    currently acquire it, so concurrent token refreshes are not serialized —
    confirm intent.
    """
    app_instance.state.token_lock = asyncio.Lock()
    app_instance.state.cached_auth_header = ""
    app_instance.state.cached_auth_expires_at = 0.0
    # The original wrapped this yield in a no-op try/finally; removed as
    # dead code — there is no shutdown cleanup to perform.
    yield
|
|
18
|
+
|
|
19
|
+
# ASGI application entry point; referenced by cli.py as "mhub_proxy.app:app".
# NOTE(review): package version is 1.0.0 but the app reports 0.1.0 — confirm.
app = FastAPI(title="SecureGPT MHUB Proxy", version="0.1.0", lifespan=lifespan)
# Mount /health and /openai/chat/completions from proxy.py.
app.include_router(proxy_router)
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import time
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
from fastapi import Request
|
|
5
|
+
|
|
6
|
+
from .config import oneaccount_login_url, mhub_client_id, mhub_client_secret
|
|
7
|
+
|
|
8
|
+
async def _authenticate(request: Request) -> str:
    """Return a OneAccount OAuth2 access token, refreshing it when expired.

    The token is cached on ``request.app.state`` (fields initialized in
    app.py's lifespan) so subsequent requests reuse it until shortly before
    expiry.

    Raises:
        httpx.HTTPStatusError: if the token endpoint returns an error status.
        ValueError: if the token response lacks an ``access_token``.
    """
    state = request.app.state
    if state.cached_auth_header and state.cached_auth_expires_at > time.time():
        return state.cached_auth_header

    # Serialize refreshes: without the lock (created in the app lifespan),
    # concurrent requests would each hit the token endpoint.
    async with state.token_lock:
        # Another request may have refreshed while we waited for the lock.
        if state.cached_auth_header and state.cached_auth_expires_at > time.time():
            return state.cached_auth_header

        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
        }
        payload = {
            "grant_type": "client_credentials",
            "client_id": mhub_client_id(),
            "client_secret": mhub_client_secret(),
            "scope": "urn:grp:chatgpt",
        }

        # NOTE(review): verify=False disables TLS certificate validation —
        # presumably required for an internal CA; confirm.
        async with httpx.AsyncClient(verify=False) as client:
            response = await client.post(
                f"{oneaccount_login_url()}/as/token.oauth2",
                headers=headers,
                data=payload,
                timeout=15,
            )
        response.raise_for_status()
        response_json = response.json()
        access_token = response_json.get("access_token")
        if not access_token:
            # Previously a missing token was cached as None and returned,
            # producing "Bearer None" upstream; fail loudly instead.
            raise ValueError("OneAccount response did not include an access_token")
        # Refresh 60s early to avoid using a token that expires mid-request;
        # a missing/None expires_in is treated as already expired (the
        # original crashed with TypeError in that case).
        expires_in = float(response_json.get("expires_in") or 0.0)
        state.cached_auth_header = access_token
        state.cached_auth_expires_at = time.time() + expires_in - 60.0
        return access_token
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
from importlib import resources
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
import uvicorn
|
|
5
|
+
|
|
6
|
+
from .config import verify_config, log_level, host, port
|
|
7
|
+
|
|
8
|
+
# def setup_logging() -> None:
|
|
9
|
+
# file_handler = logging.FileHandler("mhub_proxy.log")
|
|
10
|
+
# file_handler.setFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s"))
|
|
11
|
+
# root_logger = logging.getLogger("mhub_proxy.requests")
|
|
12
|
+
# root_logger.addHandler(file_handler)
|
|
13
|
+
# root_logger.setLevel(log_level().upper())
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _copy_plugin_dir(project_root: Path) -> list[Path]:
    """Copy the bundled OpenCode plugin files into *project_root*.

    Files from the packaged ``.opencode/plugins`` directory are written to
    ``<project_root>/.opencode/plugins`` (created if absent). Entries that
    are not plain files are skipped. Returns the destination paths.
    """
    plugin_source = resources.files("mhub_proxy") / ".opencode" / "plugins"
    plugin_target = project_root / ".opencode" / "plugins"
    plugin_target.mkdir(parents=True, exist_ok=True)

    installed: list[Path] = []
    for item in plugin_source.iterdir():
        if item.is_file():
            target_file = plugin_target / item.name
            target_file.write_bytes(item.read_bytes())
            installed.append(target_file)

    return installed
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def main() -> None:
    """CLI entry point: install the OpenCode plugin, or run the proxy server."""
    parser = argparse.ArgumentParser(prog="mhub_proxy")
    parser.add_argument(
        "--install-plugin",
        action="store_true",
        help="Install bundled OpenCode plugin(s) into <project_root>/.opencode/plugins.",
    )
    parser.add_argument(
        "--project-root",
        default=".",
        help="Project root used by --install-plugin (default: current directory).",
    )
    args = parser.parse_args()

    if not args.install_plugin:
        # Default mode: validate environment, then serve until interrupted.
        # setup_logging()
        verify_config()
        uvicorn.run(
            "mhub_proxy.app:app",
            host=host(),
            port=port(),
            log_level=log_level(),
            access_log=True,
        )
        return

    # Plugin-install mode: copy packaged plugin files into the project.
    project_root = Path(args.project_root).resolve()
    installed = _copy_plugin_dir(project_root=project_root)
    if not installed:
        print("No plugin files were found to install.")
        return

    print(f"Installed {len(installed)} plugin file(s) to {project_root / '.opencode' / 'plugins'}:")
    for plugin_file in installed:
        print(f"- {plugin_file}")
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
def required_env(name: str) -> str:
    """Return the value of environment variable *name*, stripped of whitespace.

    Raises:
        ValueError: if the variable is unset, empty, or whitespace-only.
    """
    value = os.getenv(name, "").strip()
    if value:
        return value
    raise ValueError(f"{name} is not set")
|
8
|
+
|
|
9
|
+
def mhub_base_url() -> str:
    """Model Hub API base URL (MHUB_API_URL), without any trailing slash."""
    url = required_env("MHUB_API_URL")
    return url.rstrip("/")
|
|
11
|
+
|
|
12
|
+
def mhub_client_id() -> str:
    """OAuth2 client id for the Model Hub API (MHUB_CLIENT_ID)."""
    client_id = required_env("MHUB_CLIENT_ID")
    return client_id
|
|
14
|
+
|
|
15
|
+
def mhub_client_secret() -> str:
    """OAuth2 client secret for the Model Hub API (MHUB_CLIENT_SECRET)."""
    client_secret = required_env("MHUB_CLIENT_SECRET")
    return client_secret
|
|
17
|
+
|
|
18
|
+
def oneaccount_login_url() -> str:
    """OneAccount login API URL (ONEACCOUNT_LOGIN_URL), without a trailing slash."""
    url = required_env("ONEACCOUNT_LOGIN_URL")
    return url.rstrip("/")
|
|
20
|
+
|
|
21
|
+
def log_level() -> str:
    """Proxy log level: MHUB_PROXY_LOG_LEVEL wins over legacy LLM_PROXY_LOG_LEVEL (default "info")."""
    configured = os.getenv("MHUB_PROXY_LOG_LEVEL")
    if configured:
        return configured
    return os.getenv("LLM_PROXY_LOG_LEVEL", "info")
|
|
23
|
+
|
|
24
|
+
def host() -> str:
    """Bind address: MHUB_PROXY_HOST wins over legacy LLM_PROXY_HOST (default "127.0.0.1")."""
    configured = os.getenv("MHUB_PROXY_HOST")
    if configured:
        return configured
    return os.getenv("LLM_PROXY_HOST", "127.0.0.1")
|
|
26
|
+
|
|
27
|
+
def port() -> int:
    """Bind port: MHUB_PROXY_PORT wins over legacy LLM_PROXY_PORT (default 8123)."""
    configured = os.getenv("MHUB_PROXY_PORT") or os.getenv("LLM_PROXY_PORT", "8123")
    return int(configured)
|
|
29
|
+
|
|
30
|
+
def verify_config() -> None:
    """Fail fast (ValueError) if any required environment variable is missing.

    Evaluates each required accessor in order; the first missing variable
    raises with its name in the message.
    """
    for check in (mhub_base_url, mhub_client_id, mhub_client_secret, oneaccount_login_url):
        check()
|
|
35
|
+
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import uuid
|
|
3
|
+
from typing import AsyncIterator
|
|
4
|
+
import json
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
from fastapi import APIRouter, Request
|
|
8
|
+
from fastapi.responses import StreamingResponse
|
|
9
|
+
|
|
10
|
+
from .config import (
|
|
11
|
+
mhub_base_url,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from .auth import _authenticate
|
|
15
|
+
|
|
16
|
+
# Router holding the proxy's HTTP endpoints; mounted on the app in app.py.
router = APIRouter()
# Request/response trace logger. Note: cli.py's file-handler setup for this
# logger is commented out, so output follows the root logging configuration.
request_logger = logging.getLogger("mhub_proxy.requests")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def openai_upstream_url(deployment_id: str) -> str:
|
|
21
|
+
upstream_base = mhub_base_url()
|
|
22
|
+
base = upstream_base.rstrip("/")
|
|
23
|
+
model_id = deployment_id.split('@')[0]
|
|
24
|
+
api_version = deployment_id.split('@')[1]
|
|
25
|
+
return f"{base}/providers/openai/deployments/{model_id}/chat/completions?api-version={api_version}"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@router.get("/health")
async def health() -> dict[str, str]:
    """Liveness endpoint; used by the OpenCode plugin to detect a running proxy."""
    status_payload = {"status": "ok"}
    return status_payload
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@router.post("/openai/chat/completions")
async def chat_completions(request: Request) -> StreamingResponse:
    """Proxy an OpenAI-style chat-completions request to the Model Hub.

    The client's ``model`` field must be ``<model>@<api-version>``; it is
    split by openai_upstream_url to pick the upstream deployment URL. The
    upstream byte stream is forwarded to the client unchanged, and
    streaming is always forced on regardless of what the client asked for.
    """
    # Correlation id tying together the log lines for this request.
    request_id = uuid.uuid4().hex
    body = await request.body()
    body = json.loads(body)

    upstream_url = openai_upstream_url(
        deployment_id=body.get("model")
    )
    # NOTE(review): this logs the complete request body, including message
    # content — confirm that is acceptable for this deployment.
    request_logger.info(
        "body=%s", json.dumps(body),
    )

    request_logger.info(
        "request_id=%s method=%s path=%s upstream_url=%s query=%s",
        request_id,
        request.method,
        request.url.path,
        upstream_url,
        str(request.query_params),
    )

    auth_token = await _authenticate(request)
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {auth_token}",
    }
    # Only this fixed subset of client fields is forwarded; anything else
    # (e.g. temperature, max_tokens) is silently dropped, tool_choice is
    # forced to "auto", and streaming with usage reporting is forced on.
    payload = {
        "messages": body.get("messages"),
        "tools": body.get("tools"),
        "reasoning_effort": body.get("reasoning_effort", "medium"),
        "tool_choice": "auto",
        "stream_options": {"include_usage": True},
        "stream": True,
    }

    async def stream_response() -> AsyncIterator[bytes]:
        # Forward the upstream response body chunk-by-chunk without buffering.
        # NOTE(review): verify=False disables TLS certificate validation —
        # presumably for an internal CA; confirm.
        async with httpx.AsyncClient(verify=False, timeout=300) as client:
            async with client.stream(
                "POST",
                upstream_url,
                headers=headers,
                json=payload,
            ) as response:
                # This raises inside the generator: by then the 200 status and
                # headers below have already been sent, so an upstream error
                # surfaces to the client as an aborted stream.
                response.raise_for_status()
                async for chunk in response.aiter_bytes():
                    yield chunk

    return StreamingResponse(
        content=stream_response(),
        status_code=200,
        # NOTE(review): stream=True suggests the upstream emits SSE chunks,
        # yet the declared content type is JSON — confirm clients tolerate it.
        headers={"Content-Type": "application/json"},
    )
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: mhub_proxy
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: OpenAI-compatible local proxy for enterprise LLM endpoints
|
|
5
|
+
Requires-Python: >=3.10
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: fastapi>=0.133.1
|
|
8
|
+
Requires-Dist: httpx>=0.28.1
|
|
9
|
+
Requires-Dist: uvicorn>=0.41.0
|
|
10
|
+
|
|
11
|
+
# mhub_proxy
|
|
12
|
+
|
|
13
|
+
`mhub_proxy` is a local OpenAI-compatible proxy that lets CLI tools (for example, Opencode) send OpenAI-style requests to SecureGPT.
|
|
14
|
+
|
|
15
|
+
## Requirements
|
|
16
|
+
|
|
17
|
+
`uv` is required to install and run the proxy.
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
curl -LsSf https://astral.sh/uv/install.sh | sh
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
Or, using `pip`:
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
pip install uv
|
|
27
|
+
```
|
|
28
|
+
## Install `mhub_proxy`
|
|
29
|
+
|
|
30
|
+
Install as a `uv` tool:
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
uv tool install mhub_proxy
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Required environment variables
|
|
37
|
+
|
|
38
|
+
Before starting the proxy, define the variables used by `mhub_proxy/config.py`:
|
|
39
|
+
|
|
40
|
+
- `MHUB_API_URL` : URL of the Model Hub API
|
|
41
|
+
- `MHUB_CLIENT_ID` : Client ID for the Model Hub API
|
|
42
|
+
- `MHUB_CLIENT_SECRET` : Client Secret for the Model Hub API
|
|
43
|
+
- `ONEACCOUNT_LOGIN_URL` : URL of the OneAccount login API
|
|
44
|
+
|
|
45
|
+
Optional:
|
|
46
|
+
|
|
47
|
+
- `MHUB_PROXY_HOST` (default: `127.0.0.1`)
|
|
48
|
+
- `MHUB_PROXY_PORT` (default: `8123`)
|
|
49
|
+
- `MHUB_PROXY_LOG_LEVEL` (default: `info`)
|
|
50
|
+
|
|
51
|
+
## Run the proxy
|
|
52
|
+
|
|
53
|
+
Start the proxy with:
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
uvx mhub_proxy
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
By default, the proxy runs on `http://127.0.0.1:8123`.
|
|
60
|
+
|
|
61
|
+
## Use with Opencode
|
|
62
|
+
|
|
63
|
+
1. Add an `opencode.json` file to the root of your project.
|
|
64
|
+
2. Use the configuration in this repository as your example (`/opencode.json`).
|
|
65
|
+
|
|
66
|
+
**IMPORTANT: Notice how the API version is included in the model ID, e.g. `gpt-5.1-2025-11-13@2025-01-01-preview`.**
|
|
67
|
+
This is required for the proxy to correctly route requests to the Model Hub API.
|
|
68
|
+
|
|
69
|
+
3. Optionally: Install the OpenCode plugin (see below) in your project (`uvx mhub_proxy --install-plugin`).
|
|
70
|
+
4. Start Opencode from a terminal **inside your project directory**.
|
|
71
|
+
|
|
72
|
+
The included `opencode.json` points Opencode to the local proxy URL:
|
|
73
|
+
|
|
74
|
+
```json
|
|
75
|
+
"baseURL": "http://127.0.0.1:8123/openai"
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
When Opencode asks for an API key, you can enter any value.
|
|
79
|
+
|
|
80
|
+
Reference: [OpenCode plugin docs](https://opencode.ai/docs/plugins/#create-a-plugin)
|
|
81
|
+
|
|
82
|
+
## Install the OpenCode plugin
|
|
83
|
+
|
|
84
|
+
A plugin is included to automatically start the proxy when Opencode starts (if not started already).
|
|
85
|
+
Optionally, you may install the bundled plugin into your project:
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
uvx mhub_proxy --install-plugin
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
This copies plugin files into:
|
|
92
|
+
|
|
93
|
+
```text
|
|
94
|
+
<project_root>/.opencode/plugins
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
You can also install into a specific project root:
|
|
98
|
+
|
|
99
|
+
```bash
|
|
100
|
+
uvx mhub_proxy --install-plugin --project-root /path/to/project
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## Notes
|
|
104
|
+
|
|
105
|
+
- The proxy expects OpenAI chat-completions style traffic from your CLI tool.
|
|
106
|
+
- If startup fails, first verify that all required environment variables are set.
|
|
107
|
+
- The installed OpenCode plugin checks proxy health at session start and will launch `uvx mhub_proxy` in the background when needed.
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
mhub_proxy/__init__.py
|
|
4
|
+
mhub_proxy/app.py
|
|
5
|
+
mhub_proxy/auth.py
|
|
6
|
+
mhub_proxy/cli.py
|
|
7
|
+
mhub_proxy/config.py
|
|
8
|
+
mhub_proxy/proxy.py
|
|
9
|
+
mhub_proxy.egg-info/PKG-INFO
|
|
10
|
+
mhub_proxy.egg-info/SOURCES.txt
|
|
11
|
+
mhub_proxy.egg-info/dependency_links.txt
|
|
12
|
+
mhub_proxy.egg-info/entry_points.txt
|
|
13
|
+
mhub_proxy.egg-info/requires.txt
|
|
14
|
+
mhub_proxy.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
mhub_proxy
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "mhub_proxy"
|
|
3
|
+
version = "1.0.0"
|
|
4
|
+
description = "OpenAI-compatible local proxy for enterprise LLM endpoints"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.10"
|
|
7
|
+
dependencies = [
|
|
8
|
+
"fastapi>=0.133.1",
|
|
9
|
+
"httpx>=0.28.1",
|
|
10
|
+
"uvicorn>=0.41.0",
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
[build-system]
|
|
14
|
+
requires = ["setuptools>=68", "wheel"]
|
|
15
|
+
build-backend = "setuptools.build_meta"
|
|
16
|
+
|
|
17
|
+
[tool.uv]
|
|
18
|
+
package = true
|
|
19
|
+
|
|
20
|
+
[[tool.uv.index]]
|
|
21
|
+
url = "https://pypi.org/simple/"
|
|
22
|
+
|
|
23
|
+
[tool.setuptools.packages.find]
|
|
24
|
+
include = ["mhub_proxy*"]
|
|
25
|
+
|
|
26
|
+
[project.scripts]
|
|
27
|
+
mhub_proxy = "mhub_proxy.cli:main"
|