anthropic-bridge 0.1.3 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. anthropic_bridge-0.1.3/.github/workflows/cd.yml +27 -0
  2. anthropic_bridge-0.1.3/.github/workflows/ci.yml +34 -0
  3. anthropic_bridge-0.1.3/.gitignore +8 -0
  4. anthropic_bridge-0.1.3/CLAUDE.md +43 -0
  5. anthropic_bridge-0.1.3/LICENSE +21 -0
  6. anthropic_bridge-0.1.3/PKG-INFO +16 -0
  7. anthropic_bridge-0.1.3/README.md +147 -0
  8. anthropic_bridge-0.1.3/anthropic_bridge/__init__.py +3 -0
  9. anthropic_bridge-0.1.3/anthropic_bridge/__main__.py +30 -0
  10. anthropic_bridge-0.1.3/anthropic_bridge/cache.py +88 -0
  11. anthropic_bridge-0.1.3/anthropic_bridge/client.py +433 -0
  12. anthropic_bridge-0.1.3/anthropic_bridge/models.py +69 -0
  13. anthropic_bridge-0.1.3/anthropic_bridge/providers/__init__.py +12 -0
  14. anthropic_bridge-0.1.3/anthropic_bridge/providers/base.py +57 -0
  15. anthropic_bridge-0.1.3/anthropic_bridge/providers/codex.py +317 -0
  16. anthropic_bridge-0.1.3/anthropic_bridge/providers/deepseek.py +23 -0
  17. anthropic_bridge-0.1.3/anthropic_bridge/providers/gemini.py +30 -0
  18. anthropic_bridge-0.1.3/anthropic_bridge/providers/grok.py +90 -0
  19. anthropic_bridge-0.1.3/anthropic_bridge/providers/minimax.py +24 -0
  20. anthropic_bridge-0.1.3/anthropic_bridge/providers/openai.py +35 -0
  21. anthropic_bridge-0.1.3/anthropic_bridge/providers/qwen.py +26 -0
  22. anthropic_bridge-0.1.3/anthropic_bridge/providers/registry.py +27 -0
  23. anthropic_bridge-0.1.3/anthropic_bridge/server.py +96 -0
  24. anthropic_bridge-0.1.3/anthropic_bridge/transform.py +239 -0
  25. anthropic_bridge-0.1.3/pyproject.toml +91 -0
  26. anthropic_bridge-0.1.3/tests/__init__.py +0 -0
  27. anthropic_bridge-0.1.3/tests/test_integration.py +490 -0
  28. anthropic_bridge-0.1.3/uv.lock +541 -0
+++ anthropic_bridge-0.1.3/.github/workflows/cd.yml
@@ -0,0 +1,27 @@
+name: CD
+
+on:
+  release:
+    types: [created]
+  workflow_dispatch:
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.x"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install setuptools build twine
+      - name: Build and publish
+        env:
+          TWINE_USERNAME: __token__
+          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
+        run: |
+          python -m build
+          twine upload dist/*
+++ anthropic_bridge-0.1.3/.github/workflows/ci.yml
@@ -0,0 +1,34 @@
+name: CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: [main]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.13"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -e ".[test,dev]"
+
+      - name: Run ruff
+        run: ruff check anthropic_bridge/ tests/
+
+      - name: Run mypy
+        run: mypy anthropic_bridge/
+
+      - name: Run tests
+        env:
+          OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
+        run: pytest tests/ -v
+++ anthropic_bridge-0.1.3/.gitignore
@@ -0,0 +1,8 @@
+__pycache__/
+*.pyc
+*.pyo
+*.egg-info/
+dist/
+build/
+.pytest_cache/
+.env
+++ anthropic_bridge-0.1.3/CLAUDE.md
@@ -0,0 +1,43 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+anthropic-bridge is a proxy server that translates Anthropic Messages API requests into OpenRouter API format, enabling use of various LLM providers (Gemini, OpenAI, Grok, DeepSeek, Qwen, MiniMax) through an Anthropic-compatible interface.
+
+## Commands
+
+```bash
+# Install dependencies
+pip install -e ".[test,dev]"
+
+# Run server (requires OPENROUTER_API_KEY env var)
+OPENROUTER_API_KEY=your_key anthropic-bridge --port 8080 --host 127.0.0.1
+
+# Lint
+ruff check anthropic_bridge/ tests/
+
+# Type check
+mypy anthropic_bridge/
+
+# Run tests (requires OPENROUTER_API_KEY env var)
+OPENROUTER_API_KEY=your_key pytest tests/ -v
+```
+
+## Architecture
+
+**Request Flow**: Anthropic API request → `server.py` → `client.py` → OpenRouter API → SSE stream converted back to Anthropic format
+
+**Core Components**:
+- `server.py` - FastAPI app exposing `/v1/messages` endpoint that accepts Anthropic API format
+- `client.py` - `OpenRouterClient` handles request transformation and streams OpenRouter responses back as Anthropic SSE events
+- `transform.py` - Converts Anthropic messages/tools/tool_choice to OpenAI format for OpenRouter
+
+**Provider System** (`providers/`):
+- `BaseProvider` - Abstract base defining `process_text_content()`, `should_handle()`, and `prepare_request()` hooks
+- `ProviderRegistry` - Selects appropriate provider based on model ID
+- Provider implementations (Grok, Gemini, OpenAI, etc.) handle model-specific quirks like XML tool call parsing (Grok) or reasoning detail injection (Gemini)
+
+**Caching** (`cache.py`):
+- `ReasoningCache` persists Gemini reasoning details between tool call rounds to `~/.anthropic_bridge/cache/`
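
To make the provider hooks concrete, here is a minimal sketch of the shape a subclass might take. The hook names come from the description above; `ExampleProvider`, its exact signatures, and the `example/` prefix are illustrative assumptions, not the package's actual code (see `providers/base.py` for the real definitions).

```python
from typing import Any


class ExampleProvider:
    """Hypothetical provider illustrating the three BaseProvider hooks."""

    def should_handle(self, model: str) -> bool:
        # ProviderRegistry-style dispatch: claim models under one vendor prefix.
        return model.startswith("example/")

    def prepare_request(self, payload: dict[str, Any]) -> dict[str, Any]:
        # Adjust the outgoing OpenRouter payload (e.g. inject reasoning options).
        return payload

    def process_text_content(self, text: str) -> str:
        # Post-process streamed text (e.g. parse vendor-specific tool-call markup).
        return text
```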
+++ anthropic_bridge-0.1.3/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Michael Gendy
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+++ anthropic_bridge-0.1.3/PKG-INFO
@@ -0,0 +1,16 @@
+Metadata-Version: 2.4
+Name: anthropic-bridge
+Version: 0.1.3
+Summary: Bridge Anthropic API to OpenRouter models
+License-Expression: MIT
+License-File: LICENSE
+Requires-Python: >=3.11
+Requires-Dist: fastapi>=0.115.0
+Requires-Dist: httpx>=0.28.0
+Requires-Dist: uvicorn>=0.32.0
+Provides-Extra: dev
+Requires-Dist: mypy>=1.13.0; extra == 'dev'
+Requires-Dist: ruff>=0.8.0; extra == 'dev'
+Provides-Extra: test
+Requires-Dist: pytest-asyncio>=0.24.0; extra == 'test'
+Requires-Dist: pytest>=8.0.0; extra == 'test'
+++ anthropic_bridge-0.1.3/README.md
@@ -0,0 +1,147 @@
+# anthropic-bridge
+
+A proxy server that exposes an Anthropic Messages API-compatible endpoint while routing requests to various LLM providers through OpenRouter and Codex CLI.
+
+## Features
+
+- Anthropic Messages API compatible (`/v1/messages`)
+- Streaming SSE responses
+- Tool/function calling support
+- Multi-round conversations
+- Support for multiple providers: Gemini, OpenAI, Grok, DeepSeek, Qwen, MiniMax
+- Extended thinking/reasoning support for compatible models
+- Reasoning cache for Gemini models across tool call rounds
+- **Codex CLI integration** - Use OpenAI's Codex models with your ChatGPT subscription
+
+## Installation
+
+```bash
+pip install anthropic-bridge
+```
+
+For development:
+
+```bash
+git clone https://github.com/michaelgendy/anthropic-bridge.git
+cd anthropic-bridge
+pip install -e ".[test,dev]"
+```
+
+## Usage
+
+### With OpenRouter
+
+Set your OpenRouter API key and start the server:
+
+```bash
+export OPENROUTER_API_KEY=your_key
+anthropic-bridge --port 8080 --host 127.0.0.1
+```
+
+Then point your Anthropic SDK client to `http://localhost:8080`:
+
+```python
+from anthropic import Anthropic
+
+client = Anthropic(
+    api_key="not-used",
+    base_url="http://localhost:8080"
+)
+
+response = client.messages.create(
+    model="google/gemini-2.5-pro-preview",  # Any OpenRouter model
+    max_tokens=1024,
+    messages=[{"role": "user", "content": "Hello!"}]
+)
+```
+
+### With Codex CLI
+
+First, authenticate with Codex CLI using your ChatGPT subscription:
+
+```bash
+codex login
+```
+
+Then start the bridge (no API key needed for Codex models):
+
+```bash
+anthropic-bridge --port 8080
+```
+
+Use `codex/` prefixed models:
+
+```python
+from anthropic import Anthropic
+
+client = Anthropic(
+    api_key="not-used",
+    base_url="http://localhost:8080"
+)
+
+response = client.messages.create(
+    model="codex/gpt-5.2-codex",  # Codex model
+    max_tokens=1024,
+    messages=[{"role": "user", "content": "Hello!"}]
+)
+```
+
+#### Codex Models with Reasoning Levels
+
+Append a reasoning-level suffix to control reasoning effort:
+
+| Model | Description |
+|-------|-------------|
+| `codex/gpt-5.2-codex` | Default reasoning |
+| `codex/gpt-5.2-codex:low` | Low reasoning effort |
+| `codex/gpt-5.2-codex:medium` | Medium reasoning effort |
+| `codex/gpt-5.2-codex:high` | High reasoning effort |
+| `codex/gpt-5.2-codex:xhigh` | Extra high reasoning effort |
+| `codex/gpt-5.2` | GPT-5.2 base model |
+| `codex/o3` | O3 model |
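
The suffixed IDs drop into the same `messages.create` call shown above; only the model string changes:

```python
# Reusing the client from the Codex example above.
response = client.messages.create(
    model="codex/gpt-5.2-codex:high",  # high reasoning effort
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)
```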
+
+## API Endpoints
+
+| Endpoint | Method | Description |
+|----------|--------|-------------|
+| `/` | GET | Health check |
+| `/health` | GET | Health check |
+| `/v1/messages` | POST | Anthropic Messages API |
+| `/v1/messages/count_tokens` | POST | Token counting (approximate) |
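
The bridge can also be called without the SDK. Here is a minimal sketch against the token-counting endpoint, assuming it accepts the same model/messages body as `/v1/messages` (check `server.py` for the exact schema):

```python
import httpx

# Assumption: count_tokens takes the same model/messages shape as /v1/messages.
resp = httpx.post(
    "http://localhost:8080/v1/messages/count_tokens",
    json={
        "model": "google/gemini-2.5-pro-preview",
        "messages": [{"role": "user", "content": "Hello!"}],
    },
)
print(resp.json())  # approximate token count
```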
+
+## Configuration
+
+| Environment Variable | Required | Description |
+|---------------------|----------|-------------|
+| `OPENROUTER_API_KEY` | No* | Your OpenRouter API key (*required for non-Codex models) |
+
+| CLI Flag | Default | Description |
+|----------|---------|-------------|
+| `--port` | 8080 | Port to run on |
+| `--host` | 127.0.0.1 | Host to bind to |
+
+### Model Routing
+
+- Models prefixed with `codex/` are routed to Codex CLI
+- All other models are routed to OpenRouter (requires `OPENROUTER_API_KEY`)
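
Illustratively, the dispatch described by these two bullets reduces to a prefix check. This sketch is not the bridge's actual routing code, which lives inside the package:

```python
def pick_backend(model: str) -> str:
    # Illustrative only: mirrors the routing rules described above.
    if model.startswith("codex/"):
        return "codex-cli"
    return "openrouter"  # requires OPENROUTER_API_KEY
```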
+
+## Supported Models
+
+### Codex CLI (via ChatGPT subscription)
+
+- **Codex** (`codex/*`) - GPT-5.2, GPT-5.2-Codex, O3 with reasoning levels
+
+### OpenRouter
+
+Any model available on OpenRouter can be used. Provider-specific optimizations exist for:
+
+- **Google Gemini** (`google/*`) - Reasoning detail caching
+- **OpenAI** (`openai/*`) - Extended thinking support
+- **xAI Grok** (`x-ai/*`) - XML tool call parsing
+- **DeepSeek** (`deepseek/*`)
+- **Qwen** (`qwen/*`)
+- **MiniMax** (`minimax/*`)
+
+## License
+
+MIT
+++ anthropic_bridge-0.1.3/anthropic_bridge/__init__.py
@@ -0,0 +1,3 @@
+from .server import create_app
+
+__all__ = ["create_app"]
+++ anthropic_bridge-0.1.3/anthropic_bridge/__main__.py
@@ -0,0 +1,30 @@
+import argparse
+import os
+
+import uvicorn
+
+from .server import create_app
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(description="Anthropic Bridge Server")
+    parser.add_argument("--port", type=int, default=8080, help="Port to run on")
+    parser.add_argument("--host", default="127.0.0.1", help="Host to bind to")
+
+    args = parser.parse_args()
+
+    api_key = os.environ.get("OPENROUTER_API_KEY", "")
+
+    app = create_app(openrouter_api_key=api_key or None)
+
+    print(f"Starting Anthropic Bridge on {args.host}:{args.port}")
+    print(" Codex CLI: codex/* models")
+    if api_key:
+        print(" OpenRouter: all other models")
+    else:
+        print(" OpenRouter: disabled (set OPENROUTER_API_KEY to enable)")
+    uvicorn.run(app, host=args.host, port=args.port, log_level="info")
+
+
+if __name__ == "__main__":
+    main()
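
Since `create_app` is exported from the package (see `__init__.py` above), the bridge can also be embedded programmatically instead of going through this CLI entry point. A minimal sketch mirroring what `main()` does:

```python
import uvicorn

from anthropic_bridge import create_app

# Same wiring as main(), minus argparse; pass None to disable OpenRouter routing.
app = create_app(openrouter_api_key="your_openrouter_key")
uvicorn.run(app, host="127.0.0.1", port=8080, log_level="info")
```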
+++ anthropic_bridge-0.1.3/anthropic_bridge/cache.py
@@ -0,0 +1,88 @@
+import json
+import time
+from pathlib import Path
+from threading import Lock
+from typing import Any
+
+DEFAULT_CACHE_DIR = Path.home() / ".anthropic_bridge" / "cache"
+DEFAULT_TTL_DAYS = 30
+
+
+class ReasoningCache:
+    def __init__(self, cache_dir: Path | None = None, ttl_days: int = DEFAULT_TTL_DAYS):
+        self._cache_dir = cache_dir or DEFAULT_CACHE_DIR
+        self._cache_file = self._cache_dir / "reasoning_details.json"
+        self._ttl_seconds = ttl_days * 24 * 60 * 60
+        self._lock = Lock()
+        self._memory_cache: dict[str, dict[str, Any]] = {}
+        self._loaded = False
+
+    def _ensure_loaded(self) -> None:
+        if self._loaded:
+            return
+        with self._lock:
+            if self._loaded:  # Double-checked locking for thread safety
+                return  # type: ignore[unreachable]
+            self._cache_dir.mkdir(parents=True, exist_ok=True)
+            if self._cache_file.exists():
+                try:
+                    data = json.loads(self._cache_file.read_text())
+                    self._memory_cache = data if isinstance(data, dict) else {}
+                except (json.JSONDecodeError, OSError):
+                    self._memory_cache = {}
+            self._loaded = True
+
+    def _save(self) -> None:
+        try:
+            self._cache_file.write_text(json.dumps(self._memory_cache, indent=2))
+        except OSError:
+            pass
+
+    def _cleanup_expired(self) -> None:
+        now = time.time()
+        expired = [
+            k
+            for k, v in self._memory_cache.items()
+            if now - v.get("timestamp", 0) > self._ttl_seconds
+        ]
+        for k in expired:
+            del self._memory_cache[k]
+
+    def get(self, tool_call_id: str) -> list[dict[str, Any]] | None:
+        self._ensure_loaded()
+        entry = self._memory_cache.get(tool_call_id)
+        if not entry:
+            return None
+        if time.time() - entry.get("timestamp", 0) > self._ttl_seconds:
+            with self._lock:
+                self._memory_cache.pop(tool_call_id, None)
+                self._save()
+            return None
+        return entry.get("data")
+
+    def set(self, tool_call_id: str, reasoning_details: list[dict[str, Any]]) -> None:
+        self._ensure_loaded()
+        with self._lock:
+            self._memory_cache[tool_call_id] = {
+                "timestamp": time.time(),
+                "data": reasoning_details,
+            }
+            self._cleanup_expired()
+            self._save()
+
+    def clear(self) -> None:
+        with self._lock:
+            self._memory_cache = {}
+            if self._cache_file.exists():
+                self._cache_file.unlink()
+
+
+# global instance
+_cache: ReasoningCache | None = None
+
+
+def get_reasoning_cache() -> ReasoningCache:
+    global _cache
+    if _cache is None:
+        _cache = ReasoningCache()
+    return _cache
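
Taken together, the cache is used roughly like this; the key and value shapes follow `set()` and `get()` above, and the `reasoning_details` payload here is a placeholder, since any `list[dict]` is accepted:

```python
from anthropic_bridge.cache import get_reasoning_cache

cache = get_reasoning_cache()  # lazily-created global instance

# Persist reasoning details keyed by tool call id...
cache.set("toolu_123", [{"type": "reasoning", "text": "..."}])

# ...and recover them on a later tool-call round (None once the TTL expires).
details = cache.get("toolu_123")
```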