local-openai2anthropic 0.2.5-py3-none-any.whl → 0.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,34 +11,35 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
 
 class Settings(BaseSettings):
     """Application settings loaded from environment variables."""
-
+
     model_config = SettingsConfigDict(
         env_prefix="OA2A_",  # OpenAI-to-Anthropic prefix
         env_file=".env",
         env_file_encoding="utf-8",
         case_sensitive=False,
+        extra="ignore",
     )
-
+
     # OpenAI API Configuration
     openai_api_key: Optional[str] = None
     openai_base_url: str = "https://api.openai.com/v1"
     openai_org_id: Optional[str] = None
     openai_project_id: Optional[str] = None
-
+
     # Server Configuration
     host: str = "0.0.0.0"
     port: int = 8080
     request_timeout: float = 300.0  # 5 minutes
-
+
     # API Key for authenticating requests to this server (optional)
     api_key: Optional[str] = None
-
+
     # CORS settings
     cors_origins: list[str] = ["*"]
     cors_credentials: bool = True
     cors_methods: list[str] = ["*"]
     cors_headers: list[str] = ["*"]
-
+
     # Logging
     log_level: str = "DEBUG"
 
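This hunk is evidently `local_openai2anthropic/config.py`, whose RECORD hash changes below. The added `extra="ignore"` is the usual fix for "extra inputs are not permitted" validation errors when a shared `.env` carries keys the model doesn't declare. A minimal sketch of how the `OA2A_`-prefixed variables map onto these fields, assuming the import path implied by the wheel RECORD:

```python
# A minimal sketch; the import path follows the wheel RECORD below.
import os

from local_openai2anthropic.config import Settings

# env_prefix="OA2A_" + field name; case_sensitive=False makes casing free.
os.environ["OA2A_PORT"] = "9090"
os.environ["OA2A_LOG_LEVEL"] = "INFO"

settings = Settings()
print(settings.port)       # 9090, coerced from str to int by pydantic
print(settings.log_level)  # "INFO"
```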
@@ -32,7 +32,7 @@ def create_app(settings: Settings | None = None) -> FastAPI:
     app = FastAPI(
         title="local-openai2anthropic",
         description="A proxy server that converts Anthropic Messages API to OpenAI API",
-        version="0.2.0",
+        version="0.2.5",
         docs_url="/docs",
         redoc_url="/redoc",
     )
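This hunk and the next are consistent with `local_openai2anthropic/main.py`, whose RECORD hash changes while its size stays 9866 bytes (both edits swap `0.2.0` for the same-length `0.2.5`). A sketch of serving the `create_app` factory by hand; the uvicorn call is an assumption, since the packaged `oa2a` CLI is the supported entry point:

```python
# Hypothetical manual serving of the app factory shown above.
import uvicorn
from local_openai2anthropic.main import create_app

app = create_app()                           # falls back to env-driven Settings
uvicorn.run(app, host="0.0.0.0", port=8080)  # mirrors the Settings defaults
```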
@@ -182,7 +182,7 @@ Examples:
     parser.add_argument(
         "--version",
         action="version",
-        version="%(prog)s 0.2.0",
+        version="%(prog)s 0.2.5",
     )
 
     # Create subparsers for commands
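For reference, the argparse idiom being version-bumped here: `action="version"` prints the string and exits before any command dispatch runs. A self-contained illustration (the `oa2a` prog name follows the README's CLI):

```python
# Standard-library argparse version flag, as in the hunk above.
import argparse

parser = argparse.ArgumentParser(prog="oa2a")
parser.add_argument(
    "--version",
    action="version",
    version="%(prog)s 0.2.5",  # %(prog)s expands to the parser's prog name
)

parser.parse_args(["--version"])  # prints "oa2a 0.2.5", then SystemExit(0)
```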
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: local-openai2anthropic
-Version: 0.2.5
+Version: 0.2.7
 Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
 Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
 Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
@@ -80,7 +80,11 @@ Other OpenAI-compatible backends may work but are not fully tested.
 pip install local-openai2anthropic
 ```
 
-### 2. Start Your Local LLM Server
+### 2. Configure Your LLM Backend (Optional)
+
+**Option A: Start a local LLM server**
+
+If you don't have an LLM server running, you can start one locally:
 
 Example with vLLM:
 ```bash
@@ -94,6 +98,16 @@ sglang launch --model-path meta-llama/Llama-2-7b-chat-hf --port 8000
 # SGLang starts at http://localhost:8000/v1
 ```
 
+**Option B: Use an existing OpenAI-compatible API**
+
+If you already have a deployed OpenAI-compatible API (local or remote), you can use it directly. Just note the base URL for the next step.
+
+Examples:
+- Local vLLM/SGLang: `http://localhost:8000/v1`
+- Remote API: `https://api.example.com/v1`
+
+> **Note:** If you're using [Ollama](https://ollama.com), it natively supports the Anthropic API format, so you don't need this proxy. Just point your Claude SDK directly to `http://localhost:11434/v1`.
+
 ### 3. Start the Proxy
 
 **Option A: Run in background (recommended)**
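Before wiring the proxy to Option B's base URL, it can help to confirm the backend really speaks the OpenAI-compatible API. A hypothetical smoke test, assuming the backend exposes the standard `GET /models` listing endpoint:

```python
# Hypothetical check that a base URL answers the OpenAI-compatible models list.
import json
import urllib.request

BASE_URL = "http://localhost:8000/v1"  # your vLLM/SGLang/remote endpoint

with urllib.request.urlopen(f"{BASE_URL}/models") as resp:
    payload = json.load(resp)

# OpenAI-compatible servers answer {"object": "list", "data": [{"id": ...}]}.
print([model["id"] for model in payload["data"]])
```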
@@ -156,22 +170,31 @@ You can configure [Claude Code](https://github.com/anthropics/claude-code) to us
 
 ### Configuration Steps
 
-1. **Create or edit Claude Code config file** at `~/.claude/CLAUDE.md`:
-
-```markdown
-# Claude Code Configuration
-
-## API Settings
-
-- Claude API Base URL: http://localhost:8080
-- Claude API Key: dummy-key
-
-## Model Settings
-
-Use model: meta-llama/Llama-2-7b-chat-hf  # Your local model name
+1. **Edit Claude Code config file** at `~/.claude/settings.json`:
+
+```json
+{
+  "env": {
+    "ANTHROPIC_BASE_URL": "http://localhost:8080",
+    "ANTHROPIC_API_KEY": "dummy-key",
+    "ANTHROPIC_MODEL": "meta-llama/Llama-2-7b-chat-hf",
+    "ANTHROPIC_DEFAULT_SONNET_MODEL": "meta-llama/Llama-2-7b-chat-hf",
+    "ANTHROPIC_DEFAULT_OPUS_MODEL": "meta-llama/Llama-2-7b-chat-hf",
+    "ANTHROPIC_DEFAULT_HAIKU_MODEL": "meta-llama/Llama-2-7b-chat-hf",
+    "ANTHROPIC_REASONING_MODEL": "meta-llama/Llama-2-7b-chat-hf"
+  }
+}
 ```
 
-2. **Alternatively, set environment variables** before running Claude Code:
+| Variable | Description |
+|----------|-------------|
+| `ANTHROPIC_MODEL` | General model setting |
+| `ANTHROPIC_DEFAULT_SONNET_MODEL` | Default model for Sonnet mode (Claude Code default) |
+| `ANTHROPIC_DEFAULT_OPUS_MODEL` | Default model for Opus mode |
+| `ANTHROPIC_DEFAULT_HAIKU_MODEL` | Default model for Haiku mode |
+| `ANTHROPIC_REASONING_MODEL` | Default model for reasoning tasks |
+
+2. **Or set environment variables** before running Claude Code:
 
 ```bash
 export ANTHROPIC_BASE_URL=http://localhost:8080
@@ -180,38 +203,36 @@ export ANTHROPIC_API_KEY=dummy-key
 claude
 ```
 
-3. **Or use the `--api-key` and `--base-url` flags**:
-
-```bash
-claude --api-key dummy-key --base-url http://localhost:8080
-```
-
 ### Complete Workflow Example
 
+Make sure `~/.claude/settings.json` is configured as described above.
+
 Terminal 1 - Start your local LLM:
 ```bash
 vllm serve meta-llama/Llama-2-7b-chat-hf
 ```
 
-Terminal 2 - Start the proxy:
+Terminal 2 - Start the proxy (background mode):
 ```bash
 export OA2A_OPENAI_BASE_URL=http://localhost:8000/v1
 export OA2A_OPENAI_API_KEY=dummy
 export OA2A_TAVILY_API_KEY="tvly-your-tavily-api-key"  # Optional: enable web search
 
-oa2a
+oa2a start
 ```
 
-Terminal 3 - Launch Claude Code with local LLM:
+Terminal 3 - Launch Claude Code:
 ```bash
-export ANTHROPIC_BASE_URL=http://localhost:8080
-export ANTHROPIC_API_KEY=dummy-key
-
 claude
 ```
 
 Now Claude Code will use your local LLM instead of the cloud API.
 
+To stop the proxy:
+```bash
+oa2a stop
+```
+
 ---
 
 ## Features
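To confirm the three-terminal workflow without involving Claude Code at all, you can hit the proxy directly with the official `anthropic` Python SDK. A hypothetical end-to-end check, reusing the README's model name and dummy key:

```python
# Hypothetical check: the anthropic SDK pointed at the proxy, not the cloud.
from anthropic import Anthropic

client = Anthropic(
    base_url="http://localhost:8080",  # the proxy started in Terminal 2
    api_key="dummy-key",               # any value works unless OA2A_API_KEY is set
)

reply = client.messages.create(
    model="meta-llama/Llama-2-7b-chat-hf",  # your local model name
    max_tokens=128,
    messages=[{"role": "user", "content": "Say hello from the local LLM."}],
)
print(reply.content[0].text)
```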
@@ -1,10 +1,10 @@
 local_openai2anthropic/__init__.py,sha256=IEn8YcQGsaEaCr04s3hS2AcgsIt5NU5Qa2C8Uwz7RdY,1059
 local_openai2anthropic/__main__.py,sha256=K21u5u7FN8-DbO67TT_XDF0neGqJeFrVNkteRauCRQk,179
-local_openai2anthropic/config.py,sha256=bnM7p5htd6rHgLn7Z0Ukmm2jVImLuVjIB5Cnfpf2ClY,1918
+local_openai2anthropic/config.py,sha256=3M5ZAz3uYNMGxaottEBseEOZF-GnVaGuioH9Hpmgnd8,1918
 local_openai2anthropic/converter.py,sha256=d-qYwtv6FIbpKSRsZN4jhnKM4D4k52la-_bpEYPTAS0,15790
 local_openai2anthropic/daemon.py,sha256=pZnRojGFcuIpR8yLDNjV-b0LJRBVhgRAa-dKeRRse44,10017
 local_openai2anthropic/daemon_runner.py,sha256=rguOH0PgpbjqNsKYei0uCQX8JQOQ1wmtQH1CtW95Dbw,3274
-local_openai2anthropic/main.py,sha256=5tdgPel8RSCn1iK0d7hYAmcTM9vYHlepgQujaEXA2ic,9866
+local_openai2anthropic/main.py,sha256=FK5JBBpzB_T44y3N16lPl1hK4ht4LEQqRKzVmkIjIoo,9866
 local_openai2anthropic/openai_types.py,sha256=jFdCvLwtXYoo5gGRqOhbHQcVaxcsxNnCP_yFPIv7rG4,3823
 local_openai2anthropic/protocol.py,sha256=vUEgxtRPFll6jEtLc4DyxTLCBjrWIEScZXhEqe4uibk,5185
 local_openai2anthropic/router.py,sha256=imzvgduneiniwHroTgeT9d8q4iF5GAuptaVP38sakUg,40226
@@ -12,8 +12,8 @@ local_openai2anthropic/tavily_client.py,sha256=QsBhnyF8BFWPAxB4XtWCCpHCquNL5SW93
 local_openai2anthropic/server_tools/__init__.py,sha256=QlJfjEta-HOCtLe7NaY_fpbEKv-ZpInjAnfmSqE9tbk,615
 local_openai2anthropic/server_tools/base.py,sha256=pNFsv-jSgxVrkY004AHAcYMNZgVSO8ZOeCzQBUtQ3vU,5633
 local_openai2anthropic/server_tools/web_search.py,sha256=1C7lX_cm-tMaN3MsCjinEZYPJc_Hj4yAxYay9h8Zbvs,6543
-local_openai2anthropic-0.2.5.dist-info/METADATA,sha256=saRDX2uZwiYyNQMXq75WnYbQ8oG2KBkk9gh8yuyqvDg,10108
-local_openai2anthropic-0.2.5.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-local_openai2anthropic-0.2.5.dist-info/entry_points.txt,sha256=hdc9tSJUNxyNLXcTYye5SuD2K0bEQhxBhGnWTFup6ZM,116
-local_openai2anthropic-0.2.5.dist-info/licenses/LICENSE,sha256=X3_kZy3lJvd_xp8IeyUcIAO2Y367MXZc6aaRx8BYR_s,11369
-local_openai2anthropic-0.2.5.dist-info/RECORD,,
+local_openai2anthropic-0.2.7.dist-info/METADATA,sha256=eA34CtgLACHsE4gf4Scuj7yU5IBg_Ys26x8nMnCd_eM,11240
+local_openai2anthropic-0.2.7.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+local_openai2anthropic-0.2.7.dist-info/entry_points.txt,sha256=hdc9tSJUNxyNLXcTYye5SuD2K0bEQhxBhGnWTFup6ZM,116
+local_openai2anthropic-0.2.7.dist-info/licenses/LICENSE,sha256=X3_kZy3lJvd_xp8IeyUcIAO2Y367MXZc6aaRx8BYR_s,11369
+local_openai2anthropic-0.2.7.dist-info/RECORD,,
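A closing note on the RECORD hunks: per the wheel spec (PEP 427, building on PEP 376), each `sha256=` value is the urlsafe-base64 encoding of the raw digest with `=` padding stripped. That is how `config.py` can show a new hash at an unchanged size (1918 bytes): the 0.2.7 edits happen to add and remove the same number of bytes. A sketch of the computation:

```python
# How wheel RECORD hash entries are derived (urlsafe base64, no padding).
import base64
import hashlib

def record_hash(path: str) -> str:
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

# e.g. record_hash("local_openai2anthropic/config.py") inside the unpacked
# 0.2.7 wheel should print:
#   sha256=3M5ZAz3uYNMGxaottEBseEOZF-GnVaGuioH9Hpmgnd8
```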