local-openai2anthropic 0.3.5__tar.gz → 0.3.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- local_openai2anthropic-0.3.7/.claude/CLAUDE.md +163 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/PKG-INFO +2 -1
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/pyproject.toml +2 -1
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/__init__.py +1 -1
- local_openai2anthropic-0.3.7/src/local_openai2anthropic/config.py +328 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/main.py +9 -7
- local_openai2anthropic-0.3.7/tests/test_config.py +649 -0
- local_openai2anthropic-0.3.7/tests/test_e2e_websearch.py +397 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_logging.py +78 -74
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_main.py +1 -1
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_server_tools.py +11 -4
- local_openai2anthropic-0.3.5/src/local_openai2anthropic/config.py +0 -69
- local_openai2anthropic-0.3.5/tests/test_config.py +0 -177
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/.github/workflows/publish.yml +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/.gitignore +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/.reports/dead-code-analysis.md +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/LICENSE +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/README.md +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/README_zh.md +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/basic_chat.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/streaming.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/thinking_mode.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/tool_calling.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/vision.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/examples/web_search.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/__main__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/converter.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/daemon.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/daemon_runner.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/openai_types.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/protocol.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/router.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/__init__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/base.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/web_search.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/streaming/__init__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/streaming/handler.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tavily_client.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tools/__init__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tools/handler.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/utils/__init__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/utils/tokens.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/__init__.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/coverage/coverage.json +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_detailed.json +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_report.json +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_report_new.json +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_summary.json +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_converter.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_converter_edge_cases.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_daemon.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_daemon_advanced.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_daemon_runner.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_e2e_multimodel.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_integration.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_openai_types.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_protocol.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_router.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_router_comprehensive.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_router_edge_cases.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_router_streaming.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_tavily_client.py +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/tests/test_upstream.sh +0 -0
- {local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/uv.lock +0 -0
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
# Claude 开发指南
|
|
2
|
+
|
|
3
|
+
## Configuration File
|
|
4
|
+
|
|
5
|
+
### Config File Location
|
|
6
|
+
|
|
7
|
+
- **Linux/macOS**: `~/.oa2a/config.toml`
|
|
8
|
+
- **Windows**: `%USERPROFILE%\.oa2a\config.toml`
|
|
9
|
+
|
|
10
|
+
### First Run
|
|
11
|
+
|
|
12
|
+
A default config file will be created automatically on first startup. Edit this file to add your API keys.
|
|
13
|
+
|
|
14
|
+
### Configuration Example
|
|
15
|
+
|
|
16
|
+
```toml
|
|
17
|
+
# OA2A Configuration File
|
|
18
|
+
# Place this file at ~/.oa2a/config.toml
|
|
19
|
+
|
|
20
|
+
# OpenAI API Configuration
|
|
21
|
+
openai_api_key = "your-openai-api-key"
|
|
22
|
+
openai_base_url = "https://api.openai.com/v1"
|
|
23
|
+
openai_org_id = ""
|
|
24
|
+
openai_project_id = ""
|
|
25
|
+
|
|
26
|
+
# Server Configuration
|
|
27
|
+
host = "0.0.0.0"
|
|
28
|
+
port = 8080
|
|
29
|
+
request_timeout = 300.0
|
|
30
|
+
|
|
31
|
+
# API Key for authenticating requests to this server (optional)
|
|
32
|
+
api_key = ""
|
|
33
|
+
|
|
34
|
+
# CORS settings
|
|
35
|
+
cors_origins = ["*"]
|
|
36
|
+
cors_credentials = true
|
|
37
|
+
cors_methods = ["*"]
|
|
38
|
+
cors_headers = ["*"]
|
|
39
|
+
|
|
40
|
+
# Logging
|
|
41
|
+
log_level = "DEBUG"
|
|
42
|
+
log_dir = "" # Empty uses platform-specific default
|
|
43
|
+
|
|
44
|
+
# Tavily Web Search Configuration
|
|
45
|
+
tavily_api_key = ""
|
|
46
|
+
tavily_timeout = 30.0
|
|
47
|
+
tavily_max_results = 5
|
|
48
|
+
websearch_max_uses = 5
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## 版本号更新清单
|
|
52
|
+
|
|
53
|
+
当修改项目版本号时,需要同步更新以下所有位置:
|
|
54
|
+
|
|
55
|
+
### 1. pyproject.toml
|
|
56
|
+
- **位置**: 第3行
|
|
57
|
+
- **格式**: `version = "x.y.z"`
|
|
58
|
+
- **示例**: `version = "0.3.5"`
|
|
59
|
+
|
|
60
|
+
### 2. src/local_openai2anthropic/__init__.py
|
|
61
|
+
- **位置**: 第6行
|
|
62
|
+
- **格式**: `__version__ = "x.y.z"`
|
|
63
|
+
- **示例**: `__version__ = "0.3.5"`
|
|
64
|
+
|
|
65
|
+
### 3. src/local_openai2anthropic/main.py (FastAPI 应用版本)
|
|
66
|
+
- **位置**: 第104行
|
|
67
|
+
- **格式**: `version="x.y.z"`
|
|
68
|
+
- **示例**: `version="0.3.5"`
|
|
69
|
+
|
|
70
|
+
### 4. src/local_openai2anthropic/main.py (命令行 --version)
|
|
71
|
+
- **位置**: 第254行
|
|
72
|
+
- **格式**: `version="%(prog)s x.y.z"`
|
|
73
|
+
- **示例**: `version="%(prog)s 0.3.5"`
|
|
74
|
+
|
|
75
|
+
### 5. tests/test_main.py (测试断言)
|
|
76
|
+
- **位置**: 第26行
|
|
77
|
+
- **格式**: `assert app.version == "x.y.z"`
|
|
78
|
+
- **示例**: `assert app.version == "0.3.5"`
|
|
79
|
+
|
|
80
|
+
### 6. Git Tag
|
|
81
|
+
- **格式**: `vx.y.z`
|
|
82
|
+
- **示例**: `v0.3.5`
|
|
83
|
+
- **命令**:
|
|
84
|
+
```bash
|
|
85
|
+
git tag v0.3.5
|
|
86
|
+
git push origin v0.3.5
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
## 版本号格式
|
|
90
|
+
|
|
91
|
+
使用语义化版本控制 (Semantic Versioning):
|
|
92
|
+
- **MAJOR**: 不兼容的 API 修改
|
|
93
|
+
- **MINOR**: 向下兼容的功能新增
|
|
94
|
+
- **PATCH**: 向下兼容的问题修复
|
|
95
|
+
|
|
96
|
+
## 发布流程
|
|
97
|
+
|
|
98
|
+
1. 更新上述所有文件中的版本号
|
|
99
|
+
2. 运行测试确保通过: `pytest`
|
|
100
|
+
3. 提交更改: `git commit -m "chore(release): bump version to x.y.z"`
|
|
101
|
+
4. 创建标签: `git tag vx.y.z`
|
|
102
|
+
5. 推送代码和标签: `git push && git push origin vx.y.z`
|
|
103
|
+
6. GitHub Actions 将自动发布到 PyPI
|
|
104
|
+
|
|
105
|
+
## 代码提交规范
|
|
106
|
+
|
|
107
|
+
### 测试覆盖率要求
|
|
108
|
+
|
|
109
|
+
在提交任何新代码之前,必须满足以下测试要求:
|
|
110
|
+
|
|
111
|
+
#### 1. 新代码单元测试
|
|
112
|
+
- **必须**为所有未提交的新代码编写单元测试
|
|
113
|
+
- 新代码的测试覆盖率**必须 > 90%**
|
|
114
|
+
- 测试文件命名规范: `test_<module_name>.py`
|
|
115
|
+
- 测试函数命名规范: `test_<function_name>_<scenario>`
|
|
116
|
+
|
|
117
|
+
#### 2. 总体覆盖率检查
|
|
118
|
+
- 运行 `/everything-claude-code:test-coverage` 检查总体覆盖率
|
|
119
|
+
- **总体覆盖率必须 ≥ 80%**
|
|
120
|
+
- 如果总体覆盖率低于 80%,需要补充测试或优化现有代码
|
|
121
|
+
|
|
122
|
+
#### 3. 代码审查
|
|
123
|
+
|
|
124
|
+
在提交代码前,必须进行代码审查:
|
|
125
|
+
|
|
126
|
+
```bash
|
|
127
|
+
# 运行代码审查(检查安全漏洞、代码质量、最佳实践)
|
|
128
|
+
/code-review:code-review
|
|
129
|
+
|
|
130
|
+
# 根据审查结果修复问题
|
|
131
|
+
# - CRITICAL/HIGH 级别问题必须修复
|
|
132
|
+
# - MEDIUM/LOW 级别问题根据情况处理
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
代码审查将检查:
|
|
136
|
+
- **安全问题**: 硬编码凭证、SQL注入、XSS、路径遍历等
|
|
137
|
+
- **代码质量**: 函数长度、嵌套深度、错误处理、console.log等
|
|
138
|
+
- **最佳实践**: 不可变模式、emoji使用、测试覆盖、可访问性等
|
|
139
|
+
|
|
140
|
+
#### 4. 提交流程
|
|
141
|
+
```bash
|
|
142
|
+
# 1. 编写新代码
|
|
143
|
+
# 2. 编写对应的单元测试
|
|
144
|
+
# 3. 运行测试并检查覆盖率
|
|
145
|
+
pytest --cov=src/local_openai2anthropic --cov-report=term-missing
|
|
146
|
+
|
|
147
|
+
# 4. 运行 Claude Code 覆盖率检查
|
|
148
|
+
/everything-claude-code:test-coverage
|
|
149
|
+
|
|
150
|
+
# 5. 运行代码审查
|
|
151
|
+
/code-review:code-review
|
|
152
|
+
|
|
153
|
+
# 6. 确认新代码覆盖率 > 90% 且总体覆盖率 ≥ 80%,且无 CRITICAL/HIGH 问题
|
|
154
|
+
# 7. 提交代码
|
|
155
|
+
git add .
|
|
156
|
+
git commit -m "feat: your commit message"
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
#### 5. 测试质量标准
|
|
160
|
+
- 测试用例应覆盖正常路径、边界条件和异常情况
|
|
161
|
+
- 使用 `pytest` 作为测试框架
|
|
162
|
+
- 异步代码使用 `pytest-asyncio`
|
|
163
|
+
- 适当的测试夹具 (fixtures) 和参数化测试
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: local-openai2anthropic
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.7
|
|
4
4
|
Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
|
|
5
5
|
Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
|
|
6
6
|
Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
|
|
@@ -24,6 +24,7 @@ Requires-Dist: httpx>=0.25.0
|
|
|
24
24
|
Requires-Dist: openai>=1.30.0
|
|
25
25
|
Requires-Dist: pydantic-settings>=2.0.0
|
|
26
26
|
Requires-Dist: pydantic>=2.0.0
|
|
27
|
+
Requires-Dist: tomli-w>=1.0.0
|
|
27
28
|
Requires-Dist: uvicorn[standard]>=0.23.0
|
|
28
29
|
Provides-Extra: dev
|
|
29
30
|
Requires-Dist: black>=23.0.0; extra == 'dev'
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "local-openai2anthropic"
|
|
3
|
-
version = "0.3.
|
|
3
|
+
version = "0.3.7"
|
|
4
4
|
description = "A lightweight proxy server that converts Anthropic Messages API to OpenAI API"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
license = { text = "Apache-2.0" }
|
|
@@ -29,6 +29,7 @@ dependencies = [
|
|
|
29
29
|
"pydantic-settings>=2.0.0",
|
|
30
30
|
"anthropic>=0.30.0",
|
|
31
31
|
"openai>=1.30.0",
|
|
32
|
+
"tomli-w>=1.0.0",
|
|
32
33
|
]
|
|
33
34
|
|
|
34
35
|
[project.optional-dependencies]
|
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
2
|
+
"""
|
|
3
|
+
Configuration settings for the proxy server.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import sys
|
|
7
|
+
from functools import lru_cache
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Optional
|
|
10
|
+
|
|
11
|
+
from pydantic import BaseModel, ConfigDict
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def get_config_dir() -> Path:
    """Return the per-user configuration directory.

    All user-level configuration for the proxy lives under a single
    hidden directory in the user's home folder.

    Returns:
        Path to the config directory (``~/.oa2a``).
    """
    home = Path.home()
    return home / ".oa2a"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_config_file() -> Path:
    """Return the full path of the TOML configuration file.

    Returns:
        Path to the config file (``~/.oa2a/config.toml``).
    """
    config_dir = get_config_dir()
    return config_dir / "config.toml"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def create_default_config() -> bool:
    """Create a commented default config file if one does not exist yet.

    The generated file lists every supported setting with its default
    value so users only have to fill in their API keys.

    Returns:
        True if a new config file was created, False if it already exists
    """
    config_file = get_config_file()
    if config_file.exists():
        return False

    config_dir = get_config_dir()
    config_dir.mkdir(parents=True, exist_ok=True)

    # Restrict the config directory to the owner (0o700) on Unix-like
    # systems: it will contain files holding API keys.
    if sys.platform != "win32":
        config_dir.chmod(0o700)

    default_config = """# OA2A Configuration File
# Place this file at ~/.oa2a/config.toml

# OpenAI API Configuration
openai_api_key = ""
openai_base_url = "https://api.openai.com/v1"
openai_org_id = ""
openai_project_id = ""

# Server Configuration
host = "0.0.0.0"
port = 8080
request_timeout = 300.0

# API Key for authenticating requests to this server (optional)
api_key = ""

# CORS settings
cors_origins = ["*"]
cors_credentials = true
cors_methods = ["*"]
cors_headers = ["*"]

# Logging
log_level = "INFO"
log_dir = "" # Empty uses platform-specific default

# Tavily Web Search Configuration
tavily_api_key = ""
tavily_timeout = 30.0
tavily_max_results = 5
websearch_max_uses = 5
"""
    # Pre-create the file with owner-only permissions so there is no
    # window in which the freshly written file carries umask-default
    # (possibly world-readable) permissions.
    config_file.touch(mode=0o600)
    config_file.write_text(default_config, encoding="utf-8")

    # Re-apply restrictive permissions (0o600) in case the umask widened
    # them at creation time (no-op on Windows, where chmod is limited).
    if sys.platform != "win32":
        config_file.chmod(0o600)

    return True
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def interactive_setup() -> dict:
    """Interactive configuration setup wizard.

    Guides user through setting up essential configuration values.
    Prompts are read from stdin; callers should only invoke this when
    running in an interactive terminal (see ``is_interactive``).

    Returns:
        Dictionary containing user-provided configuration
    """
    print("=" * 60)
    print(" Welcome to local-openai2anthropic Setup Wizard")
    print("=" * 60)
    print()
    print("This wizard will help you create the initial configuration.")
    print(f"Config file will be saved to: {get_config_file()}")
    print()

    config = {}

    # OpenAI API Key (required)
    print("[1/3] OpenAI API Configuration")
    print("-" * 40)
    # Re-prompt until a non-empty key is entered; there is no way to skip.
    while True:
        api_key = input("Enter your OpenAI API Key (required): ").strip()
        if api_key:
            config["openai_api_key"] = api_key
            break
        print("API Key is required. Please enter a valid key.")

    # Base URL (optional, with default)
    default_url = "https://api.openai.com/v1"
    base_url = input(f"Enter OpenAI Base URL [{default_url}]: ").strip()
    config["openai_base_url"] = base_url if base_url else default_url

    print()
    print("[2/3] Server Configuration")
    print("-" * 40)

    # Host (with default)
    default_host = "0.0.0.0"
    host = input(f"Enter server host [{default_host}]: ").strip()
    config["host"] = host if host else default_host

    # Port (with default); non-numeric input falls back to the default
    # rather than re-prompting.
    default_port = "8080"
    port_input = input(f"Enter server port [{default_port}]: ").strip()
    try:
        config["port"] = int(port_input) if port_input else int(default_port)
    except ValueError:
        print(f"Invalid port number, using default: {default_port}")
        config["port"] = int(default_port)

    # API Key for server authentication (optional); omitted from the
    # returned dict entirely when left empty.
    print()
    print("[3/3] Server API Authentication (Optional)")
    print("-" * 40)
    print("Set an API key to authenticate requests to this server.")
    print(
        "Leave empty to allow unauthenticated access (not recommended for production)."
    )
    server_api_key = input("Enter server API key (optional): ").strip()
    if server_api_key:
        config["api_key"] = server_api_key

    # Summary: only the first 8 characters of each secret are echoed.
    print()
    print("=" * 60)
    print(" Configuration Summary")
    print("=" * 60)
    print(f"OpenAI Base URL: {config.get('openai_base_url', default_url)}")
    print(
        f"Server: {config.get('host', default_host)}:{config.get('port', default_port)}"
    )
    print(f"OpenAI API Key: {config.get('openai_api_key', '')[:8]}... (configured)")
    if config.get("api_key"):
        print(f"Server Auth: {config['api_key'][:8]}... (configured)")
    print()

    return config
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def create_config_from_dict(config: dict) -> None:
    """Create config file from dictionary.

    Values not present in *config* are written with their documented
    defaults; ``api_key`` and ``tavily_api_key`` are only emitted when set.

    Args:
        config: Dictionary containing configuration values
    """
    import os
    import tomli_w

    config_file = get_config_file()
    config_dir = get_config_dir()
    config_dir.mkdir(parents=True, exist_ok=True)

    # Restrict the config directory to the owner on Unix-like systems.
    if sys.platform != "win32":
        config_dir.chmod(0o700)

    # Build config dict with proper structure
    toml_config: dict = {
        "openai_api_key": config.get("openai_api_key", ""),
        "openai_base_url": config.get("openai_base_url", "https://api.openai.com/v1"),
        "host": config.get("host", "0.0.0.0"),
        "port": config.get("port", 8080),
        "request_timeout": config.get("request_timeout", 300.0),
        "cors_origins": ["*"],
        "cors_credentials": True,
        "cors_methods": ["*"],
        "cors_headers": ["*"],
        "log_level": "INFO",
        "log_dir": "",
        "tavily_timeout": 30.0,
        "tavily_max_results": 5,
        "websearch_max_uses": 5,
    }

    # Add optional values only if present
    if config.get("api_key"):
        toml_config["api_key"] = config["api_key"]

    if config.get("tavily_api_key"):
        toml_config["tavily_api_key"] = config["tavily_api_key"]

    # Write using proper TOML serialization (prevents injection attacks).
    # The file contains API keys, so create it with owner-only permissions
    # (0o600) up front instead of chmod-ing after the write — this avoids
    # a window where the secrets sit in a world-readable file.
    fd = os.open(config_file, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "wb") as f:
        tomli_w.dump(toml_config, f)

    # If the file pre-existed, O_CREAT's mode argument is ignored, so
    # re-apply restrictive permissions explicitly on Unix-like systems.
    if sys.platform != "win32":
        config_file.chmod(0o600)
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def load_config_from_file() -> dict:
    """Read the TOML configuration file into a plain dictionary.

    Returns:
        Parsed configuration values; an empty dict when no config file exists
    """
    # tomllib joined the standard library in Python 3.11; older
    # interpreters use the API-compatible `tomli` backport instead.
    if sys.version_info >= (3, 11):
        import tomllib
    else:
        import tomli as tomllib

    path = get_config_file()
    if not path.exists():
        return {}
    with path.open("rb") as fh:
        return tomllib.load(fh)
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
class Settings(BaseModel):
    """Application settings loaded from the TOML config file.

    Unknown keys in the file are ignored so old configs keep working
    after new settings are added.
    """

    model_config = ConfigDict(extra="ignore")

    # --- OpenAI upstream ---
    openai_api_key: Optional[str] = None
    openai_base_url: str = "https://api.openai.com/v1"
    openai_org_id: Optional[str] = None
    openai_project_id: Optional[str] = None

    # --- Local proxy server ---
    host: str = "0.0.0.0"
    port: int = 8080
    request_timeout: float = 300.0  # seconds (5 minutes)

    # Optional API key clients of this proxy must present
    api_key: Optional[str] = None

    # --- CORS ---
    cors_origins: list[str] = ["*"]
    cors_credentials: bool = True
    cors_methods: list[str] = ["*"]
    cors_headers: list[str] = ["*"]

    # --- Logging ---
    log_level: str = "INFO"
    log_dir: str = ""  # empty string selects the platform-specific default

    # --- Tavily web search ---
    tavily_api_key: Optional[str] = None
    tavily_timeout: float = 30.0
    tavily_max_results: int = 5
    websearch_max_uses: int = 5  # default max_uses per request

    @property
    def openai_auth_headers(self) -> dict[str, str]:
        """HTTP headers used to authenticate against the OpenAI API."""
        headers = {"Authorization": f"Bearer {self.openai_api_key}"}
        # Organization/project headers are optional; send them only when set.
        for name, value in (
            ("OpenAI-Organization", self.openai_org_id),
            ("OpenAI-Project", self.openai_project_id),
        ):
            if value:
                headers[name] = value
        return headers

    @classmethod
    def from_toml(cls) -> "Settings":
        """Build a Settings instance from the on-disk TOML config file.

        Returns:
            Settings instance populated from config file
        """
        return cls(**load_config_from_file())
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def is_interactive() -> bool:
    """Report whether the process is attached to an interactive terminal.

    Returns:
        True when stdin is a TTY; False for pipes, CI jobs, and daemons.
    """
    stdin = sys.stdin
    return bool(stdin.isatty())
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings, creating a config file if needed.

    When no config file exists yet, either runs the interactive setup
    wizard (when attached to a TTY) or writes a commented default config
    file (non-interactive environments). The result is cached for the
    lifetime of the process.

    Returns:
        Settings instance loaded from config file
    """
    config_file = get_config_file()
    if config_file.exists():
        return Settings.from_toml()

    if is_interactive():
        # TTY: walk the user through the wizard and persist the answers.
        create_config_from_dict(interactive_setup())
        print(f"\nConfiguration saved to: {config_file}")
        print("You can edit this file later to change settings.\n")
    else:
        # Headless (daemon/CI): drop a default file for the user to edit.
        create_default_config()
        print(f"Created default config file: {config_file}")
        print("Please edit it to add your API keys and settings.")
    return Settings.from_toml()
|
{local_openai2anthropic-0.3.5 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/main.py
RENAMED
|
@@ -15,7 +15,7 @@ from fastapi import FastAPI, HTTPException, Request
|
|
|
15
15
|
from fastapi.middleware.cors import CORSMiddleware
|
|
16
16
|
from fastapi.responses import JSONResponse
|
|
17
17
|
|
|
18
|
-
from local_openai2anthropic.config import Settings, get_settings
|
|
18
|
+
from local_openai2anthropic.config import Settings, get_config_file, get_settings
|
|
19
19
|
from local_openai2anthropic.protocol import AnthropicError, AnthropicErrorResponse
|
|
20
20
|
from local_openai2anthropic.router import router
|
|
21
21
|
|
|
@@ -101,7 +101,7 @@ def create_app(settings: Settings | None = None) -> FastAPI:
|
|
|
101
101
|
app = FastAPI(
|
|
102
102
|
title="local-openai2anthropic",
|
|
103
103
|
description="A proxy server that converts Anthropic Messages API to OpenAI API",
|
|
104
|
-
version="0.3.
|
|
104
|
+
version="0.3.7",
|
|
105
105
|
docs_url="/docs",
|
|
106
106
|
redoc_url="/redoc",
|
|
107
107
|
)
|
|
@@ -203,11 +203,13 @@ def run_foreground(settings: Settings) -> None:
|
|
|
203
203
|
"""Run server in foreground mode (blocking)."""
|
|
204
204
|
# Validate required settings
|
|
205
205
|
if not settings.openai_api_key:
|
|
206
|
+
config_file = get_config_file()
|
|
206
207
|
print(
|
|
207
|
-
"Error:
|
|
208
|
-
"
|
|
209
|
-
"
|
|
210
|
-
"
|
|
208
|
+
f"Error: openai_api_key is required.\n"
|
|
209
|
+
f"Please edit the configuration file:\n"
|
|
210
|
+
f" {config_file}\n"
|
|
211
|
+
f"\nSet your OpenAI API key:\n"
|
|
212
|
+
f' openai_api_key = "your-api-key"',
|
|
211
213
|
file=sys.stderr,
|
|
212
214
|
)
|
|
213
215
|
sys.exit(1)
|
|
@@ -251,7 +253,7 @@ Examples:
|
|
|
251
253
|
parser.add_argument(
|
|
252
254
|
"--version",
|
|
253
255
|
action="version",
|
|
254
|
-
version="%(prog)s 0.
|
|
256
|
+
version="%(prog)s 0.3.7",
|
|
255
257
|
)
|
|
256
258
|
|
|
257
259
|
# Create subparsers for commands
|