local-openai2anthropic 0.3.6__tar.gz → 0.3.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/PKG-INFO +2 -2
  2. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/pyproject.toml +2 -2
  3. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/__init__.py +1 -1
  4. local_openai2anthropic-0.3.7/src/local_openai2anthropic/config.py +328 -0
  5. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/main.py +2 -2
  6. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_config.py +285 -0
  7. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_logging.py +78 -74
  8. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_main.py +1 -1
  9. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_server_tools.py +11 -4
  10. local_openai2anthropic-0.3.6/src/local_openai2anthropic/config.py +0 -181
  11. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/.claude/CLAUDE.md +0 -0
  12. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/.github/workflows/publish.yml +0 -0
  13. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/.gitignore +0 -0
  14. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/.reports/dead-code-analysis.md +0 -0
  15. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/LICENSE +0 -0
  16. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/README.md +0 -0
  17. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/README_zh.md +0 -0
  18. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/basic_chat.py +0 -0
  19. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/streaming.py +0 -0
  20. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/thinking_mode.py +0 -0
  21. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/tool_calling.py +0 -0
  22. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/vision.py +0 -0
  23. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/examples/web_search.py +0 -0
  24. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/__main__.py +0 -0
  25. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/converter.py +0 -0
  26. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/daemon.py +0 -0
  27. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/daemon_runner.py +0 -0
  28. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/openai_types.py +0 -0
  29. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/protocol.py +0 -0
  30. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/router.py +0 -0
  31. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/__init__.py +0 -0
  32. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/base.py +0 -0
  33. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/server_tools/web_search.py +0 -0
  34. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/streaming/__init__.py +0 -0
  35. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/streaming/handler.py +0 -0
  36. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tavily_client.py +0 -0
  37. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tools/__init__.py +0 -0
  38. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/tools/handler.py +0 -0
  39. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/utils/__init__.py +0 -0
  40. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/src/local_openai2anthropic/utils/tokens.py +0 -0
  41. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/__init__.py +0 -0
  42. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/coverage/coverage.json +0 -0
  43. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_detailed.json +0 -0
  44. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_report.json +0 -0
  45. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_report_new.json +0 -0
  46. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/coverage/coverage_summary.json +0 -0
  47. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_converter.py +0 -0
  48. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_converter_edge_cases.py +0 -0
  49. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_daemon.py +0 -0
  50. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_daemon_advanced.py +0 -0
  51. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_daemon_runner.py +0 -0
  52. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_e2e_multimodel.py +0 -0
  53. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_e2e_websearch.py +0 -0
  54. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_integration.py +0 -0
  55. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_openai_types.py +0 -0
  56. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_protocol.py +0 -0
  57. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_router.py +0 -0
  58. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_router_comprehensive.py +0 -0
  59. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_router_edge_cases.py +0 -0
  60. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_router_streaming.py +0 -0
  61. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_tavily_client.py +0 -0
  62. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/tests/test_upstream.sh +0 -0
  63. {local_openai2anthropic-0.3.6 → local_openai2anthropic-0.3.7}/uv.lock +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: local-openai2anthropic
- Version: 0.3.6
+ Version: 0.3.7
  Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
  Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
  Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
@@ -24,7 +24,7 @@ Requires-Dist: httpx>=0.25.0
  Requires-Dist: openai>=1.30.0
  Requires-Dist: pydantic-settings>=2.0.0
  Requires-Dist: pydantic>=2.0.0
- Requires-Dist: tomli>=2.0.0; python_version < '3.11'
+ Requires-Dist: tomli-w>=1.0.0
  Requires-Dist: uvicorn[standard]>=0.23.0
  Provides-Extra: dev
  Requires-Dist: black>=23.0.0; extra == 'dev'
pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "local-openai2anthropic"
- version = "0.3.6"
+ version = "0.3.7"
  description = "A lightweight proxy server that converts Anthropic Messages API to OpenAI API"
  readme = "README.md"
  license = { text = "Apache-2.0" }
@@ -29,7 +29,7 @@ dependencies = [
      "pydantic-settings>=2.0.0",
      "anthropic>=0.30.0",
      "openai>=1.30.0",
-     "tomli>=2.0.0; python_version<'3.11'",
+     "tomli-w>=1.0.0",
  ]

  [project.optional-dependencies]
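
Note on the dependency swap above: reads go through the standard-library tomllib on Python 3.11+ (the new config.py below keeps a tomli fallback for older interpreters), while the new tomli-w dependency covers the write path used by the setup wizard in create_config_from_dict. A minimal sketch of that read/write round trip, not taken from the package; the file name and keys are illustrative:

    import sys

    import tomli_w  # write-only TOML serializer (new dependency in 0.3.7)

    if sys.version_info >= (3, 11):
        import tomllib  # stdlib TOML reader
    else:
        import tomli as tomllib

    data = {"openai_base_url": "https://api.openai.com/v1", "port": 8080}

    # Serialize the dict to TOML (binary file handle, as tomli-w expects).
    with open("config.toml", "wb") as f:
        tomli_w.dump(data, f)

    # Read it back with the stdlib/compat reader.
    with open("config.toml", "rb") as f:
        assert tomllib.load(f) == data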
src/local_openai2anthropic/__init__.py
@@ -3,7 +3,7 @@
  local-openai2anthropic: A proxy server that converts Anthropic Messages API to OpenAI API.
  """

- __version__ = "0.3.6"
+ __version__ = "0.3.7"

  from local_openai2anthropic.protocol import (
      AnthropicError,
src/local_openai2anthropic/config.py (new file)
@@ -0,0 +1,328 @@
+ # SPDX-License-Identifier: Apache-2.0
+ """
+ Configuration settings for the proxy server.
+ """
+
+ import sys
+ from functools import lru_cache
+ from pathlib import Path
+ from typing import Optional
+
+ from pydantic import BaseModel, ConfigDict
+
+
+ def get_config_dir() -> Path:
+     """Get platform-specific config directory.
+
+     Returns:
+         Path to the config directory (~/.oa2a)
+     """
+     return Path.home() / ".oa2a"
+
+
+ def get_config_file() -> Path:
+     """Get config file path.
+
+     Returns:
+         Path to the config file (~/.oa2a/config.toml)
+     """
+     return get_config_dir() / "config.toml"
+
+
+ def create_default_config() -> bool:
+     """Create default config file if not exists.
+
+     Returns:
+         True if a new config file was created, False if it already exists
+     """
+     config_file = get_config_file()
+     if config_file.exists():
+         return False
+
+     config_dir = get_config_dir()
+     config_dir.mkdir(parents=True, exist_ok=True)
+
+     # Set restrictive permissions (0o600) for the config directory on Unix-like systems
+     if sys.platform != "win32":
+         config_dir.chmod(0o700)
+
+     default_config = """# OA2A Configuration File
+ # Place this file at ~/.oa2a/config.toml
+
+ # OpenAI API Configuration
+ openai_api_key = ""
+ openai_base_url = "https://api.openai.com/v1"
+ openai_org_id = ""
+ openai_project_id = ""
+
+ # Server Configuration
+ host = "0.0.0.0"
+ port = 8080
+ request_timeout = 300.0
+
+ # API Key for authenticating requests to this server (optional)
+ api_key = ""
+
+ # CORS settings
+ cors_origins = ["*"]
+ cors_credentials = true
+ cors_methods = ["*"]
+ cors_headers = ["*"]
+
+ # Logging
+ log_level = "INFO"
+ log_dir = "" # Empty uses platform-specific default
+
+ # Tavily Web Search Configuration
+ tavily_api_key = ""
+ tavily_timeout = 30.0
+ tavily_max_results = 5
+ websearch_max_uses = 5
+ """
+     config_file.write_text(default_config, encoding="utf-8")
+
+     # Set restrictive permissions (0o600) for the config file on Unix-like systems
+     if sys.platform != "win32":
+         config_file.chmod(0o600)
+
+     return True
+
+
+ def interactive_setup() -> dict:
+     """Interactive configuration setup wizard.
+
+     Guides user through setting up essential configuration values.
+
+     Returns:
+         Dictionary containing user-provided configuration
+     """
+     print("=" * 60)
+     print(" Welcome to local-openai2anthropic Setup Wizard")
+     print("=" * 60)
+     print()
+     print("This wizard will help you create the initial configuration.")
+     print(f"Config file will be saved to: {get_config_file()}")
+     print()
+
+     config = {}
+
+     # OpenAI API Key (required)
+     print("[1/3] OpenAI API Configuration")
+     print("-" * 40)
+     while True:
+         api_key = input("Enter your OpenAI API Key (required): ").strip()
+         if api_key:
+             config["openai_api_key"] = api_key
+             break
+         print("API Key is required. Please enter a valid key.")
+
+     # Base URL (optional, with default)
+     default_url = "https://api.openai.com/v1"
+     base_url = input(f"Enter OpenAI Base URL [{default_url}]: ").strip()
+     config["openai_base_url"] = base_url if base_url else default_url
+
+     print()
+     print("[2/3] Server Configuration")
+     print("-" * 40)
+
+     # Host (with default)
+     default_host = "0.0.0.0"
+     host = input(f"Enter server host [{default_host}]: ").strip()
+     config["host"] = host if host else default_host
+
+     # Port (with default)
+     default_port = "8080"
+     port_input = input(f"Enter server port [{default_port}]: ").strip()
+     try:
+         config["port"] = int(port_input) if port_input else int(default_port)
+     except ValueError:
+         print(f"Invalid port number, using default: {default_port}")
+         config["port"] = int(default_port)
+
+     # API Key for server authentication (optional)
+     print()
+     print("[3/3] Server API Authentication (Optional)")
+     print("-" * 40)
+     print("Set an API key to authenticate requests to this server.")
+     print(
+         "Leave empty to allow unauthenticated access (not recommended for production)."
+     )
+     server_api_key = input("Enter server API key (optional): ").strip()
+     if server_api_key:
+         config["api_key"] = server_api_key
+
+     print()
+     print("=" * 60)
+     print(" Configuration Summary")
+     print("=" * 60)
+     print(f"OpenAI Base URL: {config.get('openai_base_url', default_url)}")
+     print(
+         f"Server: {config.get('host', default_host)}:{config.get('port', default_port)}"
+     )
+     print(f"OpenAI API Key: {config.get('openai_api_key', '')[:8]}... (configured)")
+     if config.get("api_key"):
+         print(f"Server Auth: {config['api_key'][:8]}... (configured)")
+     print()
+
+     return config
+
+
+ def create_config_from_dict(config: dict) -> None:
+     """Create config file from dictionary.
+
+     Args:
+         config: Dictionary containing configuration values
+     """
+     import tomli_w
+
+     config_file = get_config_file()
+     config_dir = get_config_dir()
+     config_dir.mkdir(parents=True, exist_ok=True)
+
+     # Set restrictive permissions for the config directory on Unix-like systems
+     if sys.platform != "win32":
+         config_dir.chmod(0o700)
+
+     # Build config dict with proper structure
+     toml_config: dict = {
+         "openai_api_key": config.get("openai_api_key", ""),
+         "openai_base_url": config.get("openai_base_url", "https://api.openai.com/v1"),
+         "host": config.get("host", "0.0.0.0"),
+         "port": config.get("port", 8080),
+         "request_timeout": config.get("request_timeout", 300.0),
+         "cors_origins": ["*"],
+         "cors_credentials": True,
+         "cors_methods": ["*"],
+         "cors_headers": ["*"],
+         "log_level": "INFO",
+         "log_dir": "",
+         "tavily_timeout": 30.0,
+         "tavily_max_results": 5,
+         "websearch_max_uses": 5,
+     }
+
+     # Add optional values only if present
+     if config.get("api_key"):
+         toml_config["api_key"] = config["api_key"]
+
+     if config.get("tavily_api_key"):
+         toml_config["tavily_api_key"] = config["tavily_api_key"]
+
+     # Write using proper TOML serialization (prevents injection attacks)
+     with open(config_file, "wb") as f:
+         tomli_w.dump(toml_config, f)
+
+     # Set restrictive permissions for the config file on Unix-like systems
+     if sys.platform != "win32":
+         config_file.chmod(0o600)
+
+
+ def load_config_from_file() -> dict:
+     """Load configuration from TOML file.
+
+     Returns:
+         Dictionary containing configuration values, empty dict if file doesn't exist
+     """
+     if sys.version_info >= (3, 11):
+         import tomllib
+     else:
+         import tomli as tomllib
+
+     config_file = get_config_file()
+     if not config_file.exists():
+         return {}
+     with open(config_file, "rb") as f:
+         return tomllib.load(f)
+
+
+ class Settings(BaseModel):
+     """Application settings loaded from config file."""
+
+     model_config = ConfigDict(extra="ignore")
+
+     # OpenAI API Configuration
+     openai_api_key: Optional[str] = None
+     openai_base_url: str = "https://api.openai.com/v1"
+     openai_org_id: Optional[str] = None
+     openai_project_id: Optional[str] = None
+
+     # Server Configuration
+     host: str = "0.0.0.0"
+     port: int = 8080
+     request_timeout: float = 300.0  # 5 minutes
+
+     # API Key for authenticating requests to this server (optional)
+     api_key: Optional[str] = None
+
+     # CORS settings
+     cors_origins: list[str] = ["*"]
+     cors_credentials: bool = True
+     cors_methods: list[str] = ["*"]
+     cors_headers: list[str] = ["*"]
+
+     # Logging
+     log_level: str = "INFO"
+     log_dir: str = ""  # Empty means use platform-specific default
+
+     # Tavily Web Search Configuration
+     tavily_api_key: Optional[str] = None
+     tavily_timeout: float = 30.0
+     tavily_max_results: int = 5
+     websearch_max_uses: int = 5  # Default max_uses per request
+
+     @property
+     def openai_auth_headers(self) -> dict[str, str]:
+         """Get OpenAI authentication headers."""
+         headers = {
+             "Authorization": f"Bearer {self.openai_api_key}",
+         }
+         if self.openai_org_id:
+             headers["OpenAI-Organization"] = self.openai_org_id
+         if self.openai_project_id:
+             headers["OpenAI-Project"] = self.openai_project_id
+         return headers
+
+     @classmethod
+     def from_toml(cls) -> "Settings":
+         """Load settings from TOML config file.
+
+         Returns:
+             Settings instance populated from config file
+         """
+         config_data = load_config_from_file()
+         return cls(**config_data)
+
+
+ def is_interactive() -> bool:
+     """Check if running in an interactive terminal.
+
+     Returns:
+         True if stdin is a TTY (interactive), False otherwise
+     """
+     return sys.stdin.isatty()
+
+
+ @lru_cache
+ def get_settings() -> Settings:
+     """Get cached settings instance.
+
+     Creates config file interactively if it doesn't exist and running in a TTY.
+     Falls back to creating a default config file in non-interactive environments.
+
+     Returns:
+         Settings instance loaded from config file
+     """
+     config_file = get_config_file()
+     if not config_file.exists():
+         if is_interactive():
+             # Interactive setup wizard
+             config = interactive_setup()
+             create_config_from_dict(config)
+             print(f"\nConfiguration saved to: {config_file}")
+             print("You can edit this file later to change settings.\n")
+         else:
+             # Non-interactive environment: create default config
+             create_default_config()
+             print(f"Created default config file: {config_file}")
+             print("Please edit it to add your API keys and settings.")
+     return Settings.from_toml()
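
The config.py added above replaces the 0.3.6 version of the module (removed later in this diff). A minimal usage sketch, not part of the package, showing how the exported helpers fit together; all names come from the module shown above:

    from local_openai2anthropic.config import get_config_file, get_settings

    # The first call creates ~/.oa2a/config.toml (setup wizard in a TTY,
    # default file otherwise) and returns a cached Settings instance.
    settings = get_settings()
    print(f"Config file: {get_config_file()}")
    print(f"Proxy listens on {settings.host}:{settings.port}")

    # Headers attached when forwarding requests to the OpenAI-compatible upstream.
    headers = settings.openai_auth_headers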
src/local_openai2anthropic/main.py
@@ -101,7 +101,7 @@ def create_app(settings: Settings | None = None) -> FastAPI:
      app = FastAPI(
          title="local-openai2anthropic",
          description="A proxy server that converts Anthropic Messages API to OpenAI API",
-         version="0.3.6",
+         version="0.3.7",
          docs_url="/docs",
          redoc_url="/redoc",
      )
@@ -253,7 +253,7 @@ Examples:
      parser.add_argument(
          "--version",
          action="version",
-         version="%(prog)s 0.3.6",
+         version="%(prog)s 0.3.7",
      )

      # Create subparsers for commands
tests/test_config.py
@@ -8,10 +8,13 @@ import pytest

  from local_openai2anthropic.config import (
      Settings,
+     create_config_from_dict,
      create_default_config,
      get_config_dir,
      get_config_file,
      get_settings,
+     interactive_setup,
+     is_interactive,
      load_config_from_file,
  )

@@ -360,5 +363,287 @@ class TestGetSettings:
          assert "Please edit it to add your API keys and settings" in captured.out


+ class TestIsInteractive:
+     """Tests for is_interactive function."""
+
+     def test_is_interactive_returns_bool(self):
+         """Test that is_interactive returns a boolean."""
+         result = is_interactive()
+         assert isinstance(result, bool)
+
+
+ class TestCreateConfigFromDict:
+     """Tests for create_config_from_dict function."""
+
+     def test_create_config_from_dict_basic(self, tmp_path, monkeypatch):
+         """Test creating config file from dictionary with basic values."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         config = {
+             "openai_api_key": "test-api-key",
+             "openai_base_url": "https://api.openai.com/v1",
+             "host": "0.0.0.0",
+             "port": 8080,
+         }
+
+         create_config_from_dict(config)
+
+         config_file = get_config_file()
+         assert config_file.exists()
+
+         content = config_file.read_text()
+         assert 'openai_api_key = "test-api-key"' in content
+         assert 'openai_base_url = "https://api.openai.com/v1"' in content
+         assert 'host = "0.0.0.0"' in content
+         assert "port = 8080" in content
+
+     def test_create_config_from_dict_with_optional_values(self, tmp_path, monkeypatch):
+         """Test creating config with optional values."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         config = {
+             "openai_api_key": "test-api-key",
+             "api_key": "server-api-key",
+         }
+
+         create_config_from_dict(config)
+
+         config_file = get_config_file()
+         content = config_file.read_text()
+
+         assert 'api_key = "server-api-key"' in content
+
+     def test_create_config_from_dict_without_optional_values(
+         self, tmp_path, monkeypatch
+     ):
+         """Test creating config without optional values omits them."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         config = {
+             "openai_api_key": "test-api-key",
+         }
+
+         create_config_from_dict(config)
+
+         config_file = get_config_file()
+         content = config_file.read_text()
+
+         # Parse TOML to verify structure
+         import tomllib
+
+         with open(config_file, "rb") as f:
+             parsed = tomllib.load(f)
+
+         # Optional api_key should not be present when not provided
+         assert "api_key" not in parsed
+         # But openai_api_key should be present
+         assert parsed["openai_api_key"] == "test-api-key"
+
+     def test_create_config_from_dict_custom_host_port(self, tmp_path, monkeypatch):
+         """Test creating config with custom host and port."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         config = {
+             "openai_api_key": "test-key",
+             "host": "127.0.0.1",
+             "port": 9000,
+         }
+
+         create_config_from_dict(config)
+
+         config_file = get_config_file()
+         content = config_file.read_text()
+
+         assert 'host = "127.0.0.1"' in content
+         assert "port = 9000" in content
+
+     def test_create_config_from_dict_creates_directory(self, tmp_path, monkeypatch):
+         """Test that create_config_from_dict creates the config directory."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         config_dir = tmp_path / ".oa2a"
+         assert not config_dir.exists()
+
+         create_config_from_dict({"openai_api_key": "test"})
+
+         assert config_dir.exists()
+         assert config_dir.is_dir()
+
+     def test_create_config_file_permissions(self, tmp_path, monkeypatch):
+         """Test that created config file has correct permissions."""
+         import sys
+
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+
+         create_config_from_dict({"openai_api_key": "test"})
+
+         if sys.platform != "win32":
+             import stat
+
+             config_file = get_config_file()
+             config_dir = get_config_dir()
+
+             file_perms = stat.S_IMODE(config_file.stat().st_mode)
+             dir_perms = stat.S_IMODE(config_dir.stat().st_mode)
+
+             assert file_perms == 0o600
+             assert dir_perms == 0o700
+
+
+ class TestInteractiveSetup:
+     """Tests for interactive_setup function."""
+
+     def test_interactive_setup_all_values(self, monkeypatch):
+         """Test interactive setup with all values provided."""
+         # Mock get_config_file to return a fixed path
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_file",
+             lambda: Path("/tmp/.oa2a/config.toml"),
+         )
+
+         # Mock input for all inputs
+         inputs = iter(
+             [
+                 "openai-api-key",  # api_key (required)
+                 "https://custom.api.com",  # base_url
+                 "127.0.0.1",  # host
+                 "9000",  # port
+                 "server-api-key",  # server api_key
+             ]
+         )
+         monkeypatch.setattr("builtins.input", lambda prompt: next(inputs))
+
+         config = interactive_setup()
+
+         assert config["openai_api_key"] == "openai-api-key"
+         assert config["openai_base_url"] == "https://custom.api.com"
+         assert config["host"] == "127.0.0.1"
+         assert config["port"] == 9000
+         assert config["api_key"] == "server-api-key"
+
+     def test_interactive_setup_defaults(self, monkeypatch):
+         """Test interactive setup with default values."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_file",
+             lambda: Path("/tmp/.oa2a/config.toml"),
+         )
+
+         # Mock input - api key + all empty to accept defaults
+         inputs = iter(["openai-api-key", "", "", "", ""])
+         monkeypatch.setattr("builtins.input", lambda prompt: next(inputs))
+
+         config = interactive_setup()
+
+         assert config["openai_api_key"] == "openai-api-key"
+         assert config["openai_base_url"] == "https://api.openai.com/v1"
+         assert config["host"] == "0.0.0.0"
+         assert config["port"] == 8080
+         assert "api_key" not in config
+
+     def test_interactive_setup_invalid_port(self, monkeypatch):
+         """Test interactive setup with invalid port falls back to default."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_file",
+             lambda: Path("/tmp/.oa2a/config.toml"),
+         )
+
+         # Mock input with invalid port
+         inputs = iter(["openai-api-key", "", "", "invalid", ""])
+         monkeypatch.setattr("builtins.input", lambda prompt: next(inputs))
+
+         config = interactive_setup()
+
+         assert config["port"] == 8080  # Should fall back to default
+
+     def test_interactive_setup_requires_api_key(self, monkeypatch):
+         """Test that interactive setup requires API key."""
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_file",
+             lambda: Path("/tmp/.oa2a/config.toml"),
+         )
+
+         # Mock input - first empty (rejected), then valid
+         inputs = iter(["", "openai-api-key", "", "", "", ""])
+         monkeypatch.setattr("builtins.input", lambda prompt: next(inputs))
+
+         config = interactive_setup()
+
+         assert config["openai_api_key"] == "openai-api-key"
+
+
+ class TestGetSettingsInteractive:
+     """Tests for get_settings with interactive mode."""
+
+     def test_get_settings_non_interactive_creates_default(
+         self, tmp_path, monkeypatch, capsys
+     ):
+         """Test get_settings in non-interactive mode creates default config."""
+         # Clear cache
+         get_settings.cache_clear()
+
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.is_interactive", lambda: False
+         )
+
+         settings = get_settings()
+
+         # Should create default config
+         assert get_config_file().exists()
+         assert isinstance(settings, Settings)
+
+         # Check notification
+         captured = capsys.readouterr()
+         assert "Created default config file" in captured.out
+
+     def test_get_settings_interactive_mode(self, tmp_path, monkeypatch, capsys):
+         """Test get_settings in interactive mode runs setup wizard."""
+         # Clear cache
+         get_settings.cache_clear()
+
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.get_config_dir", lambda: tmp_path / ".oa2a"
+         )
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.is_interactive", lambda: True
+         )
+
+         # Mock interactive_setup
+         mock_config = {
+             "openai_api_key": "test-key",
+             "host": "127.0.0.1",
+             "port": 9000,
+         }
+         monkeypatch.setattr(
+             "local_openai2anthropic.config.interactive_setup", lambda: mock_config
+         )
+
+         settings = get_settings()
+
+         # Should create config with interactive values
+         assert get_config_file().exists()
+         assert settings.openai_api_key == "test-key"
+         assert settings.host == "127.0.0.1"
+         assert settings.port == 9000
+
+         # Check notification
+         captured = capsys.readouterr()
+         assert "Configuration saved to" in captured.out
+
+
  if __name__ == "__main__":
      pytest.main([__file__, "-v"])