ccproxy-api 0.1.5-py3-none-any.whl → 0.1.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. ccproxy/_version.py +2 -2
  2. ccproxy/adapters/codex/__init__.py +11 -0
  3. ccproxy/adapters/openai/models.py +1 -1
  4. ccproxy/adapters/openai/response_adapter.py +355 -0
  5. ccproxy/adapters/openai/response_models.py +178 -0
  6. ccproxy/api/app.py +16 -0
  7. ccproxy/api/routes/codex.py +1231 -0
  8. ccproxy/api/routes/health.py +228 -3
  9. ccproxy/auth/openai/__init__.py +13 -0
  10. ccproxy/auth/openai/credentials.py +166 -0
  11. ccproxy/auth/openai/oauth_client.py +334 -0
  12. ccproxy/auth/openai/storage.py +184 -0
  13. ccproxy/claude_sdk/options.py +1 -1
  14. ccproxy/cli/commands/auth.py +398 -1
  15. ccproxy/cli/commands/serve.py +3 -1
  16. ccproxy/config/claude.py +1 -1
  17. ccproxy/config/codex.py +100 -0
  18. ccproxy/config/scheduler.py +4 -4
  19. ccproxy/config/settings.py +19 -0
  20. ccproxy/core/codex_transformers.py +389 -0
  21. ccproxy/core/http_transformers.py +153 -2
  22. ccproxy/models/detection.py +82 -0
  23. ccproxy/models/requests.py +22 -0
  24. ccproxy/models/responses.py +16 -0
  25. ccproxy/services/codex_detection_service.py +263 -0
  26. ccproxy/services/proxy_service.py +530 -0
  27. ccproxy/utils/model_mapping.py +7 -5
  28. ccproxy/utils/startup_helpers.py +62 -0
  29. ccproxy_api-0.1.6.dist-info/METADATA +615 -0
  30. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/RECORD +33 -22
  31. ccproxy_api-0.1.5.dist-info/METADATA +0 -396
  32. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/WHEEL +0 -0
  33. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/entry_points.txt +0 -0
  34. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/licenses/LICENSE +0 -0
ccproxy/models/responses.py
@@ -252,3 +252,19 @@ class InternalServerError(APIError):
     type: Annotated[
         Literal["internal_server_error"], Field(description="Error type")
     ] = "internal_server_error"
+
+
+class CodexResponse(BaseModel):
+    """OpenAI Codex completion response model."""
+
+    id: Annotated[str, Field(description="Response ID")]
+    model: Annotated[str, Field(description="Model used for completion")]
+    content: Annotated[str, Field(description="Generated content")]
+    finish_reason: Annotated[
+        str | None, Field(description="Reason the response finished")
+    ] = None
+    usage: Annotated[Usage | None, Field(description="Token usage information")] = None
+
+    model_config = ConfigDict(
+        extra="allow"
+    )  # Allow additional fields for compatibility
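The new CodexResponse model follows the same pattern as the surrounding error models: Annotated fields carrying Field descriptions, plus extra="allow" so fields the upstream API returns but the model does not declare survive validation instead of being rejected. A minimal usage sketch (values are illustrative, and the import path assumes this hunk belongs to ccproxy/models/responses.py as the file list suggests):

from ccproxy.models.responses import CodexResponse

# Illustrative values only; "service_tier" is not declared on the model but is
# retained because model_config sets extra="allow".
resp = CodexResponse(
    id="resp_abc123",
    model="codex-mini-latest",  # hypothetical model name
    content="print('hello world')",
    finish_reason="stop",
    service_tier="default",
)
print(resp.model_dump())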
ccproxy/services/codex_detection_service.py (new file)
@@ -0,0 +1,263 @@
+"""Service for automatically detecting Codex CLI headers at startup."""
+
+from __future__ import annotations
+
+import asyncio
+import json
+import os
+import socket
+import subprocess
+from typing import Any
+
+import structlog
+from fastapi import FastAPI, Request, Response
+
+from ccproxy.config.discovery import get_ccproxy_cache_dir
+from ccproxy.config.settings import Settings
+from ccproxy.models.detection import (
+    CodexCacheData,
+    CodexHeaders,
+    CodexInstructionsData,
+)
+
+
+logger = structlog.get_logger(__name__)
+
+
+class CodexDetectionService:
+    """Service for automatically detecting Codex CLI headers at startup."""
+
+    def __init__(self, settings: Settings) -> None:
+        """Initialize Codex detection service."""
+        self.settings = settings
+        self.cache_dir = get_ccproxy_cache_dir()
+        self.cache_dir.mkdir(parents=True, exist_ok=True)
+        self._cached_data: CodexCacheData | None = None
+
+    async def initialize_detection(self) -> CodexCacheData:
+        """Initialize Codex detection at startup."""
+        try:
+            # Get current Codex version
+            current_version = await self._get_codex_version()
+
+            # Try to load from cache first
+            detected_data = self._load_from_cache(current_version)
+            cached = detected_data is not None
+            if cached:
+                logger.debug("detection_codex_headers_debug", version=current_version)
+            else:
+                # No cache or version changed - detect fresh
+                detected_data = await self._detect_codex_headers(current_version)
+                # Cache the results
+                self._save_to_cache(detected_data)
+
+            self._cached_data = detected_data
+
+            logger.info(
+                "detection_codex_headers_completed",
+                version=current_version,
+                cached=cached,
+            )
+
+            # TODO: add proper testing without codex cli installed
+            if detected_data is None:
+                raise ValueError("Codex detection failed")
+            return detected_data
+
+        except Exception as e:
+            logger.warning("detection_codex_headers_failed", fallback=True, error=e)
+            # Return fallback data
+            fallback_data = self._get_fallback_data()
+            self._cached_data = fallback_data
+            return fallback_data
+
+    def get_cached_data(self) -> CodexCacheData | None:
+        """Get currently cached detection data."""
+        return self._cached_data
+
+    async def _get_codex_version(self) -> str:
+        """Get Codex CLI version."""
+        try:
+            result = subprocess.run(
+                ["codex", "--version"],
+                capture_output=True,
+                text=True,
+                timeout=10,
+            )
+            if result.returncode == 0:
+                # Extract version from output like "codex 0.21.0"
+                version_line = result.stdout.strip()
+                if " " in version_line:
+                    # Handle "codex 0.21.0" format - extract just the version number
+                    return version_line.split()[-1]
+                return version_line
+            else:
+                raise RuntimeError(f"Codex version command failed: {result.stderr}")
+
+        except (subprocess.TimeoutExpired, FileNotFoundError, RuntimeError) as e:
+            logger.warning("codex_version_detection_failed", error=str(e))
+            return "unknown"
+
+    async def _detect_codex_headers(self, version: str) -> CodexCacheData:
+        """Execute Codex CLI with proxy to capture headers and instructions."""
+        # Data captured from the request
+        captured_data: dict[str, Any] = {}
+
+        async def capture_handler(request: Request) -> Response:
+            """Capture the Codex CLI request."""
+            captured_data["headers"] = dict(request.headers)
+            captured_data["body"] = await request.body()
+            # Return a mock response to satisfy Codex CLI
+            return Response(
+                content='{"choices": [{"message": {"content": "Test response"}}]}',
+                media_type="application/json",
+                status_code=200,
+            )
+
+        # Create temporary FastAPI app
+        temp_app = FastAPI()
+        temp_app.post("/backend-api/codex/responses")(capture_handler)
+
+        # Find available port
+        sock = socket.socket()
+        sock.bind(("", 0))
+        port = sock.getsockname()[1]
+        sock.close()
+
+        # Start server in background
+        from uvicorn import Config, Server
+
+        config = Config(temp_app, host="127.0.0.1", port=port, log_level="error")
+        server = Server(config)
+
+        logger.debug("start")
+        server_task = asyncio.create_task(server.serve())
+
+        try:
+            # Wait for server to start
+            await asyncio.sleep(0.5)
+
+            # Execute Codex CLI with proxy
+            env = {
+                **dict(os.environ),
+                "OPENAI_BASE_URL": f"http://127.0.0.1:{port}/backend-api/codex",
+            }
+
+            process = await asyncio.create_subprocess_exec(
+                "codex",
+                "exec",
+                "test",
+                env=env,
+                stdout=asyncio.subprocess.PIPE,
+                stderr=asyncio.subprocess.PIPE,
+            )
+            # stderr = ""
+            # if process.stderr:
+            #     stderr = await process.stderr.read(128)
+            # stdout = ""
+            # if process.stdout:
+            #     stdout = await process.stdout.read(128)
+            # logger.warning("rcecdy", stderr=stderr, stdout=stdout)
+
+            # Wait for process with timeout
+            try:
+                await asyncio.wait_for(process.wait(), timeout=300)
+            except TimeoutError:
+                process.kill()
+                await process.wait()
+
+            # Stop server
+            server.should_exit = True
+            await server_task
+
+            if not captured_data:
+                raise RuntimeError("Failed to capture Codex CLI request")
+
+            # Extract headers and instructions
+            headers = self._extract_headers(captured_data["headers"])
+            instructions = self._extract_instructions(captured_data["body"])
+
+            return CodexCacheData(
+                codex_version=version, headers=headers, instructions=instructions
+            )
+
+        except Exception as e:
+            # Ensure server is stopped
+            server.should_exit = True
+            if not server_task.done():
+                await server_task
+            raise
+
+    def _load_from_cache(self, version: str) -> CodexCacheData | None:
+        """Load cached data for specific Codex version."""
+        cache_file = self.cache_dir / f"codex_headers_{version}.json"
+
+        if not cache_file.exists():
+            return None
+
+        try:
+            with cache_file.open("r") as f:
+                data = json.load(f)
+            return CodexCacheData.model_validate(data)
+        except Exception:
+            return None
+
+    def _save_to_cache(self, data: CodexCacheData) -> None:
+        """Save detection data to cache."""
+        cache_file = self.cache_dir / f"codex_headers_{data.codex_version}.json"
+
+        try:
+            with cache_file.open("w") as f:
+                json.dump(data.model_dump(), f, indent=2, default=str)
+            logger.debug(
+                "cache_saved", file=str(cache_file), version=data.codex_version
+            )
+        except Exception as e:
+            logger.warning("cache_save_failed", file=str(cache_file), error=str(e))
+
+    def _extract_headers(self, headers: dict[str, str]) -> CodexHeaders:
+        """Extract Codex CLI headers from captured request."""
+        try:
+            return CodexHeaders.model_validate(headers)
+        except Exception as e:
+            logger.error("header_extraction_failed", error=str(e))
+            raise ValueError(f"Failed to extract required headers: {e}") from e
+
+    def _extract_instructions(self, body: bytes) -> CodexInstructionsData:
+        """Extract instructions from captured request body."""
+        try:
+            data = json.loads(body.decode("utf-8"))
+            instructions_content = data.get("instructions")
+
+            if instructions_content is None:
+                raise ValueError("No instructions field found in request body")
+
+            return CodexInstructionsData(instructions_field=instructions_content)
+
+        except Exception as e:
+            logger.error("instructions_extraction_failed", error=str(e))
+            raise ValueError(f"Failed to extract instructions: {e}") from e
+
+    def _get_fallback_data(self) -> CodexCacheData:
+        """Get fallback data when detection fails."""
+        logger.warning("using_fallback_codex_data")
+
+        # Use hardcoded values as fallback from req.json
+        fallback_headers = CodexHeaders(
+            session_id="",  # Will be generated per request
+            originator="codex_cli_rs",
+            **{"openai-beta": "responses=experimental"},
+            version="0.21.0",
+            **{"chatgpt-account-id": ""},  # Will be set from auth
+        )
+
+        # Use exact instructions from req.json
+        fallback_instructions = CodexInstructionsData(
+            instructions_field='You are a coding agent running in the Codex CLI, a terminal-based coding assistant. Codex CLI is an open source project led by OpenAI. You are expected to be precise, safe, and helpful.\n\nYour capabilities:\n- Receive user prompts and other context provided by the harness, such as files in the workspace.\n- Communicate with the user by streaming thinking & responses, and by making & updating plans.\n- Emit function calls to run terminal commands and apply patches. Depending on how this specific run is configured, you can request that these function calls be escalated to the user for approval before running. More on this in the "Sandbox and approvals" section.\n\nWithin this context, Codex refers to the open-source agentic coding interface (not the old Codex language model built by OpenAI).'
+        )
+
+        return CodexCacheData(
+            codex_version="fallback",
+            headers=fallback_headers,
+            instructions=fallback_instructions,
+        )
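For orientation, CodexDetectionService is constructed from the application Settings and driven once at startup; the real wiring lives in ccproxy/utils/startup_helpers.py and ccproxy/api/app.py, which this diff view does not expand. A minimal sketch of that startup pattern, using the FastAPI lifespan hook as an assumed integration point:

from contextlib import asynccontextmanager

from fastapi import FastAPI

from ccproxy.config.settings import Settings
from ccproxy.services.codex_detection_service import CodexDetectionService


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Assumed wiring: run detection once at startup and expose the cached
    # headers/instructions to request handlers via app.state.
    service = CodexDetectionService(Settings())
    app.state.codex_detection = await service.initialize_detection()
    yield


app = FastAPI(lifespan=lifespan)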