ccproxy-api 0.1.5__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as published.
Files changed (34)
  1. ccproxy/_version.py +2 -2
  2. ccproxy/adapters/codex/__init__.py +11 -0
  3. ccproxy/adapters/openai/models.py +1 -1
  4. ccproxy/adapters/openai/response_adapter.py +355 -0
  5. ccproxy/adapters/openai/response_models.py +178 -0
  6. ccproxy/api/app.py +16 -0
  7. ccproxy/api/routes/codex.py +1231 -0
  8. ccproxy/api/routes/health.py +228 -3
  9. ccproxy/auth/openai/__init__.py +13 -0
  10. ccproxy/auth/openai/credentials.py +166 -0
  11. ccproxy/auth/openai/oauth_client.py +334 -0
  12. ccproxy/auth/openai/storage.py +184 -0
  13. ccproxy/claude_sdk/options.py +1 -1
  14. ccproxy/cli/commands/auth.py +398 -1
  15. ccproxy/cli/commands/serve.py +3 -1
  16. ccproxy/config/claude.py +1 -1
  17. ccproxy/config/codex.py +100 -0
  18. ccproxy/config/scheduler.py +4 -4
  19. ccproxy/config/settings.py +19 -0
  20. ccproxy/core/codex_transformers.py +389 -0
  21. ccproxy/core/http_transformers.py +153 -2
  22. ccproxy/models/detection.py +82 -0
  23. ccproxy/models/requests.py +22 -0
  24. ccproxy/models/responses.py +16 -0
  25. ccproxy/services/codex_detection_service.py +263 -0
  26. ccproxy/services/proxy_service.py +530 -0
  27. ccproxy/utils/model_mapping.py +7 -5
  28. ccproxy/utils/startup_helpers.py +62 -0
  29. ccproxy_api-0.1.6.dist-info/METADATA +615 -0
  30. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/RECORD +33 -22
  31. ccproxy_api-0.1.5.dist-info/METADATA +0 -396
  32. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/WHEEL +0 -0
  33. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/entry_points.txt +0 -0
  34. {ccproxy_api-0.1.5.dist-info → ccproxy_api-0.1.6.dist-info}/licenses/LICENSE +0 -0
ccproxy/services/proxy_service.py

@@ -13,9 +13,11 @@ import httpx
 import structlog
 from fastapi import HTTPException, Request
 from fastapi.responses import StreamingResponse
+from starlette.responses import Response
 from typing_extensions import TypedDict

 from ccproxy.config.settings import Settings
+from ccproxy.core.codex_transformers import CodexRequestTransformer
 from ccproxy.core.http import BaseProxyClient
 from ccproxy.core.http_transformers import (
     HTTPRequestTransformer,
@@ -107,6 +109,7 @@ class ProxyService:
         # Create concrete transformers
         self.request_transformer = HTTPRequestTransformer()
         self.response_transformer = HTTPResponseTransformer()
+        self.codex_transformer = CodexRequestTransformer()

         # Create OpenAI adapter for stream transformation
        from ccproxy.adapters.openai.adapter import OpenAIAdapter
@@ -414,6 +417,384 @@ class ProxyService:
             ctx.add_metadata(error=e)
             raise

+    async def handle_codex_request(
+        self,
+        method: str,
+        path: str,
+        session_id: str,
+        access_token: str,
+        request: Request,
+        settings: Settings,
+    ) -> StreamingResponse | Response:
+        """Handle OpenAI Codex proxy request with request/response capture.
+
+        Args:
+            method: HTTP method
+            path: Request path (e.g., "/responses" or "/{session_id}/responses")
+            session_id: Resolved session ID
+            access_token: OpenAI access token
+            request: FastAPI request object
+            settings: Application settings
+
+        Returns:
+            StreamingResponse or regular Response
+        """
+        try:
+            # Read request body - check if already stored by middleware
+            if hasattr(request.state, "body"):
+                body = request.state.body
+            else:
+                body = await request.body()
+
+            # Parse request data to capture the instructions field and other metadata
+            request_data = None
+            try:
+                request_data = json.loads(body.decode("utf-8")) if body else {}
+            except (json.JSONDecodeError, UnicodeDecodeError) as e:
+                request_data = {}
+                logger.warning(
+                    "codex_json_decode_failed",
+                    error=str(e),
+                    body_preview=body[:100].decode("utf-8", errors="replace")
+                    if body
+                    else None,
+                    body_length=len(body) if body else 0,
+                )
+
+            # Parse request to extract account_id from token if available
+            import jwt
+
+            account_id = "unknown"
+            try:
+                decoded = jwt.decode(access_token, options={"verify_signature": False})
+                account_id = decoded.get(
+                    "org_id", decoded.get("sub", decoded.get("account_id", "unknown"))
+                )
+            except Exception:
+                pass
+
+            # Get Codex detection data from app state
+            codex_detection_data = None
+            if self.app_state and hasattr(self.app_state, "codex_detection_data"):
+                codex_detection_data = self.app_state.codex_detection_data
+
+            # Use CodexRequestTransformer to build request
+            original_headers = dict(request.headers)
+            transformed_request = await self.codex_transformer.transform_codex_request(
+                method=method,
+                path=path,
+                headers=original_headers,
+                body=body,
+                access_token=access_token,
+                session_id=session_id,
+                account_id=account_id,
+                codex_detection_data=codex_detection_data,
+                target_base_url=settings.codex.base_url,
+            )
+
+            target_url = transformed_request["url"]
+            headers = transformed_request["headers"]
+            transformed_body = transformed_request["body"] or body
+
+            # Parse transformed body for logging
+            transformed_request_data = request_data
+            if transformed_body and transformed_body != body:
+                try:
+                    transformed_request_data = json.loads(
+                        transformed_body.decode("utf-8")
+                    )
+                except (json.JSONDecodeError, UnicodeDecodeError):
+                    transformed_request_data = request_data
+
+            # Generate request ID for logging
+            from uuid import uuid4
+
+            request_id = f"codex_{uuid4().hex[:8]}"
+
+            # Log Codex request (including instructions field and headers)
+            await self._log_codex_request(
+                request_id=request_id,
+                method=method,
+                url=target_url,
+                headers=headers,
+                body_data=transformed_request_data,
+                session_id=session_id,
+            )
+
+            # Check if user explicitly requested streaming (from original request)
+            user_requested_streaming = self.codex_transformer._is_streaming_request(
+                body
+            )
+
+            # Forward request to ChatGPT backend
+            if user_requested_streaming:
+                # Handle streaming request with proper context management
+                # First, collect the response to check for errors
+                collected_chunks = []
+                chunk_count = 0
+                total_bytes = 0
+                response_status_code = 200
+                response_headers = {}
+
+                async def stream_codex_response() -> AsyncGenerator[bytes, None]:
+                    nonlocal \
+                        collected_chunks, \
+                        chunk_count, \
+                        total_bytes, \
+                        response_status_code, \
+                        response_headers
+
+                    logger.debug(
+                        "proxy_service_streaming_started",
+                        request_id=request_id,
+                        session_id=session_id,
+                    )
+
+                    async with (
+                        httpx.AsyncClient(timeout=240.0) as client,
+                        client.stream(
+                            method=method,
+                            url=target_url,
+                            headers=headers,
+                            content=transformed_body,
+                        ) as response,
+                    ):
+                        # Capture response info for error checking
+                        response_status_code = response.status_code
+                        response_headers = dict(response.headers)
+
+                        # Log response headers for streaming
+                        await self._log_codex_response_headers(
+                            request_id=request_id,
+                            status_code=response.status_code,
+                            headers=dict(response.headers),
+                            stream_type="codex_sse",
+                        )
+
+                        # Check if upstream actually returned streaming
+                        content_type = response.headers.get("content-type", "")
+                        is_streaming = "text/event-stream" in content_type
+
+                        if not is_streaming:
+                            logger.warning(
+                                "codex_expected_streaming_but_got_regular",
+                                content_type=content_type,
+                                status_code=response.status_code,
+                            )
+
+                        async for chunk in response.aiter_bytes():
+                            chunk_count += 1
+                            chunk_size = len(chunk)
+                            total_bytes += chunk_size
+                            collected_chunks.append(chunk)
+
+                            logger.debug(
+                                "proxy_service_streaming_chunk",
+                                request_id=request_id,
+                                chunk_number=chunk_count,
+                                chunk_size=chunk_size,
+                                total_bytes=total_bytes,
+                            )
+
+                            yield chunk
+
+                    logger.debug(
+                        "proxy_service_streaming_complete",
+                        request_id=request_id,
+                        total_chunks=chunk_count,
+                        total_bytes=total_bytes,
+                    )
+
+                    # Log the complete stream data after streaming finishes
+                    await self._log_codex_streaming_complete(
+                        request_id=request_id,
+                        chunks=collected_chunks,
+                    )
+
+                # Execute the stream generator to collect the response
+                generator_chunks = []
+                async for chunk in stream_codex_response():
+                    generator_chunks.append(chunk)
+
+                # Now check if this should be an error response
+                content_type = response_headers.get("content-type", "")
+                if (
+                    response_status_code >= 400
+                    and "text/event-stream" not in content_type
+                ):
+                    # Return error as regular Response with proper status code
+                    error_content = b"".join(collected_chunks)
+                    logger.warning(
+                        "codex_returning_error_as_regular_response",
+                        status_code=response_status_code,
+                        content_type=content_type,
+                        content_preview=error_content[:200].decode(
+                            "utf-8", errors="replace"
+                        ),
+                    )
+                    return Response(
+                        content=error_content,
+                        status_code=response_status_code,
+                        headers=response_headers,
+                    )
+
+                # Return normal streaming response
+                async def replay_stream() -> AsyncGenerator[bytes, None]:
+                    for chunk in generator_chunks:
+                        yield chunk
+
+                # Forward upstream headers but filter out incompatible ones for streaming
+                streaming_headers = dict(response_headers)
+                # Remove headers that conflict with streaming responses
+                streaming_headers.pop("content-length", None)
+                streaming_headers.pop("content-encoding", None)
+                streaming_headers.pop("date", None)
+                # Set streaming-specific headers
+                streaming_headers.update(
+                    {
+                        "content-type": "text/event-stream",
+                        "cache-control": "no-cache",
+                        "connection": "keep-alive",
+                    }
+                )
+
+                return StreamingResponse(
+                    replay_stream(),
+                    media_type="text/event-stream",
+                    headers=streaming_headers,
+                )
+            else:
+                # Handle non-streaming request
+                async with httpx.AsyncClient(timeout=240.0) as client:
+                    response = await client.request(
+                        method=method,
+                        url=target_url,
+                        headers=headers,
+                        content=transformed_body,
+                    )
+
+                    # Check if upstream response is streaming (shouldn't happen)
+                    content_type = response.headers.get("content-type", "")
+                    transfer_encoding = response.headers.get("transfer-encoding", "")
+                    upstream_is_streaming = "text/event-stream" in content_type or (
+                        transfer_encoding == "chunked" and content_type == ""
+                    )
+
+                    logger.debug(
+                        "codex_response_non_streaming",
+                        content_type=content_type,
+                        user_requested_streaming=user_requested_streaming,
+                        upstream_is_streaming=upstream_is_streaming,
+                        transfer_encoding=transfer_encoding,
+                    )
+
+                    if upstream_is_streaming:
+                        # Upstream is streaming but user didn't request streaming
+                        # Collect all streaming data and return as JSON
+                        logger.debug(
+                            "converting_upstream_stream_to_json", request_id=request_id
+                        )
+
+                        collected_chunks = []
+                        async for chunk in response.aiter_bytes():
+                            collected_chunks.append(chunk)
+
+                        # Combine all chunks
+                        full_content = b"".join(collected_chunks)
+
+                        # Try to parse the streaming data and extract the final response
+                        try:
+                            # Parse SSE data to extract JSON response
+                            content_str = full_content.decode("utf-8")
+                            lines = content_str.strip().split("\n")
+
+                            # Look for the last data line with JSON content
+                            final_json = None
+                            for line in reversed(lines):
+                                if line.startswith("data: ") and not line.endswith(
+                                    "[DONE]"
+                                ):
+                                    try:
+                                        json_str = line[6:]  # Remove "data: " prefix
+                                        final_json = json.loads(json_str)
+                                        break
+                                    except json.JSONDecodeError:
+                                        continue
+
+                            if final_json:
+                                response_content = json.dumps(final_json).encode(
+                                    "utf-8"
+                                )
+                            else:
+                                # Fallback: return the raw content
+                                response_content = full_content
+
+                        except (UnicodeDecodeError, json.JSONDecodeError):
+                            # Fallback: return raw content
+                            response_content = full_content
+
+                        # Log the complete response
+                        try:
+                            response_data = json.loads(response_content.decode("utf-8"))
+                        except (json.JSONDecodeError, UnicodeDecodeError):
+                            response_data = {
+                                "raw_content": response_content.decode(
+                                    "utf-8", errors="replace"
+                                )
+                            }
+
+                        await self._log_codex_response(
+                            request_id=request_id,
+                            status_code=response.status_code,
+                            headers=dict(response.headers),
+                            body_data=response_data,
+                        )
+
+                        # Return as JSON response
+                        return Response(
+                            content=response_content,
+                            status_code=response.status_code,
+                            headers={
+                                "content-type": "application/json",
+                                "content-length": str(len(response_content)),
+                            },
+                            media_type="application/json",
+                        )
+                    else:
+                        # For regular non-streaming responses
+                        response_data = None
+                        try:
+                            response_data = (
+                                json.loads(response.content.decode("utf-8"))
+                                if response.content
+                                else {}
+                            )
+                        except (json.JSONDecodeError, UnicodeDecodeError):
+                            response_data = {
+                                "raw_content": response.content.decode(
+                                    "utf-8", errors="replace"
+                                )
+                            }
+
+                        await self._log_codex_response(
+                            request_id=request_id,
+                            status_code=response.status_code,
+                            headers=dict(response.headers),
+                            body_data=response_data,
+                        )
+
+                        # Return regular response
+                        return Response(
+                            content=response.content,
+                            status_code=response.status_code,
+                            headers=dict(response.headers),
+                            media_type=response.headers.get("content-type"),
+                        )
+
+        except Exception as e:
+            logger.error("Codex request failed", error=str(e), session_id=session_id)
+            raise
+
     async def _get_access_token(self) -> str:
         """Get access token for upstream authentication.

@@ -584,6 +965,155 @@ class ProxyService:
             timestamp=timestamp,
         )

+    async def _log_codex_request(
+        self,
+        request_id: str,
+        method: str,
+        url: str,
+        headers: dict[str, str],
+        body_data: dict[str, Any] | None,
+        session_id: str,
+    ) -> None:
+        """Log outgoing Codex request preserving instructions field exactly."""
+        if not self._verbose_api:
+            return
+
+        # Log to console with redacted headers
+        logger.info(
+            "verbose_codex_request",
+            request_id=request_id,
+            method=method,
+            url=url,
+            headers=self._redact_headers(headers),
+            session_id=session_id,
+            instructions_preview=(
+                body_data.get("instructions", "")[:100] + "..."
+                if body_data and body_data.get("instructions")
+                else None
+            ),
+        )
+
+        # Save complete request to file (without redaction)
+        timestamp = time.strftime("%Y%m%d_%H%M%S")
+        await write_request_log(
+            request_id=request_id,
+            log_type="codex_request",
+            data={
+                "method": method,
+                "url": url,
+                "headers": dict(headers),
+                "body": body_data,
+                "session_id": session_id,
+            },
+            timestamp=timestamp,
+        )
+
+    async def _log_codex_response(
+        self,
+        request_id: str,
+        status_code: int,
+        headers: dict[str, str],
+        body_data: dict[str, Any] | None,
+    ) -> None:
+        """Log complete non-streaming Codex response."""
+        if not self._verbose_api:
+            return
+
+        # Log to console with redacted headers
+        logger.info(
+            "verbose_codex_response",
+            request_id=request_id,
+            status_code=status_code,
+            headers=self._redact_headers(headers),
+            response_type="non_streaming",
+        )
+
+        # Save complete response to file
+        timestamp = time.strftime("%Y%m%d_%H%M%S")
+        await write_request_log(
+            request_id=request_id,
+            log_type="codex_response",
+            data={
+                "status_code": status_code,
+                "headers": dict(headers),
+                "body": body_data,
+            },
+            timestamp=timestamp,
+        )
+
+    async def _log_codex_response_headers(
+        self,
+        request_id: str,
+        status_code: int,
+        headers: dict[str, str],
+        stream_type: str,
+    ) -> None:
+        """Log streaming Codex response headers."""
+        if not self._verbose_api:
+            return
+
+        # Log to console with redacted headers
+        logger.info(
+            "verbose_codex_response_headers",
+            request_id=request_id,
+            status_code=status_code,
+            headers=self._redact_headers(headers),
+            stream_type=stream_type,
+        )
+
+        # Save response headers to file
+        timestamp = time.strftime("%Y%m%d_%H%M%S")
+        await write_request_log(
+            request_id=request_id,
+            log_type="codex_response_headers",
+            data={
+                "status_code": status_code,
+                "headers": dict(headers),
+                "stream_type": stream_type,
+            },
+            timestamp=timestamp,
+        )
+
+    async def _log_codex_streaming_complete(
+        self,
+        request_id: str,
+        chunks: list[bytes],
+    ) -> None:
+        """Log complete streaming data after stream finishes."""
+        if not self._verbose_api:
+            return
+
+        # Combine chunks and decode for analysis
+        complete_data = b"".join(chunks)
+        try:
+            decoded_data = complete_data.decode("utf-8", errors="replace")
+        except Exception:
+            decoded_data = f"<binary data of length {len(complete_data)}>"
+
+        # Log to console with preview
+        logger.info(
+            "verbose_codex_streaming_complete",
+            request_id=request_id,
+            total_bytes=len(complete_data),
+            chunk_count=len(chunks),
+            data_preview=decoded_data[:200] + "..."
+            if len(decoded_data) > 200
+            else decoded_data,
+        )
+
+        # Save complete streaming data to file
+        timestamp = time.strftime("%Y%m%d_%H%M%S")
+        await write_request_log(
+            request_id=request_id,
+            log_type="codex_streaming_complete",
+            data={
+                "total_bytes": len(complete_data),
+                "chunk_count": len(chunks),
+                "complete_data": decoded_data,
+            },
+            timestamp=timestamp,
+        )
+
     def _should_stream_response(self, headers: dict[str, str]) -> bool:
         """Check if response should be streamed based on request headers.

ccproxy/utils/model_mapping.py

@@ -9,6 +9,7 @@ from __future__ import annotations

 # Combined mapping: OpenAI models → Claude models AND Claude aliases → canonical Claude models
 MODEL_MAPPING: dict[str, str] = {
+    "gpt-5": "claude-sonnet-4-20250514",
     # OpenAI GPT-4 models → Claude 3.5 Sonnet (most comparable)
     "gpt-4": "claude-3-5-sonnet-20241022",
     "gpt-4-turbo": "claude-3-5-sonnet-20241022",

@@ -80,11 +81,12 @@ def map_model_to_claude(model_name: str) -> str:
         return "claude-3-7-sonnet-20250219"
     elif model_name.startswith("gpt-3.5"):
         return "claude-3-5-haiku-latest"
-    elif model_name.startswith("o1"):
-        return "claude-sonnet-4-20250514"
-    elif model_name.startswith("o3"):
-        return "claude-opus-4-20250514"
-    elif model_name.startswith("gpt"):
+    elif (
+        model_name.startswith("o1")
+        or model_name.startswith("gpt-5")
+        or model_name.startswith("o3")
+        or model_name.startswith("gpt")
+    ):
         return "claude-sonnet-4-20250514"

     # If it's already a Claude model, pass through unchanged
ccproxy/utils/startup_helpers.py

@@ -23,6 +23,7 @@ from ccproxy.scheduler.errors import SchedulerError
 from ccproxy.scheduler.manager import start_scheduler, stop_scheduler
 from ccproxy.services.claude_detection_service import ClaudeDetectionService
 from ccproxy.services.claude_sdk_service import ClaudeSDKService
+from ccproxy.services.codex_detection_service import CodexDetectionService
 from ccproxy.services.credentials.manager import CredentialsManager


@@ -126,6 +127,41 @@ async def check_claude_cli_startup(app: FastAPI, settings: Settings) -> None:
         )


+async def check_codex_cli_startup(app: FastAPI, settings: Settings) -> None:
+    """Check Codex CLI availability at startup.
+
+    Args:
+        app: FastAPI application instance
+        settings: Application settings
+    """
+    try:
+        from ccproxy.api.routes.health import get_codex_cli_info
+
+        codex_info = await get_codex_cli_info()
+
+        if codex_info.status == "available":
+            logger.info(
+                "codex_cli_available",
+                status=codex_info.status,
+                version=codex_info.version,
+                binary_path=codex_info.binary_path,
+            )
+        else:
+            logger.warning(
+                "codex_cli_unavailable",
+                status=codex_info.status,
+                error=codex_info.error,
+                binary_path=codex_info.binary_path,
+                message=f"Codex CLI status: {codex_info.status}",
+            )
+    except Exception as e:
+        logger.error(
+            "codex_cli_check_failed",
+            error=str(e),
+            message="Failed to check Codex CLI status during startup",
+        )
+
+
 async def initialize_log_storage_startup(app: FastAPI, settings: Settings) -> None:
     """Initialize log storage if needed and backend is DuckDB.

@@ -261,6 +297,32 @@ async def initialize_claude_detection_startup(app: FastAPI, settings: Settings)
     app.state.claude_detection_service = detection_service


+async def initialize_codex_detection_startup(app: FastAPI, settings: Settings) -> None:
+    """Initialize Codex detection service.
+
+    Args:
+        app: FastAPI application instance
+        settings: Application settings
+    """
+    try:
+        logger.debug("initializing_codex_detection")
+        detection_service = CodexDetectionService(settings)
+        codex_data = await detection_service.initialize_detection()
+        app.state.codex_detection_data = codex_data
+        app.state.codex_detection_service = detection_service
+        logger.debug(
+            "codex_detection_completed",
+            version=codex_data.codex_version,
+            cached_at=codex_data.cached_at.isoformat(),
+        )
+    except Exception as e:
+        logger.error("codex_detection_startup_failed", error=str(e))
+        # Continue startup with fallback - detection service will provide fallback data
+        detection_service = CodexDetectionService(settings)
+        app.state.codex_detection_data = detection_service._get_fallback_data()
+        app.state.codex_detection_service = detection_service
+
+
 async def initialize_claude_sdk_startup(app: FastAPI, settings: Settings) -> None:
     """Initialize ClaudeSDKService and store in app state.