code-puppy 0.0.302__py3-none-any.whl → 0.0.335__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. code_puppy/agents/base_agent.py +343 -35
  2. code_puppy/chatgpt_codex_client.py +283 -0
  3. code_puppy/cli_runner.py +898 -0
  4. code_puppy/command_line/add_model_menu.py +23 -1
  5. code_puppy/command_line/autosave_menu.py +271 -35
  6. code_puppy/command_line/colors_menu.py +520 -0
  7. code_puppy/command_line/command_handler.py +8 -2
  8. code_puppy/command_line/config_commands.py +82 -10
  9. code_puppy/command_line/core_commands.py +70 -7
  10. code_puppy/command_line/diff_menu.py +5 -0
  11. code_puppy/command_line/mcp/custom_server_form.py +4 -0
  12. code_puppy/command_line/mcp/edit_command.py +3 -1
  13. code_puppy/command_line/mcp/handler.py +7 -2
  14. code_puppy/command_line/mcp/install_command.py +8 -3
  15. code_puppy/command_line/mcp/install_menu.py +5 -1
  16. code_puppy/command_line/mcp/logs_command.py +173 -64
  17. code_puppy/command_line/mcp/restart_command.py +7 -2
  18. code_puppy/command_line/mcp/search_command.py +10 -4
  19. code_puppy/command_line/mcp/start_all_command.py +16 -6
  20. code_puppy/command_line/mcp/start_command.py +3 -1
  21. code_puppy/command_line/mcp/status_command.py +2 -1
  22. code_puppy/command_line/mcp/stop_all_command.py +5 -1
  23. code_puppy/command_line/mcp/stop_command.py +3 -1
  24. code_puppy/command_line/mcp/wizard_utils.py +10 -4
  25. code_puppy/command_line/model_settings_menu.py +58 -7
  26. code_puppy/command_line/motd.py +13 -7
  27. code_puppy/command_line/onboarding_slides.py +180 -0
  28. code_puppy/command_line/onboarding_wizard.py +340 -0
  29. code_puppy/command_line/prompt_toolkit_completion.py +16 -2
  30. code_puppy/command_line/session_commands.py +11 -4
  31. code_puppy/config.py +106 -17
  32. code_puppy/http_utils.py +155 -196
  33. code_puppy/keymap.py +8 -0
  34. code_puppy/main.py +5 -828
  35. code_puppy/mcp_/__init__.py +17 -0
  36. code_puppy/mcp_/blocking_startup.py +61 -32
  37. code_puppy/mcp_/config_wizard.py +5 -1
  38. code_puppy/mcp_/managed_server.py +23 -3
  39. code_puppy/mcp_/manager.py +65 -0
  40. code_puppy/mcp_/mcp_logs.py +224 -0
  41. code_puppy/messaging/__init__.py +20 -4
  42. code_puppy/messaging/bus.py +64 -0
  43. code_puppy/messaging/markdown_patches.py +57 -0
  44. code_puppy/messaging/messages.py +16 -0
  45. code_puppy/messaging/renderers.py +21 -9
  46. code_puppy/messaging/rich_renderer.py +113 -67
  47. code_puppy/messaging/spinner/console_spinner.py +34 -0
  48. code_puppy/model_factory.py +271 -45
  49. code_puppy/model_utils.py +57 -48
  50. code_puppy/models.json +21 -7
  51. code_puppy/plugins/__init__.py +12 -0
  52. code_puppy/plugins/antigravity_oauth/__init__.py +10 -0
  53. code_puppy/plugins/antigravity_oauth/accounts.py +406 -0
  54. code_puppy/plugins/antigravity_oauth/antigravity_model.py +612 -0
  55. code_puppy/plugins/antigravity_oauth/config.py +42 -0
  56. code_puppy/plugins/antigravity_oauth/constants.py +136 -0
  57. code_puppy/plugins/antigravity_oauth/oauth.py +478 -0
  58. code_puppy/plugins/antigravity_oauth/register_callbacks.py +406 -0
  59. code_puppy/plugins/antigravity_oauth/storage.py +271 -0
  60. code_puppy/plugins/antigravity_oauth/test_plugin.py +319 -0
  61. code_puppy/plugins/antigravity_oauth/token.py +167 -0
  62. code_puppy/plugins/antigravity_oauth/transport.py +595 -0
  63. code_puppy/plugins/antigravity_oauth/utils.py +169 -0
  64. code_puppy/plugins/chatgpt_oauth/config.py +5 -1
  65. code_puppy/plugins/chatgpt_oauth/oauth_flow.py +5 -6
  66. code_puppy/plugins/chatgpt_oauth/register_callbacks.py +5 -3
  67. code_puppy/plugins/chatgpt_oauth/test_plugin.py +26 -11
  68. code_puppy/plugins/chatgpt_oauth/utils.py +180 -65
  69. code_puppy/plugins/claude_code_oauth/register_callbacks.py +30 -0
  70. code_puppy/plugins/claude_code_oauth/utils.py +1 -0
  71. code_puppy/plugins/shell_safety/agent_shell_safety.py +1 -118
  72. code_puppy/plugins/shell_safety/register_callbacks.py +44 -3
  73. code_puppy/prompts/codex_system_prompt.md +310 -0
  74. code_puppy/pydantic_patches.py +131 -0
  75. code_puppy/reopenable_async_client.py +8 -8
  76. code_puppy/terminal_utils.py +291 -0
  77. code_puppy/tools/agent_tools.py +34 -9
  78. code_puppy/tools/command_runner.py +344 -27
  79. code_puppy/tools/file_operations.py +33 -45
  80. code_puppy/uvx_detection.py +242 -0
  81. {code_puppy-0.0.302.data → code_puppy-0.0.335.data}/data/code_puppy/models.json +21 -7
  82. {code_puppy-0.0.302.dist-info → code_puppy-0.0.335.dist-info}/METADATA +30 -1
  83. {code_puppy-0.0.302.dist-info → code_puppy-0.0.335.dist-info}/RECORD +87 -64
  84. {code_puppy-0.0.302.data → code_puppy-0.0.335.data}/data/code_puppy/models_dev_api.json +0 -0
  85. {code_puppy-0.0.302.dist-info → code_puppy-0.0.335.dist-info}/WHEEL +0 -0
  86. {code_puppy-0.0.302.dist-info → code_puppy-0.0.335.dist-info}/entry_points.txt +0 -0
  87. {code_puppy-0.0.302.dist-info → code_puppy-0.0.335.dist-info}/licenses/LICENSE +0 -0
code_puppy/plugins/antigravity_oauth/utils.py
@@ -0,0 +1,169 @@
+"""Utility helpers for the Antigravity OAuth plugin."""
+
+from __future__ import annotations
+
+import json
+import logging
+from typing import Any, Dict, List, Optional
+
+from .config import (
+    ANTIGRAVITY_OAUTH_CONFIG,
+    get_antigravity_models_path,
+    get_token_storage_path,
+)
+from .constants import ANTIGRAVITY_ENDPOINT, ANTIGRAVITY_HEADERS, ANTIGRAVITY_MODELS
+
+logger = logging.getLogger(__name__)
+
+
+def load_stored_tokens() -> Optional[Dict[str, Any]]:
+    """Load stored OAuth tokens from disk."""
+    try:
+        token_path = get_token_storage_path()
+        if token_path.exists():
+            with open(token_path, "r", encoding="utf-8") as f:
+                return json.load(f)
+    except Exception as e:
+        logger.error("Failed to load tokens: %s", e)
+    return None
+
+
+def save_tokens(tokens: Dict[str, Any]) -> bool:
+    """Save OAuth tokens to disk."""
+    try:
+        token_path = get_token_storage_path()
+        with open(token_path, "w", encoding="utf-8") as f:
+            json.dump(tokens, f, indent=2)
+        token_path.chmod(0o600)
+        return True
+    except Exception as e:
+        logger.error("Failed to save tokens: %s", e)
+        return False
+
+
+def load_antigravity_models() -> Dict[str, Any]:
+    """Load configured Antigravity models from disk."""
+    try:
+        models_path = get_antigravity_models_path()
+        if models_path.exists():
+            with open(models_path, "r", encoding="utf-8") as f:
+                return json.load(f)
+    except Exception as e:
+        logger.error("Failed to load Antigravity models: %s", e)
+    return {}
+
+
+def save_antigravity_models(models: Dict[str, Any]) -> bool:
+    """Save Antigravity models configuration to disk."""
+    try:
+        models_path = get_antigravity_models_path()
+        with open(models_path, "w", encoding="utf-8") as f:
+            json.dump(models, f, indent=2)
+        return True
+    except Exception as e:
+        logger.error("Failed to save Antigravity models: %s", e)
+        return False
+
+
+def add_models_to_config(access_token: str, project_id: str = "") -> bool:
+    """Add all available Antigravity models to the configuration."""
+    try:
+        models_config: Dict[str, Any] = {}
+        prefix = ANTIGRAVITY_OAUTH_CONFIG["prefix"]
+
+        for model_id, model_info in ANTIGRAVITY_MODELS.items():
+            prefixed_name = f"{prefix}{model_id}"
+
+            # Build custom headers
+            headers = dict(ANTIGRAVITY_HEADERS)
+
+            # Use custom_gemini type with Antigravity transport
+            models_config[prefixed_name] = {
+                "type": "custom_gemini",
+                "name": model_id,
+                "custom_endpoint": {
+                    "url": ANTIGRAVITY_ENDPOINT,
+                    "api_key": access_token,
+                    "headers": headers,
+                },
+                "project_id": project_id,
+                "context_length": model_info.get("context_length", 200000),
+                "family": model_info.get("family", "other"),
+                "oauth_source": "antigravity-plugin",
+                "antigravity": True,  # Flag to use Antigravity transport
+            }
+
+            # Add thinking budget if present
+            if model_info.get("thinking_budget"):
+                models_config[prefixed_name]["thinking_budget"] = model_info[
+                    "thinking_budget"
+                ]
+
+        if save_antigravity_models(models_config):
+            logger.info("Added %d Antigravity models", len(models_config))
+            return True
+
+    except Exception as e:
+        logger.error("Error adding models to config: %s", e)
+    return False
+
+
+def remove_antigravity_models() -> int:
+    """Remove all Antigravity models from configuration."""
+    try:
+        models = load_antigravity_models()
+        to_remove = [
+            name
+            for name, config in models.items()
+            if config.get("oauth_source") == "antigravity-plugin"
+        ]
+
+        if not to_remove:
+            return 0
+
+        for model_name in to_remove:
+            models.pop(model_name, None)
+
+        if save_antigravity_models(models):
+            return len(to_remove)
+    except Exception as e:
+        logger.error("Error removing Antigravity models: %s", e)
+    return 0
+
+
+def get_model_families_summary() -> Dict[str, List[str]]:
+    """Get a summary of available models by family."""
+    families: Dict[str, List[str]] = {
+        "gemini": [],
+        "claude": [],
+        "other": [],
+    }
+
+    for model_id, info in ANTIGRAVITY_MODELS.items():
+        family = info.get("family", "other")
+        if family in families:
+            families[family].append(model_id)
+
+    return families
+
+
+def reload_current_agent() -> None:
+    """Reload the current agent so new auth tokens are picked up immediately."""
+    try:
+        from code_puppy.agents import get_current_agent
+
+        current_agent = get_current_agent()
+        if current_agent is None:
+            logger.debug("No current agent to reload")
+            return
+
+        if hasattr(current_agent, "refresh_config"):
+            try:
+                current_agent.refresh_config()
+            except Exception:
+                pass
+
+        current_agent.reload_code_generation_agent()
+        logger.info("Active agent reloaded with new authentication")
+    except Exception as e:
+        logger.warning("Agent reload failed: %s", e)
code_puppy/plugins/chatgpt_oauth/config.py
@@ -9,7 +9,8 @@ CHATGPT_OAUTH_CONFIG: Dict[str, Any] = {
     "issuer": "https://auth.openai.com",
     "auth_url": "https://auth.openai.com/oauth/authorize",
     "token_url": "https://auth.openai.com/oauth/token",
-    "api_base_url": "https://api.openai.com",
+    # API endpoints - Codex uses chatgpt.com backend, not api.openai.com
+    "api_base_url": "https://chatgpt.com/backend-api/codex",
     # OAuth client configuration for Code Puppy
     "client_id": "app_EMoamEEZ73f0CkXaXp7hrann",
     "scope": "openid profile email offline_access",
@@ -24,6 +25,9 @@ CHATGPT_OAUTH_CONFIG: Dict[str, Any] = {
     "prefix": "chatgpt-",
     "default_context_length": 272000,
     "api_key_env_var": "CHATGPT_OAUTH_API_KEY",
+    # Codex CLI version info (for User-Agent header)
+    "client_version": "0.72.0",
+    "originator": "codex_cli_rs",
 }
 
 
code_puppy/plugins/chatgpt_oauth/oauth_flow.py
@@ -19,7 +19,6 @@ from .config import CHATGPT_OAUTH_CONFIG
 from .utils import (
     add_models_to_extra_config,
     assign_redirect_uri,
-    fetch_chatgpt_models,
     load_stored_tokens,
     parse_jwt_claims,
     prepare_oauth_context,
@@ -318,12 +317,12 @@ def run_oauth_flow() -> None:
     )
 
     if api_key:
-        emit_info("Fetching available ChatGPT models…")
-        models = fetch_chatgpt_models(api_key)
+        emit_info("Registering ChatGPT Codex models…")
+        from .utils import DEFAULT_CODEX_MODELS
+
+        models = DEFAULT_CODEX_MODELS
         if models:
-            if add_models_to_extra_config(models, api_key):
+            if add_models_to_extra_config(models):
                 emit_success(
                     "ChatGPT models registered. Use the `chatgpt-` prefix in /model."
                 )
-            else:
-                emit_warning("API key obtained, but model list could not be fetched.")
code_puppy/plugins/chatgpt_oauth/register_callbacks.py
@@ -5,6 +5,8 @@ from __future__ import annotations
 import os
 from typing import List, Optional, Tuple
 
+from code_puppy.callbacks import register_callback
+from code_puppy.config import set_model_name
 from code_puppy.messaging import emit_info, emit_success, emit_warning
 
 from .config import CHATGPT_OAUTH_CONFIG, get_token_storage_path
@@ -74,6 +76,7 @@ def _handle_custom_command(command: str, name: str) -> Optional[bool]:
 
     if name == "chatgpt-auth":
         run_oauth_flow()
+        set_model_name("chatgpt-gpt-5.2-codex")
         return True
 
     if name == "chatgpt-status":
@@ -87,6 +90,5 @@ def _handle_custom_command(command: str, name: str) -> Optional[bool]:
     return None
 
 
-# Temporarily disabled - chatgpt-oauth plugin not working yet
-# register_callback("custom_command_help", _custom_help)
-# register_callback("custom_command", _handle_custom_command)
+register_callback("custom_command_help", _custom_help)
+register_callback("custom_command", _handle_custom_command)
code_puppy/plugins/chatgpt_oauth/test_plugin.py
@@ -235,25 +235,41 @@ def test_exchange_code_for_tokens(mock_post):
 
 @patch("code_puppy.plugins.chatgpt_oauth.utils.requests.get")
 def test_fetch_chatgpt_models(mock_get):
-    """Test fetching models from OpenAI API."""
+    """Test fetching models from ChatGPT Codex API."""
     mock_response = MagicMock()
     mock_response.status_code = 200
+    # New response format uses "models" key with "slug" field
     mock_response.json.return_value = {
-        "data": [
-            {"id": "gpt-4o"},
-            {"id": "gpt-3.5-turbo"},
-            {"id": "whisper-1"},  # Should be filtered out
-            {"id": "o1-preview"},
+        "models": [
+            {"slug": "gpt-4o"},
+            {"slug": "gpt-3.5-turbo"},
+            {"slug": "o1-preview"},
+            {"slug": "codex-mini"},
         ]
     }
     mock_get.return_value = mock_response
 
-    models = utils.fetch_chatgpt_models("test_api_key")
+    models = utils.fetch_chatgpt_models("test_access_token", "test_account_id")
     assert models is not None
     assert "gpt-4o" in models
     assert "gpt-3.5-turbo" in models
     assert "o1-preview" in models
-    assert "whisper-1" not in models  # Should be filtered
+    assert "codex-mini" in models
+
+
+@patch("code_puppy.plugins.chatgpt_oauth.utils.requests.get")
+def test_fetch_chatgpt_models_fallback(mock_get):
+    """Test that fetch_chatgpt_models returns default list on API failure."""
+    mock_response = MagicMock()
+    mock_response.status_code = 404
+    mock_response.text = '{"detail":"Not Found"}'
+    mock_get.return_value = mock_response
+
+    models = utils.fetch_chatgpt_models("test_access_token", "test_account_id")
+    assert models is not None
+    # Should return default models
+    assert "gpt-5.2" in models
+    assert "gpt-4o" in models
 
 
 def test_add_models_to_chatgpt_config(tmp_path):
@@ -262,14 +278,13 @@ def test_add_models_to_chatgpt_config(tmp_path):
         config, "get_chatgpt_models_path", return_value=tmp_path / "chatgpt_models.json"
     ):
         models = ["gpt-4o", "gpt-3.5-turbo"]
-        api_key = "sk-test"
 
-        assert utils.add_models_to_extra_config(models, api_key)
+        assert utils.add_models_to_extra_config(models)
 
         loaded = utils.load_chatgpt_models()
        assert "chatgpt-gpt-4o" in loaded
        assert "chatgpt-gpt-3.5-turbo" in loaded
-        assert loaded["chatgpt-gpt-4o"]["type"] == "openai"
+        assert loaded["chatgpt-gpt-4o"]["type"] == "chatgpt_oauth"
        assert loaded["chatgpt-gpt-4o"]["name"] == "gpt-4o"
        assert loaded["chatgpt-gpt-4o"]["oauth_source"] == "chatgpt-oauth-plugin"
 
code_puppy/plugins/chatgpt_oauth/utils.py
@@ -149,6 +149,98 @@ def load_stored_tokens() -> Optional[Dict[str, Any]]:
     return None
 
 
+def get_valid_access_token() -> Optional[str]:
+    """Get a valid access token, refreshing if expired.
+
+    Returns:
+        Valid access token string, or None if not authenticated or refresh failed.
+    """
+    tokens = load_stored_tokens()
+    if not tokens:
+        logger.debug("No stored ChatGPT OAuth tokens found")
+        return None
+
+    access_token = tokens.get("access_token")
+    if not access_token:
+        logger.debug("No access_token in stored tokens")
+        return None
+
+    # Check if token is expired by parsing JWT claims
+    claims = parse_jwt_claims(access_token)
+    if claims:
+        exp = claims.get("exp")
+        if exp and isinstance(exp, (int, float)):
+            # Add 30 second buffer before expiry
+            if time.time() > exp - 30:
+                logger.info("ChatGPT OAuth token expired, attempting refresh")
+                refreshed = refresh_access_token()
+                if refreshed:
+                    return refreshed
+                logger.warning("Token refresh failed")
+                return None
+
+    return access_token
+
+
+def refresh_access_token() -> Optional[str]:
+    """Refresh the access token using the refresh token.
+
+    Returns:
+        New access token if refresh succeeded, None otherwise.
+    """
+    tokens = load_stored_tokens()
+    if not tokens:
+        return None
+
+    refresh_token = tokens.get("refresh_token")
+    if not refresh_token:
+        logger.debug("No refresh_token available")
+        return None
+
+    payload = {
+        "grant_type": "refresh_token",
+        "refresh_token": refresh_token,
+        "client_id": CHATGPT_OAUTH_CONFIG["client_id"],
+    }
+
+    headers = {
+        "Content-Type": "application/x-www-form-urlencoded",
+    }
+
+    try:
+        response = requests.post(
+            CHATGPT_OAUTH_CONFIG["token_url"],
+            data=payload,
+            headers=headers,
+            timeout=30,
+        )
+
+        if response.status_code == 200:
+            new_tokens = response.json()
+            # Merge with existing tokens (preserve account_id, etc.)
+            tokens.update(
+                {
+                    "access_token": new_tokens.get("access_token"),
+                    "refresh_token": new_tokens.get("refresh_token", refresh_token),
+                    "id_token": new_tokens.get("id_token", tokens.get("id_token")),
+                    "last_refresh": datetime.datetime.now(datetime.timezone.utc)
+                    .isoformat()
+                    .replace("+00:00", "Z"),
+                }
+            )
+            if save_tokens(tokens):
+                logger.info("Successfully refreshed ChatGPT OAuth token")
+                return tokens["access_token"]
+        else:
+            logger.error(
+                "Token refresh failed: %s - %s", response.status_code, response.text
+            )
+    except Exception as exc:
+        logger.error("Token refresh error: %s", exc)
+
+    return None
+
+
 def save_tokens(tokens: Dict[str, Any]) -> bool:
     if tokens is None:
         raise TypeError("tokens cannot be None")
@@ -248,103 +340,126 @@ def exchange_code_for_tokens(
     return None
 
 
-def fetch_chatgpt_models(api_key: str) -> Optional[List[str]]:
-    """Fetch available models from OpenAI API.
+# Default models available via ChatGPT Codex API
+# These are the known models that work with ChatGPT OAuth tokens
+# Based on codex-rs CLI and shell-scripts/codex-call.sh
+DEFAULT_CODEX_MODELS = [
+    "gpt-5.2",
+    "gpt-5.2-codex",
+]
+
 
-    Makes a real HTTP GET request to OpenAI's models endpoint and filters
-    the results to include only GPT series models while preserving server order.
+def fetch_chatgpt_models(access_token: str, account_id: str) -> Optional[List[str]]:
+    """Fetch available models from ChatGPT Codex API.
+
+    Attempts to fetch models from the API, but falls back to a default list
+    of known Codex-compatible models if the API is unavailable.
 
     Args:
-        api_key: OpenAI API key for authentication
+        access_token: OAuth access token for authentication
+        account_id: ChatGPT account ID (required for the API)
 
     Returns:
-        List of filtered model IDs preserving server order, or None if request fails
+        List of model IDs, or default list if API fails
     """
-    # Build the models URL, ensuring it ends with /v1/models
-    base_url = CHATGPT_OAUTH_CONFIG["api_base_url"].rstrip("/")
-    models_url = f"{base_url}/v1/models"
+    import platform
 
-    # Blocklist of model IDs to exclude
-    blocklist = {"whisper-1"}
+    # Build the models URL with client version
+    client_version = CHATGPT_OAUTH_CONFIG.get("client_version", "0.72.0")
+    base_url = CHATGPT_OAUTH_CONFIG["api_base_url"].rstrip("/")
+    models_url = f"{base_url}/models"
+
+    # Build User-Agent to match codex-rs CLI format
+    originator = CHATGPT_OAUTH_CONFIG.get("originator", "codex_cli_rs")
+    os_name = platform.system()
+    if os_name == "Darwin":
+        os_name = "Mac OS"
+    os_version = platform.release()
+    arch = platform.machine()
+    user_agent = (
+        f"{originator}/{client_version} ({os_name} {os_version}; {arch}) "
+        "Terminal_Codex_CLI"
+    )
 
     headers = {
-        "Authorization": f"Bearer {api_key}",
+        "Authorization": f"Bearer {access_token}",
+        "ChatGPT-Account-Id": account_id,
+        "User-Agent": user_agent,
+        "originator": originator,
+        "Accept": "application/json",
     }
 
-    try:
-        response = requests.get(models_url, headers=headers, timeout=30)
-
-        if response.status_code != 200:
-            logger.error(
-                "Failed to fetch models: HTTP %d - %s",
-                response.status_code,
-                response.text,
-            )
-            return None
-
-        # Parse JSON response
-        try:
-            data = response.json()
-            if "data" not in data or not isinstance(data["data"], list):
-                logger.error("Invalid response format: missing 'data' list")
-                return None
-        except (json.JSONDecodeError, ValueError) as exc:
-            logger.error("Failed to parse JSON response: %s", exc)
-            return None
-
-        # Filter models: start with "gpt-" or "o1-" and not in blocklist
-        filtered_models = []
-        seen_models = set()  # For deduplication while preserving order
-
-        for model in data["data"]:
-            # Skip None entries
-            if model is None:
-                continue
+    # Query params
+    params = {"client_version": client_version}
 
-            model_id = model.get("id")
-            if not model_id:
-                continue
-
-            # Skip if already seen (deduplication)
-            if model_id in seen_models:
-                continue
-
-            # Check if model starts with allowed prefixes and not in blocklist
-            if (
-                model_id.startswith("gpt-") or model_id.startswith("o1-")
-            ) and model_id not in blocklist:
-                filtered_models.append(model_id)
-                seen_models.add(model_id)
+    try:
+        response = requests.get(models_url, headers=headers, params=params, timeout=30)
 
-        return filtered_models
+        if response.status_code == 200:
+            # Parse JSON response
+            try:
+                data = response.json()
+                # The response has a "models" key with list of model objects
+                if "models" in data and isinstance(data["models"], list):
+                    models = []
+                    for model in data["models"]:
+                        if model is None:
+                            continue
+                        model_id = (
+                            model.get("slug") or model.get("id") or model.get("name")
+                        )
+                        if model_id:
+                            models.append(model_id)
+                    if models:
+                        return models
+            except (json.JSONDecodeError, ValueError) as exc:
+                logger.warning("Failed to parse models response: %s", exc)
+
+        # API didn't return valid models, use default list
+        logger.info(
+            "Models endpoint returned %d, using default model list",
+            response.status_code,
+        )
 
     except requests.exceptions.Timeout:
-        logger.error("Timeout while fetching models after 30 seconds")
-        return None
+        logger.warning("Timeout fetching models, using default list")
     except requests.exceptions.RequestException as exc:
-        logger.error("Network error while fetching models: %s", exc)
-        return None
+        logger.warning("Network error fetching models: %s, using default list", exc)
    except Exception as exc:
-        logger.error("Unexpected error while fetching models: %s", exc)
-        return None
+        logger.warning("Error fetching models: %s, using default list", exc)
 
+    # Return default models when API fails
+    logger.info("Using default Codex models: %s", DEFAULT_CODEX_MODELS)
+    return DEFAULT_CODEX_MODELS
 
-def add_models_to_extra_config(models: List[str], api_key: str) -> bool:
+
+def add_models_to_extra_config(models: List[str]) -> bool:
     """Add ChatGPT models to chatgpt_models.json configuration."""
     try:
         chatgpt_models = load_chatgpt_models()
         added = 0
         for model_name in models:
             prefixed = f"{CHATGPT_OAUTH_CONFIG['prefix']}{model_name}"
+
+            # Determine supported settings based on model type
+            # All GPT-5.x models support reasoning_effort and verbosity
+            supported_settings = ["reasoning_effort", "verbosity"]
+
+            # Only codex models support xhigh reasoning effort
+            # Regular gpt-5.2 is capped at "high"
+            is_codex = "codex" in model_name.lower()
+
             chatgpt_models[prefixed] = {
-                "type": "openai",
+                "type": "chatgpt_oauth",
                 "name": model_name,
                 "custom_endpoint": {
+                    # Codex API uses chatgpt.com/backend-api/codex, not api.openai.com
                    "url": CHATGPT_OAUTH_CONFIG["api_base_url"],
-                    "api_key": "${" + CHATGPT_OAUTH_CONFIG["api_key_env_var"] + "}",
                },
                "context_length": CHATGPT_OAUTH_CONFIG["default_context_length"],
                "oauth_source": "chatgpt-oauth-plugin",
+                "supported_settings": supported_settings,
+                "supports_xhigh_reasoning": is_codex,
            }
            added += 1
        if save_chatgpt_models(chatgpt_models):
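Taken together, get_valid_access_token and refresh_access_token give callers a single entry point for an always-fresh token. A minimal sketch of the intended call pattern, assuming the plugin is importable and that account_id lives in the stored token blob (as the refresh code's comment suggests):

    from code_puppy.plugins.chatgpt_oauth import utils

    # Checks the JWT "exp" claim with a 30-second buffer and refreshes if needed.
    token = utils.get_valid_access_token()
    if token is None:
        print("Not authenticated; run the chatgpt-auth command first")
    else:
        account_id = (utils.load_stored_tokens() or {}).get("account_id", "")
        # Falls back to DEFAULT_CODEX_MODELS if the Codex models endpoint fails.
        print(utils.fetch_chatgpt_models(token, account_id))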
code_puppy/plugins/claude_code_oauth/register_callbacks.py
@@ -12,6 +12,7 @@ from typing import Any, Dict, List, Optional, Tuple
 from urllib.parse import parse_qs, urlparse
 
 from code_puppy.callbacks import register_callback
+from code_puppy.config import set_model_name
 from code_puppy.messaging import emit_error, emit_info, emit_success, emit_warning
 
 from ..oauth_puppy_html import oauth_failure_html, oauth_success_html
@@ -180,6 +181,31 @@ def _custom_help() -> List[Tuple[str, str]]:
     ]
 
 
+def _reload_current_agent() -> None:
+    """Reload the current agent so new auth tokens are picked up immediately."""
+    try:
+        from code_puppy.agents import get_current_agent
+
+        current_agent = get_current_agent()
+        if current_agent is None:
+            logger.debug("No current agent to reload")
+            return
+
+        # JSON agents may need to refresh their config before reload
+        if hasattr(current_agent, "refresh_config"):
+            try:
+                current_agent.refresh_config()
+            except Exception:
+                # Non-fatal, continue to reload
+                pass
+
+        current_agent.reload_code_generation_agent()
+        emit_info("Active agent reloaded with new authentication")
+    except Exception as e:
+        emit_warning(f"Authentication succeeded but agent reload failed: {e}")
+        logger.exception("Failed to reload agent after authentication")
+
+
 def _perform_authentication() -> None:
     context = prepare_oauth_context()
     code = _await_callback(context)
@@ -219,6 +245,9 @@ def _perform_authentication() -> None:
         "Claude Code models added to your configuration. Use the `claude-code-` prefix!"
     )
 
+    # Reload the current agent so the new auth token is picked up immediately
+    _reload_current_agent()
+
 
 
 def _handle_custom_command(command: str, name: str) -> Optional[bool]:
@@ -232,6 +261,7 @@ def _handle_custom_command(command: str, name: str) -> Optional[bool]:
                "Existing Claude Code tokens found. Continuing will overwrite them."
            )
        _perform_authentication()
+        set_model_name("claude-code-claude-opus-4-5-20251101")
        return True
 
    if name == "claude-code-status":
code_puppy/plugins/claude_code_oauth/utils.py
@@ -368,6 +368,7 @@ def add_models_to_extra_config(models: List[str]) -> bool:
                    "temperature",
                    "extended_thinking",
                    "budget_tokens",
+                    "interleaved_thinking",
                ],
            }
            added += 1