code-puppy 0.0.169__py3-none-any.whl → 0.0.366__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. code_puppy/__init__.py +7 -1
  2. code_puppy/agents/__init__.py +8 -8
  3. code_puppy/agents/agent_c_reviewer.py +155 -0
  4. code_puppy/agents/agent_code_puppy.py +9 -2
  5. code_puppy/agents/agent_code_reviewer.py +90 -0
  6. code_puppy/agents/agent_cpp_reviewer.py +132 -0
  7. code_puppy/agents/agent_creator_agent.py +48 -9
  8. code_puppy/agents/agent_golang_reviewer.py +151 -0
  9. code_puppy/agents/agent_javascript_reviewer.py +160 -0
  10. code_puppy/agents/agent_manager.py +146 -199
  11. code_puppy/agents/agent_pack_leader.py +383 -0
  12. code_puppy/agents/agent_planning.py +163 -0
  13. code_puppy/agents/agent_python_programmer.py +165 -0
  14. code_puppy/agents/agent_python_reviewer.py +90 -0
  15. code_puppy/agents/agent_qa_expert.py +163 -0
  16. code_puppy/agents/agent_qa_kitten.py +208 -0
  17. code_puppy/agents/agent_security_auditor.py +181 -0
  18. code_puppy/agents/agent_terminal_qa.py +323 -0
  19. code_puppy/agents/agent_typescript_reviewer.py +166 -0
  20. code_puppy/agents/base_agent.py +1713 -1
  21. code_puppy/agents/event_stream_handler.py +350 -0
  22. code_puppy/agents/json_agent.py +12 -1
  23. code_puppy/agents/pack/__init__.py +34 -0
  24. code_puppy/agents/pack/bloodhound.py +304 -0
  25. code_puppy/agents/pack/husky.py +321 -0
  26. code_puppy/agents/pack/retriever.py +393 -0
  27. code_puppy/agents/pack/shepherd.py +348 -0
  28. code_puppy/agents/pack/terrier.py +287 -0
  29. code_puppy/agents/pack/watchdog.py +367 -0
  30. code_puppy/agents/prompt_reviewer.py +145 -0
  31. code_puppy/agents/subagent_stream_handler.py +276 -0
  32. code_puppy/api/__init__.py +13 -0
  33. code_puppy/api/app.py +169 -0
  34. code_puppy/api/main.py +21 -0
  35. code_puppy/api/pty_manager.py +446 -0
  36. code_puppy/api/routers/__init__.py +12 -0
  37. code_puppy/api/routers/agents.py +36 -0
  38. code_puppy/api/routers/commands.py +217 -0
  39. code_puppy/api/routers/config.py +74 -0
  40. code_puppy/api/routers/sessions.py +232 -0
  41. code_puppy/api/templates/terminal.html +361 -0
  42. code_puppy/api/websocket.py +154 -0
  43. code_puppy/callbacks.py +174 -4
  44. code_puppy/chatgpt_codex_client.py +283 -0
  45. code_puppy/claude_cache_client.py +586 -0
  46. code_puppy/cli_runner.py +916 -0
  47. code_puppy/command_line/add_model_menu.py +1079 -0
  48. code_puppy/command_line/agent_menu.py +395 -0
  49. code_puppy/command_line/attachments.py +395 -0
  50. code_puppy/command_line/autosave_menu.py +605 -0
  51. code_puppy/command_line/clipboard.py +527 -0
  52. code_puppy/command_line/colors_menu.py +520 -0
  53. code_puppy/command_line/command_handler.py +233 -627
  54. code_puppy/command_line/command_registry.py +150 -0
  55. code_puppy/command_line/config_commands.py +715 -0
  56. code_puppy/command_line/core_commands.py +792 -0
  57. code_puppy/command_line/diff_menu.py +863 -0
  58. code_puppy/command_line/load_context_completion.py +15 -22
  59. code_puppy/command_line/mcp/base.py +1 -4
  60. code_puppy/command_line/mcp/catalog_server_installer.py +175 -0
  61. code_puppy/command_line/mcp/custom_server_form.py +688 -0
  62. code_puppy/command_line/mcp/custom_server_installer.py +195 -0
  63. code_puppy/command_line/mcp/edit_command.py +148 -0
  64. code_puppy/command_line/mcp/handler.py +9 -4
  65. code_puppy/command_line/mcp/help_command.py +6 -5
  66. code_puppy/command_line/mcp/install_command.py +16 -27
  67. code_puppy/command_line/mcp/install_menu.py +685 -0
  68. code_puppy/command_line/mcp/list_command.py +3 -3
  69. code_puppy/command_line/mcp/logs_command.py +174 -65
  70. code_puppy/command_line/mcp/remove_command.py +2 -2
  71. code_puppy/command_line/mcp/restart_command.py +12 -4
  72. code_puppy/command_line/mcp/search_command.py +17 -11
  73. code_puppy/command_line/mcp/start_all_command.py +22 -13
  74. code_puppy/command_line/mcp/start_command.py +50 -31
  75. code_puppy/command_line/mcp/status_command.py +6 -7
  76. code_puppy/command_line/mcp/stop_all_command.py +11 -8
  77. code_puppy/command_line/mcp/stop_command.py +11 -10
  78. code_puppy/command_line/mcp/test_command.py +2 -2
  79. code_puppy/command_line/mcp/utils.py +1 -1
  80. code_puppy/command_line/mcp/wizard_utils.py +22 -18
  81. code_puppy/command_line/mcp_completion.py +174 -0
  82. code_puppy/command_line/model_picker_completion.py +89 -30
  83. code_puppy/command_line/model_settings_menu.py +884 -0
  84. code_puppy/command_line/motd.py +14 -8
  85. code_puppy/command_line/onboarding_slides.py +179 -0
  86. code_puppy/command_line/onboarding_wizard.py +340 -0
  87. code_puppy/command_line/pin_command_completion.py +329 -0
  88. code_puppy/command_line/prompt_toolkit_completion.py +626 -75
  89. code_puppy/command_line/session_commands.py +296 -0
  90. code_puppy/command_line/utils.py +54 -0
  91. code_puppy/config.py +1181 -51
  92. code_puppy/error_logging.py +118 -0
  93. code_puppy/gemini_code_assist.py +385 -0
  94. code_puppy/gemini_model.py +602 -0
  95. code_puppy/http_utils.py +220 -104
  96. code_puppy/keymap.py +128 -0
  97. code_puppy/main.py +5 -594
  98. code_puppy/{mcp → mcp_}/__init__.py +17 -0
  99. code_puppy/{mcp → mcp_}/async_lifecycle.py +35 -4
  100. code_puppy/{mcp → mcp_}/blocking_startup.py +70 -43
  101. code_puppy/{mcp → mcp_}/captured_stdio_server.py +2 -2
  102. code_puppy/{mcp → mcp_}/config_wizard.py +5 -5
  103. code_puppy/{mcp → mcp_}/dashboard.py +15 -6
  104. code_puppy/{mcp → mcp_}/examples/retry_example.py +4 -1
  105. code_puppy/{mcp → mcp_}/managed_server.py +66 -39
  106. code_puppy/{mcp → mcp_}/manager.py +146 -52
  107. code_puppy/mcp_/mcp_logs.py +224 -0
  108. code_puppy/{mcp → mcp_}/registry.py +6 -6
  109. code_puppy/{mcp → mcp_}/server_registry_catalog.py +25 -8
  110. code_puppy/messaging/__init__.py +199 -2
  111. code_puppy/messaging/bus.py +610 -0
  112. code_puppy/messaging/commands.py +167 -0
  113. code_puppy/messaging/markdown_patches.py +57 -0
  114. code_puppy/messaging/message_queue.py +17 -48
  115. code_puppy/messaging/messages.py +500 -0
  116. code_puppy/messaging/queue_console.py +1 -24
  117. code_puppy/messaging/renderers.py +43 -146
  118. code_puppy/messaging/rich_renderer.py +1027 -0
  119. code_puppy/messaging/spinner/__init__.py +33 -5
  120. code_puppy/messaging/spinner/console_spinner.py +92 -52
  121. code_puppy/messaging/spinner/spinner_base.py +29 -0
  122. code_puppy/messaging/subagent_console.py +461 -0
  123. code_puppy/model_factory.py +686 -80
  124. code_puppy/model_utils.py +167 -0
  125. code_puppy/models.json +86 -104
  126. code_puppy/models_dev_api.json +1 -0
  127. code_puppy/models_dev_parser.py +592 -0
  128. code_puppy/plugins/__init__.py +164 -10
  129. code_puppy/plugins/antigravity_oauth/__init__.py +10 -0
  130. code_puppy/plugins/antigravity_oauth/accounts.py +406 -0
  131. code_puppy/plugins/antigravity_oauth/antigravity_model.py +704 -0
  132. code_puppy/plugins/antigravity_oauth/config.py +42 -0
  133. code_puppy/plugins/antigravity_oauth/constants.py +136 -0
  134. code_puppy/plugins/antigravity_oauth/oauth.py +478 -0
  135. code_puppy/plugins/antigravity_oauth/register_callbacks.py +406 -0
  136. code_puppy/plugins/antigravity_oauth/storage.py +271 -0
  137. code_puppy/plugins/antigravity_oauth/test_plugin.py +319 -0
  138. code_puppy/plugins/antigravity_oauth/token.py +167 -0
  139. code_puppy/plugins/antigravity_oauth/transport.py +767 -0
  140. code_puppy/plugins/antigravity_oauth/utils.py +169 -0
  141. code_puppy/plugins/chatgpt_oauth/__init__.py +8 -0
  142. code_puppy/plugins/chatgpt_oauth/config.py +52 -0
  143. code_puppy/plugins/chatgpt_oauth/oauth_flow.py +328 -0
  144. code_puppy/plugins/chatgpt_oauth/register_callbacks.py +94 -0
  145. code_puppy/plugins/chatgpt_oauth/test_plugin.py +293 -0
  146. code_puppy/plugins/chatgpt_oauth/utils.py +489 -0
  147. code_puppy/plugins/claude_code_oauth/README.md +167 -0
  148. code_puppy/plugins/claude_code_oauth/SETUP.md +93 -0
  149. code_puppy/plugins/claude_code_oauth/__init__.py +6 -0
  150. code_puppy/plugins/claude_code_oauth/config.py +50 -0
  151. code_puppy/plugins/claude_code_oauth/register_callbacks.py +308 -0
  152. code_puppy/plugins/claude_code_oauth/test_plugin.py +283 -0
  153. code_puppy/plugins/claude_code_oauth/utils.py +518 -0
  154. code_puppy/plugins/customizable_commands/__init__.py +0 -0
  155. code_puppy/plugins/customizable_commands/register_callbacks.py +169 -0
  156. code_puppy/plugins/example_custom_command/README.md +280 -0
  157. code_puppy/plugins/example_custom_command/register_callbacks.py +51 -0
  158. code_puppy/plugins/file_permission_handler/__init__.py +4 -0
  159. code_puppy/plugins/file_permission_handler/register_callbacks.py +523 -0
  160. code_puppy/plugins/frontend_emitter/__init__.py +25 -0
  161. code_puppy/plugins/frontend_emitter/emitter.py +121 -0
  162. code_puppy/plugins/frontend_emitter/register_callbacks.py +261 -0
  163. code_puppy/plugins/oauth_puppy_html.py +228 -0
  164. code_puppy/plugins/shell_safety/__init__.py +6 -0
  165. code_puppy/plugins/shell_safety/agent_shell_safety.py +69 -0
  166. code_puppy/plugins/shell_safety/command_cache.py +156 -0
  167. code_puppy/plugins/shell_safety/register_callbacks.py +202 -0
  168. code_puppy/prompts/antigravity_system_prompt.md +1 -0
  169. code_puppy/prompts/codex_system_prompt.md +310 -0
  170. code_puppy/pydantic_patches.py +131 -0
  171. code_puppy/reopenable_async_client.py +8 -8
  172. code_puppy/round_robin_model.py +10 -15
  173. code_puppy/session_storage.py +294 -0
  174. code_puppy/status_display.py +21 -4
  175. code_puppy/summarization_agent.py +52 -14
  176. code_puppy/terminal_utils.py +418 -0
  177. code_puppy/tools/__init__.py +139 -6
  178. code_puppy/tools/agent_tools.py +548 -49
  179. code_puppy/tools/browser/__init__.py +37 -0
  180. code_puppy/tools/browser/browser_control.py +289 -0
  181. code_puppy/tools/browser/browser_interactions.py +545 -0
  182. code_puppy/tools/browser/browser_locators.py +640 -0
  183. code_puppy/tools/browser/browser_manager.py +316 -0
  184. code_puppy/tools/browser/browser_navigation.py +251 -0
  185. code_puppy/tools/browser/browser_screenshot.py +179 -0
  186. code_puppy/tools/browser/browser_scripts.py +462 -0
  187. code_puppy/tools/browser/browser_workflows.py +221 -0
  188. code_puppy/tools/browser/chromium_terminal_manager.py +259 -0
  189. code_puppy/tools/browser/terminal_command_tools.py +521 -0
  190. code_puppy/tools/browser/terminal_screenshot_tools.py +556 -0
  191. code_puppy/tools/browser/terminal_tools.py +525 -0
  192. code_puppy/tools/command_runner.py +941 -153
  193. code_puppy/tools/common.py +1146 -6
  194. code_puppy/tools/display.py +84 -0
  195. code_puppy/tools/file_modifications.py +288 -89
  196. code_puppy/tools/file_operations.py +352 -266
  197. code_puppy/tools/subagent_context.py +158 -0
  198. code_puppy/uvx_detection.py +242 -0
  199. code_puppy/version_checker.py +30 -11
  200. code_puppy-0.0.366.data/data/code_puppy/models.json +110 -0
  201. code_puppy-0.0.366.data/data/code_puppy/models_dev_api.json +1 -0
  202. {code_puppy-0.0.169.dist-info → code_puppy-0.0.366.dist-info}/METADATA +184 -67
  203. code_puppy-0.0.366.dist-info/RECORD +217 -0
  204. {code_puppy-0.0.169.dist-info → code_puppy-0.0.366.dist-info}/WHEEL +1 -1
  205. {code_puppy-0.0.169.dist-info → code_puppy-0.0.366.dist-info}/entry_points.txt +1 -0
  206. code_puppy/agent.py +0 -231
  207. code_puppy/agents/agent_orchestrator.json +0 -26
  208. code_puppy/agents/runtime_manager.py +0 -272
  209. code_puppy/command_line/mcp/add_command.py +0 -183
  210. code_puppy/command_line/meta_command_handler.py +0 -153
  211. code_puppy/message_history_processor.py +0 -490
  212. code_puppy/messaging/spinner/textual_spinner.py +0 -101
  213. code_puppy/state_management.py +0 -200
  214. code_puppy/tui/__init__.py +0 -10
  215. code_puppy/tui/app.py +0 -986
  216. code_puppy/tui/components/__init__.py +0 -21
  217. code_puppy/tui/components/chat_view.py +0 -550
  218. code_puppy/tui/components/command_history_modal.py +0 -218
  219. code_puppy/tui/components/copy_button.py +0 -139
  220. code_puppy/tui/components/custom_widgets.py +0 -63
  221. code_puppy/tui/components/human_input_modal.py +0 -175
  222. code_puppy/tui/components/input_area.py +0 -167
  223. code_puppy/tui/components/sidebar.py +0 -309
  224. code_puppy/tui/components/status_bar.py +0 -182
  225. code_puppy/tui/messages.py +0 -27
  226. code_puppy/tui/models/__init__.py +0 -8
  227. code_puppy/tui/models/chat_message.py +0 -25
  228. code_puppy/tui/models/command_history.py +0 -89
  229. code_puppy/tui/models/enums.py +0 -24
  230. code_puppy/tui/screens/__init__.py +0 -15
  231. code_puppy/tui/screens/help.py +0 -130
  232. code_puppy/tui/screens/mcp_install_wizard.py +0 -803
  233. code_puppy/tui/screens/settings.py +0 -290
  234. code_puppy/tui/screens/tools.py +0 -74
  235. code_puppy-0.0.169.data/data/code_puppy/models.json +0 -128
  236. code_puppy-0.0.169.dist-info/RECORD +0 -112
  237. /code_puppy/{mcp → mcp_}/circuit_breaker.py +0 -0
  238. /code_puppy/{mcp → mcp_}/error_isolation.py +0 -0
  239. /code_puppy/{mcp → mcp_}/health_monitor.py +0 -0
  240. /code_puppy/{mcp → mcp_}/retry_manager.py +0 -0
  241. /code_puppy/{mcp → mcp_}/status_tracker.py +0 -0
  242. /code_puppy/{mcp → mcp_}/system_tools.py +0 -0
  243. {code_puppy-0.0.169.dist-info → code_puppy-0.0.366.dist-info}/licenses/LICENSE +0 -0
code_puppy/model_factory.py
@@ -4,31 +4,140 @@ import os
  import pathlib
  from typing import Any, Dict

- import httpx
  from anthropic import AsyncAnthropic
  from openai import AsyncAzureOpenAI
- from pydantic_ai.models.anthropic import AnthropicModel
- from pydantic_ai.models.gemini import GeminiModel
- from pydantic_ai.models.openai import OpenAIChatModel
+ from pydantic_ai.models.anthropic import AnthropicModel, AnthropicModelSettings
+ from pydantic_ai.models.openai import (
+     OpenAIChatModel,
+     OpenAIChatModelSettings,
+     OpenAIResponsesModel,
+ )
+ from pydantic_ai.profiles import ModelProfile
  from pydantic_ai.providers.anthropic import AnthropicProvider
- from pydantic_ai.providers.google_gla import GoogleGLAProvider
- from pydantic_ai.providers.openai import OpenAIProvider
  from pydantic_ai.providers.cerebras import CerebrasProvider
+ from pydantic_ai.providers.openai import OpenAIProvider
  from pydantic_ai.providers.openrouter import OpenRouterProvider
+ from pydantic_ai.settings import ModelSettings
+
+ from code_puppy.gemini_model import GeminiModel
+ from code_puppy.messaging import emit_warning

  from . import callbacks
- from .config import EXTRA_MODELS_FILE
- from .http_utils import create_async_client
+ from .claude_cache_client import ClaudeCacheAsyncClient, patch_anthropic_client_messages
+ from .config import EXTRA_MODELS_FILE, get_value
+ from .http_utils import create_async_client, get_cert_bundle_path, get_http2
  from .round_robin_model import RoundRobinModel

- # Environment variables used in this module:
- # - GEMINI_API_KEY: API key for Google's Gemini models. Required when using Gemini models.
- # - OPENAI_API_KEY: API key for OpenAI models. Required when using OpenAI models or custom_openai endpoints.
- # - TOGETHER_AI_KEY: API key for Together AI models. Required when using Together AI models.
- #
- # When using custom endpoints (type: "custom_openai" in models.json):
- # - Environment variables can be referenced in header values by prefixing with $ in models.json.
- # Example: "X-Api-Key": "$OPENAI_API_KEY" will use the value from os.environ.get("OPENAI_API_KEY")
+ logger = logging.getLogger(__name__)
+
+
+ def get_api_key(env_var_name: str) -> str | None:
+     """Get an API key from config first, then fall back to environment variable.
+
+     This allows users to set API keys via `/set KIMI_API_KEY=xxx` in addition to
+     setting them as environment variables.
+
+     Args:
+         env_var_name: The name of the environment variable (e.g., "OPENAI_API_KEY")
+
+     Returns:
+         The API key value, or None if not found in either config or environment.
+     """
+     # First check config (case-insensitive key lookup)
+     config_value = get_value(env_var_name.lower())
+     if config_value:
+         return config_value
+
+     # Fall back to environment variable
+     return os.environ.get(env_var_name)
+
+
+ def make_model_settings(
+     model_name: str, max_tokens: int | None = None
+ ) -> ModelSettings:
+     """Create appropriate ModelSettings for a given model.
+
+     This handles model-specific settings:
+     - GPT-5 models: reasoning_effort and verbosity (non-codex only)
+     - Claude/Anthropic models: extended_thinking and budget_tokens
+     - Automatic max_tokens calculation based on model context length
+
+     Args:
+         model_name: The name of the model to create settings for.
+         max_tokens: Optional max tokens limit. If None, automatically calculated
+             as: max(2048, min(15% of context_length, 65536))
+
+     Returns:
+         Appropriate ModelSettings subclass instance for the model.
+     """
+     from code_puppy.config import (
+         get_effective_model_settings,
+         get_openai_reasoning_effort,
+         get_openai_verbosity,
+     )
+
+     model_settings_dict: dict = {}
+
+     # Calculate max_tokens if not explicitly provided
+     if max_tokens is None:
+         # Load model config to get context length
+         try:
+             models_config = ModelFactory.load_config()
+             model_config = models_config.get(model_name, {})
+             context_length = model_config.get("context_length", 128000)
+         except Exception:
+             # Fallback if config loading fails (e.g., in CI environments)
+             context_length = 128000
+         # min 2048, 15% of context, max 65536
+         max_tokens = max(2048, min(int(0.15 * context_length), 65536))
+
+     model_settings_dict["max_tokens"] = max_tokens
+     effective_settings = get_effective_model_settings(model_name)
+     model_settings_dict.update(effective_settings)
+
+     # Default to clear_thinking=False for GLM-4.7 models (preserved thinking)
+     if "glm-4.7" in model_name.lower():
+         clear_thinking = effective_settings.get("clear_thinking", False)
+         model_settings_dict["thinking"] = {
+             "type": "enabled",
+             "clear_thinking": clear_thinking,
+         }
+
+     model_settings: ModelSettings = ModelSettings(**model_settings_dict)
+
+     if "gpt-5" in model_name:
+         model_settings_dict["openai_reasoning_effort"] = get_openai_reasoning_effort()
+         # Verbosity only applies to non-codex GPT-5 models (codex only supports "medium")
+         if "codex" not in model_name:
+             verbosity = get_openai_verbosity()
+             model_settings_dict["extra_body"] = {"verbosity": verbosity}
+         model_settings = OpenAIChatModelSettings(**model_settings_dict)
+     elif model_name.startswith("claude-") or model_name.startswith("anthropic-"):
+         # Handle Anthropic extended thinking settings
+         # Remove top_p as Anthropic doesn't support it with extended thinking
+         model_settings_dict.pop("top_p", None)
+
+         # Claude extended thinking requires temperature=1.0 (API restriction)
+         # Default to 1.0 if not explicitly set by user
+         if model_settings_dict.get("temperature") is None:
+             model_settings_dict["temperature"] = 1.0
+
+         extended_thinking = effective_settings.get("extended_thinking", True)
+         budget_tokens = effective_settings.get("budget_tokens", 10000)
+         if extended_thinking and budget_tokens:
+             model_settings_dict["anthropic_thinking"] = {
+                 "type": "enabled",
+                 "budget_tokens": budget_tokens,
+             }
+         model_settings = AnthropicModelSettings(**model_settings_dict)
+
+     return model_settings
+
+
+ class ZaiChatModel(OpenAIChatModel):
+     def _process_response(self, response):
+         response.object = "chat.completion"
+         return super()._process_response(response)


  def get_custom_config(model_config):
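Note: the max_tokens default added in make_model_settings above scales with the model's configured context window. A minimal standalone sketch of that arithmetic (the context lengths below are illustrative, not taken from models.json):

def default_max_tokens(context_length: int) -> int:
    # Mirrors the rule documented in the docstring above: at least 2048 tokens,
    # at most 65536, otherwise 15% of the model's context window.
    return max(2048, min(int(0.15 * context_length), 65536))

# Illustrative values only:
assert default_max_tokens(8_192) == 2_048       # 15% = 1228 -> raised to the 2048 floor
assert default_max_tokens(128_000) == 19_200    # 15% of 128k
assert default_max_tokens(1_000_000) == 65_536  # capped at 65536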
@@ -44,24 +153,39 @@ def get_custom_config(model_config):
      for key, value in custom_config.get("headers", {}).items():
          if value.startswith("$"):
              env_var_name = value[1:]
-             resolved_value = os.environ.get(env_var_name)
+             resolved_value = get_api_key(env_var_name)
              if resolved_value is None:
-                 raise ValueError(
-                     f"Environment variable '{env_var_name}' is required for custom endpoint headers but is not set. "
-                     f"Please set the environment variable: export {env_var_name}=your_value"
+                 emit_warning(
+                     f"'{env_var_name}' is not set (check config or environment) for custom endpoint header '{key}'. Proceeding with empty value."
                  )
+                 resolved_value = ""
              value = resolved_value
+         elif "$" in value:
+             tokens = value.split(" ")
+             resolved_values = []
+             for token in tokens:
+                 if token.startswith("$"):
+                     env_var = token[1:]
+                     resolved_value = get_api_key(env_var)
+                     if resolved_value is None:
+                         emit_warning(
+                             f"'{env_var}' is not set (check config or environment) for custom endpoint header '{key}'. Proceeding with empty value."
+                         )
+                         resolved_values.append("")
+                     else:
+                         resolved_values.append(resolved_value)
+                 else:
+                     resolved_values.append(token)
+             value = " ".join(resolved_values)
          headers[key] = value
-
      api_key = None
      if "api_key" in custom_config:
          if custom_config["api_key"].startswith("$"):
              env_var_name = custom_config["api_key"][1:]
-             api_key = os.environ.get(env_var_name)
+             api_key = get_api_key(env_var_name)
              if api_key is None:
-                 raise ValueError(
-                     f"Environment variable '{env_var_name}' is required for custom endpoint API key but is not set. "
-                     f"Please set the environment variable: export {env_var_name}=your_value"
+                 emit_warning(
+                     f"API key '{env_var_name}' is not set (checked config and environment); proceeding without API key."
                  )
          else:
              api_key = custom_config["api_key"]
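Note: the header resolution above now also substitutes $VAR references that appear inside multi-token values (for example "Authorization": "Bearer $MY_TOKEN"). A simplified standalone sketch of that substitution, with a lookup() stand-in for get_api_key and warnings omitted:

import os

def resolve_header(value: str, lookup=os.environ.get) -> str:
    # Whole-value reference: "$MY_TOKEN" -> lookup("MY_TOKEN")
    if value.startswith("$"):
        return lookup(value[1:]) or ""
    # Token-wise references: "Bearer $MY_TOKEN" -> "Bearer <resolved value>"
    if "$" in value:
        return " ".join(
            (lookup(tok[1:]) or "") if tok.startswith("$") else tok
            for tok in value.split(" ")
        )
    return value

# Illustrative usage with a fake lookup table (not a real environment):
fake_env = {"MY_TOKEN": "abc123"}.get
print(resolve_header("Bearer $MY_TOKEN", lookup=fake_env))  # -> "Bearer abc123"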
@@ -94,26 +218,63 @@ class ModelFactory:
          with open(MODELS_FILE, "r") as f:
              config = json.load(f)

-         if pathlib.Path(EXTRA_MODELS_FILE).exists():
+         # Import OAuth model file paths from main config
+         from code_puppy.config import (
+             ANTIGRAVITY_MODELS_FILE,
+             CHATGPT_MODELS_FILE,
+             CLAUDE_MODELS_FILE,
+             GEMINI_MODELS_FILE,
+         )
+
+         # Build list of extra model sources
+         extra_sources: list[tuple[pathlib.Path, str, bool]] = [
+             (pathlib.Path(EXTRA_MODELS_FILE), "extra models", False),
+             (pathlib.Path(CHATGPT_MODELS_FILE), "ChatGPT OAuth models", False),
+             (pathlib.Path(CLAUDE_MODELS_FILE), "Claude Code OAuth models", True),
+             (pathlib.Path(GEMINI_MODELS_FILE), "Gemini OAuth models", False),
+             (pathlib.Path(ANTIGRAVITY_MODELS_FILE), "Antigravity OAuth models", False),
+         ]
+
+         for source_path, label, use_filtered in extra_sources:
+             if not source_path.exists():
+                 continue
              try:
-                 with open(EXTRA_MODELS_FILE, "r") as f:
-                     extra_config = json.load(f)
-                 config.update(extra_config)
-             except json.JSONDecodeError as e:
+                 # Use filtered loading for Claude Code OAuth models to show only latest versions
+                 if use_filtered:
+                     try:
+                         from code_puppy.plugins.claude_code_oauth.utils import (
+                             load_claude_models_filtered,
+                         )
+
+                         extra_config = load_claude_models_filtered()
+                     except ImportError:
+                         # Plugin not available, fall back to standard JSON loading
+                         logging.getLogger(__name__).debug(
+                             f"claude_code_oauth plugin not available, loading {label} as plain JSON"
+                         )
+                         with open(source_path, "r") as f:
+                             extra_config = json.load(f)
+                 else:
+                     with open(source_path, "r") as f:
+                         extra_config = json.load(f)
+                 config.update(extra_config)
+             except json.JSONDecodeError as exc:
                  logging.getLogger(__name__).warning(
-                     f"Failed to load extra models config from {EXTRA_MODELS_FILE}: Invalid JSON - {e}\n"
-                     f"Please check your extra_models.json file for syntax errors."
+                     f"Failed to load {label} config from {source_path}: Invalid JSON - {exc}"
                  )
-             except Exception as e:
+             except Exception as exc:
                  logging.getLogger(__name__).warning(
-                     f"Failed to load extra models config from {EXTRA_MODELS_FILE}: {e}\n"
-                     f"The extra models configuration will be ignored."
+                     f"Failed to load {label} config from {source_path}: {exc}"
                  )
          return config

      @staticmethod
      def get_model(model_name: str, config: Dict[str, Any]) -> Any:
-         """Returns a configured model instance based on the provided name and config."""
+         """Returns a configured model instance based on the provided name and config.
+
+         API key validation happens naturally within each model type's initialization,
+         which emits warnings and returns None if keys are missing.
+         """
          model_config = config.get(model_name)
          if not model_config:
              raise ValueError(f"Model '{model_name}' not found in configuration.")
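Note: because load_config above merges every extra source into one dict with config.update(...), a model name that appears in more than one file is taken from whichever source loads last (extra models first, then the OAuth model files in the listed order). A small standalone illustration of that override behaviour (entries are hypothetical):

# Hypothetical contents of two model sources; names and fields are illustrative.
base = {"gpt-5": {"type": "openai", "name": "gpt-5"}}
extra = {
    "gpt-5": {"type": "custom_openai", "name": "gpt-5"},
    "my-local": {"type": "custom_openai", "name": "llama"},
}

config = dict(base)
config.update(extra)            # later sources win, as in load_config
print(config["gpt-5"]["type"])  # -> "custom_openai"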
@@ -121,40 +282,189 @@ class ModelFactory:
          model_type = model_config.get("type")

          if model_type == "gemini":
-             provider = GoogleGLAProvider(api_key=os.environ.get("GEMINI_API_KEY", ""))
+             api_key = get_api_key("GEMINI_API_KEY")
+             if not api_key:
+                 emit_warning(
+                     f"GEMINI_API_KEY is not set (check config or environment); skipping Gemini model '{model_config.get('name')}'."
+                 )
+                 return None

-             model = GeminiModel(model_name=model_config["name"], provider=provider)
-             setattr(model, "provider", provider)
+             model = GeminiModel(model_name=model_config["name"], api_key=api_key)
              return model

          elif model_type == "openai":
-             provider = OpenAIProvider(api_key=os.environ.get("OPENAI_API_KEY", ""))
+             api_key = get_api_key("OPENAI_API_KEY")
+             if not api_key:
+                 emit_warning(
+                     f"OPENAI_API_KEY is not set (check config or environment); skipping OpenAI model '{model_config.get('name')}'."
+                 )
+                 return None

+             provider = OpenAIProvider(api_key=api_key)
              model = OpenAIChatModel(model_name=model_config["name"], provider=provider)
+             if "codex" in model_name:
+                 model = OpenAIResponsesModel(
+                     model_name=model_config["name"], provider=provider
+                 )
              setattr(model, "provider", provider)
              return model

          elif model_type == "anthropic":
-             api_key = os.environ.get("ANTHROPIC_API_KEY", None)
+             api_key = get_api_key("ANTHROPIC_API_KEY")
              if not api_key:
-                 raise ValueError(
-                     "ANTHROPIC_API_KEY environment variable must be set for Anthropic models."
+                 emit_warning(
+                     f"ANTHROPIC_API_KEY is not set (check config or environment); skipping Anthropic model '{model_config.get('name')}'."
                  )
-             anthropic_client = AsyncAnthropic(api_key=api_key)
+                 return None
+
+             # Use the same caching client as claude_code models
+             verify = get_cert_bundle_path()
+             http2_enabled = get_http2()
+
+             client = ClaudeCacheAsyncClient(
+                 verify=verify,
+                 timeout=180,
+                 http2=http2_enabled,
+             )
+
+             # Check if interleaved thinking is enabled for this model
+             # Only applies to Claude 4 models (Opus 4.5, Opus 4.1, Opus 4, Sonnet 4)
+             from code_puppy.config import get_effective_model_settings
+
+             effective_settings = get_effective_model_settings(model_name)
+             interleaved_thinking = effective_settings.get("interleaved_thinking", False)
+
+             default_headers = {}
+             if interleaved_thinking:
+                 default_headers["anthropic-beta"] = "interleaved-thinking-2025-05-14"
+
+             anthropic_client = AsyncAnthropic(
+                 api_key=api_key,
+                 http_client=client,
+                 default_headers=default_headers if default_headers else None,
+             )
+
+             # Ensure cache_control is injected at the Anthropic SDK layer
+             patch_anthropic_client_messages(anthropic_client)
+
              provider = AnthropicProvider(anthropic_client=anthropic_client)
              return AnthropicModel(model_name=model_config["name"], provider=provider)

          elif model_type == "custom_anthropic":
              url, headers, verify, api_key = get_custom_config(model_config)
-             client = create_async_client(headers=headers, verify=verify)
+             if not api_key:
+                 emit_warning(
+                     f"API key is not set for custom Anthropic endpoint; skipping model '{model_config.get('name')}'."
+                 )
+                 return None
+
+             # Use the same caching client as claude_code models
+             if verify is None:
+                 verify = get_cert_bundle_path()
+
+             http2_enabled = get_http2()
+
+             client = ClaudeCacheAsyncClient(
+                 headers=headers,
+                 verify=verify,
+                 timeout=180,
+                 http2=http2_enabled,
+             )
+
+             # Check if interleaved thinking is enabled for this model
+             from code_puppy.config import get_effective_model_settings
+
+             effective_settings = get_effective_model_settings(model_name)
+             interleaved_thinking = effective_settings.get("interleaved_thinking", False)
+
+             default_headers = {}
+             if interleaved_thinking:
+                 default_headers["anthropic-beta"] = "interleaved-thinking-2025-05-14"
+
              anthropic_client = AsyncAnthropic(
                  base_url=url,
                  http_client=client,
                  api_key=api_key,
+                 default_headers=default_headers if default_headers else None,
              )
+
+             # Ensure cache_control is injected at the Anthropic SDK layer
+             patch_anthropic_client_messages(anthropic_client)
+
              provider = AnthropicProvider(anthropic_client=anthropic_client)
              return AnthropicModel(model_name=model_config["name"], provider=provider)
+         elif model_type == "claude_code":
+             url, headers, verify, api_key = get_custom_config(model_config)
+             if model_config.get("oauth_source") == "claude-code-plugin":
+                 try:
+                     from code_puppy.plugins.claude_code_oauth.utils import (
+                         get_valid_access_token,
+                     )
+
+                     refreshed_token = get_valid_access_token()
+                     if refreshed_token:
+                         api_key = refreshed_token
+                         custom_endpoint = model_config.get("custom_endpoint")
+                         if isinstance(custom_endpoint, dict):
+                             custom_endpoint["api_key"] = refreshed_token
+                 except ImportError:
+                     pass
+             if not api_key:
+                 emit_warning(
+                     f"API key is not set for Claude Code endpoint; skipping model '{model_config.get('name')}'."
+                 )
+                 return None
+
+             # Check if interleaved thinking is enabled (defaults to True for OAuth models)
+             from code_puppy.config import get_effective_model_settings
+
+             effective_settings = get_effective_model_settings(model_name)
+             interleaved_thinking = effective_settings.get("interleaved_thinking", True)

+             # Handle anthropic-beta header based on interleaved_thinking setting
+             if "anthropic-beta" in headers:
+                 beta_parts = [p.strip() for p in headers["anthropic-beta"].split(",")]
+                 if interleaved_thinking:
+                     # Ensure interleaved-thinking is in the header
+                     if "interleaved-thinking-2025-05-14" not in beta_parts:
+                         beta_parts.append("interleaved-thinking-2025-05-14")
+                 else:
+                     # Remove interleaved-thinking from the header
+                     beta_parts = [
+                         p for p in beta_parts if "interleaved-thinking" not in p
+                     ]
+                 headers["anthropic-beta"] = ",".join(beta_parts) if beta_parts else None
+                 if headers.get("anthropic-beta") is None:
+                     del headers["anthropic-beta"]
+             elif interleaved_thinking:
+                 # No existing beta header, add one for interleaved thinking
+                 headers["anthropic-beta"] = "interleaved-thinking-2025-05-14"
+
+             # Use a dedicated client wrapper that injects cache_control on /v1/messages
+             if verify is None:
+                 verify = get_cert_bundle_path()
+
+             http2_enabled = get_http2()
+
+             client = ClaudeCacheAsyncClient(
+                 headers=headers,
+                 verify=verify,
+                 timeout=180,
+                 http2=http2_enabled,
+             )
+
+             anthropic_client = AsyncAnthropic(
+                 base_url=url,
+                 http_client=client,
+                 auth_token=api_key,
+             )
+             # Ensure cache_control is injected at the Anthropic SDK layer too
+             # so we don't depend solely on httpx internals.
+             patch_anthropic_client_messages(anthropic_client)
+             anthropic_client.api_key = None
+             anthropic_client.auth_token = api_key
+             provider = AnthropicProvider(anthropic_client=anthropic_client)
+             return AnthropicModel(model_name=model_config["name"], provider=provider)
          elif model_type == "azure_openai":
              azure_endpoint_config = model_config.get("azure_endpoint")
              if not azure_endpoint_config:
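Note: several of the Anthropic branches above toggle the comma-separated anthropic-beta header around the interleaved-thinking-2025-05-14 flag. A standalone sketch of that toggle, mirroring the claude_code branch (the existing header value below is only illustrative):

BETA_FLAG = "interleaved-thinking-2025-05-14"

def toggle_interleaved_thinking(headers: dict, enabled: bool) -> dict:
    # Split the existing comma-separated beta header, if any, into parts.
    parts = [p.strip() for p in headers.get("anthropic-beta", "").split(",") if p.strip()]
    if enabled:
        if BETA_FLAG not in parts:
            parts.append(BETA_FLAG)
    else:
        parts = [p for p in parts if "interleaved-thinking" not in p]
    if parts:
        headers["anthropic-beta"] = ",".join(parts)
    else:
        headers.pop("anthropic-beta", None)  # drop the header entirely when empty
    return headers

print(toggle_interleaved_thinking({"anthropic-beta": "prompt-caching-2024-07-31"}, True))
# -> {'anthropic-beta': 'prompt-caching-2024-07-31,interleaved-thinking-2025-05-14'}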
@@ -163,11 +473,12 @@ class ModelFactory:
                  )
              azure_endpoint = azure_endpoint_config
              if azure_endpoint_config.startswith("$"):
-                 azure_endpoint = os.environ.get(azure_endpoint_config[1:])
+                 azure_endpoint = get_api_key(azure_endpoint_config[1:])
              if not azure_endpoint:
-                 raise ValueError(
-                     f"Azure OpenAI endpoint environment variable '{azure_endpoint_config[1:] if azure_endpoint_config.startswith('$') else ''}' not found or is empty."
+                 emit_warning(
+                     f"Azure OpenAI endpoint '{azure_endpoint_config[1:] if azure_endpoint_config.startswith('$') else azure_endpoint_config}' not found (check config or environment); skipping model '{model_config.get('name')}'."
                  )
+                 return None

              api_version_config = model_config.get("api_version")
              if not api_version_config:
@@ -176,11 +487,12 @@ class ModelFactory:
                  )
              api_version = api_version_config
              if api_version_config.startswith("$"):
-                 api_version = os.environ.get(api_version_config[1:])
+                 api_version = get_api_key(api_version_config[1:])
              if not api_version:
-                 raise ValueError(
-                     f"Azure OpenAI API version environment variable '{api_version_config[1:] if api_version_config.startswith('$') else ''}' not found or is empty."
+                 emit_warning(
+                     f"Azure OpenAI API version '{api_version_config[1:] if api_version_config.startswith('$') else api_version_config}' not found (check config or environment); skipping model '{model_config.get('name')}'."
                  )
+                 return None

              api_key_config = model_config.get("api_key")
              if not api_key_config:
@@ -189,11 +501,12 @@ class ModelFactory:
                  )
              api_key = api_key_config
              if api_key_config.startswith("$"):
-                 api_key = os.environ.get(api_key_config[1:])
+                 api_key = get_api_key(api_key_config[1:])
              if not api_key:
-                 raise ValueError(
-                     f"Azure OpenAI API key environment variable '{api_key_config[1:] if api_key_config.startswith('$') else ''}' not found or is empty."
+                 emit_warning(
+                     f"Azure OpenAI API key '{api_key_config[1:] if api_key_config.startswith('$') else api_key_config}' not found (check config or environment); skipping model '{model_config.get('name')}'."
                  )
+                 return None

              # Configure max_retries for the Azure client, defaulting if not specified in config
              azure_max_retries = model_config.get("max_retries", 2)
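Note: the Azure branch above resolves azure_endpoint, api_version, and api_key through the same $-prefixed indirection, now looked up via get_api_key (config first, then the environment) instead of os.environ alone. A hypothetical models.json entry of that shape, shown as the Python dict json.load would produce (all values are placeholders):

# Hypothetical azure_openai entry; only the "$"-prefixed values are resolved
# at load time via get_api_key("AZURE_OPENAI_ENDPOINT"), etc.
azure_entry = {
    "my-azure-gpt": {
        "type": "azure_openai",
        "name": "gpt-4o",
        "azure_endpoint": "$AZURE_OPENAI_ENDPOINT",
        "api_version": "2024-06-01",       # literal values are used as-is
        "api_key": "$AZURE_OPENAI_API_KEY",
        "max_retries": 2,
    }
}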
@@ -219,42 +532,193 @@ class ModelFactory:
              if api_key:
                  provider_args["api_key"] = api_key
              provider = OpenAIProvider(**provider_args)
-
              model = OpenAIChatModel(model_name=model_config["name"], provider=provider)
+             if model_name == "chatgpt-gpt-5-codex":
+                 model = OpenAIResponsesModel(model_config["name"], provider=provider)
              setattr(model, "provider", provider)
              return model
-
+         elif model_type == "zai_coding":
+             api_key = get_api_key("ZAI_API_KEY")
+             if not api_key:
+                 emit_warning(
+                     f"ZAI_API_KEY is not set (check config or environment); skipping ZAI coding model '{model_config.get('name')}'."
+                 )
+                 return None
+             provider = OpenAIProvider(
+                 api_key=api_key,
+                 base_url="https://api.z.ai/api/coding/paas/v4",
+             )
+             zai_model = ZaiChatModel(
+                 model_name=model_config["name"],
+                 provider=provider,
+             )
+             setattr(zai_model, "provider", provider)
+             return zai_model
+         elif model_type == "zai_api":
+             api_key = get_api_key("ZAI_API_KEY")
+             if not api_key:
+                 emit_warning(
+                     f"ZAI_API_KEY is not set (check config or environment); skipping ZAI API model '{model_config.get('name')}'."
+                 )
+                 return None
+             provider = OpenAIProvider(
+                 api_key=api_key,
+                 base_url="https://api.z.ai/api/paas/v4/",
+             )
+             zai_model = ZaiChatModel(
+                 model_name=model_config["name"],
+                 provider=provider,
+             )
+             setattr(zai_model, "provider", provider)
+             return zai_model
          elif model_type == "custom_gemini":
              url, headers, verify, api_key = get_custom_config(model_config)
-             os.environ["GEMINI_API_KEY"] = api_key
-
-             class CustomGoogleGLAProvider(GoogleGLAProvider):
-                 def __init__(self, *args, **kwargs):
-                     super().__init__(*args, **kwargs)
+             if not api_key:
+                 emit_warning(
+                     f"API key is not set for custom Gemini endpoint; skipping model '{model_config.get('name')}'."
+                 )
+                 return None
+
+             # Check if this is an Antigravity model
+             if model_config.get("antigravity"):
+                 try:
+                     from code_puppy.plugins.antigravity_oauth.token import (
+                         is_token_expired,
+                         refresh_access_token,
+                     )
+                     from code_puppy.plugins.antigravity_oauth.transport import (
+                         create_antigravity_client,
+                     )
+                     from code_puppy.plugins.antigravity_oauth.utils import (
+                         load_stored_tokens,
+                         save_tokens,
+                     )
+
+                     # Try to import custom model for thinking signatures
+                     try:
+                         from code_puppy.plugins.antigravity_oauth.antigravity_model import (
+                             AntigravityModel,
+                         )
+                     except ImportError:
+                         AntigravityModel = None
+
+                     # Get fresh access token (refresh if needed)
+                     tokens = load_stored_tokens()
+                     if not tokens:
+                         emit_warning(
+                             "Antigravity tokens not found; run /antigravity-auth first."
+                         )
+                         return None
+
+                     access_token = tokens.get("access_token", "")
+                     refresh_token = tokens.get("refresh_token", "")
+                     expires_at = tokens.get("expires_at")
+
+                     # Refresh if expired or about to expire (initial check)
+                     if is_token_expired(expires_at):
+                         new_tokens = refresh_access_token(refresh_token)
+                         if new_tokens:
+                             access_token = new_tokens.access_token
+                             refresh_token = new_tokens.refresh_token
+                             expires_at = new_tokens.expires_at
+                             tokens["access_token"] = new_tokens.access_token
+                             tokens["refresh_token"] = new_tokens.refresh_token
+                             tokens["expires_at"] = new_tokens.expires_at
+                             save_tokens(tokens)
+                         else:
+                             emit_warning(
+                                 "Failed to refresh Antigravity token; run /antigravity-auth again."
+                             )
+                             return None
+
+                     # Callback to persist tokens when proactively refreshed during session
+                     def on_token_refreshed(new_tokens):
+                         """Persist new tokens when proactively refreshed."""
+                         try:
+                             updated_tokens = load_stored_tokens() or {}
+                             updated_tokens["access_token"] = new_tokens.access_token
+                             updated_tokens["refresh_token"] = new_tokens.refresh_token
+                             updated_tokens["expires_at"] = new_tokens.expires_at
+                             save_tokens(updated_tokens)
+                             logger.debug(
+                                 "Persisted proactively refreshed Antigravity tokens"
+                             )
+                         except Exception as e:
+                             logger.warning("Failed to persist refreshed tokens: %s", e)
+
+                     project_id = tokens.get(
+                         "project_id", model_config.get("project_id", "")
+                     )
+                     client = create_antigravity_client(
+                         access_token=access_token,
+                         project_id=project_id,
+                         model_name=model_config["name"],
+                         base_url=url,
+                         headers=headers,
+                         refresh_token=refresh_token,
+                         expires_at=expires_at,
+                         on_token_refreshed=on_token_refreshed,
+                     )
+
+                     # Use custom model with direct httpx client
+                     if AntigravityModel:
+                         model = AntigravityModel(
+                             model_name=model_config["name"],
+                             api_key=api_key
+                             or "",  # Antigravity uses OAuth, key may be empty
+                             base_url=url,
+                             http_client=client,
+                         )
+                     else:
+                         model = GeminiModel(
+                             model_name=model_config["name"],
+                             api_key=api_key or "",
+                             base_url=url,
+                             http_client=client,
+                         )

-                 @property
-                 def base_url(self):
-                     return url
+                     return model

-                 @property
-                 def client(self) -> httpx.AsyncClient:
-                     _client = create_async_client(headers=headers, verify=verify)
-                     _client.base_url = self.base_url
-                     return _client
+                 except ImportError:
+                     emit_warning(
+                         f"Antigravity transport not available; skipping model '{model_config.get('name')}'."
+                     )
+                     return None
+             else:
+                 client = create_async_client(headers=headers, verify=verify)

-             google_gla = CustomGoogleGLAProvider(api_key=api_key)
-             model = GeminiModel(model_name=model_config["name"], provider=google_gla)
+                 model = GeminiModel(
+                     model_name=model_config["name"],
+                     api_key=api_key,
+                     base_url=url,
+                     http_client=client,
+                 )
              return model
          elif model_type == "cerebras":
+
+             class ZaiCerebrasProvider(CerebrasProvider):
+                 def model_profile(self, model_name: str) -> ModelProfile | None:
+                     profile = super().model_profile(model_name)
+                     if model_name.startswith("zai"):
+                         from pydantic_ai.profiles.qwen import qwen_model_profile
+
+                         profile = profile.update(qwen_model_profile("qwen-3-coder"))
+                     return profile
+
              url, headers, verify, api_key = get_custom_config(model_config)
+             if not api_key:
+                 emit_warning(
+                     f"API key is not set for Cerebras endpoint; skipping model '{model_config.get('name')}'."
+                 )
+                 return None
+             # Add Cerebras 3rd party integration header
+             headers["X-Cerebras-3rd-Party-Integration"] = "code-puppy"
              client = create_async_client(headers=headers, verify=verify)
              provider_args = dict(
                  api_key=api_key,
                  http_client=client,
              )
-             if api_key:
-                 provider_args["api_key"] = api_key
-             provider = CerebrasProvider(**provider_args)
+             provider = ZaiCerebrasProvider(**provider_args)

              model = OpenAIChatModel(model_name=model_config["name"], provider=provider)
              setattr(model, "provider", provider)
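Note: the Antigravity branch above wires an on_token_refreshed callback into the HTTP client so OAuth tokens refreshed mid-session are written back to storage. A minimal standalone sketch of that persistence pattern; the file path and helper names here are hypothetical stand-ins for the plugin's storage functions, not the package's API:

import json
import pathlib
import time

TOKEN_FILE = pathlib.Path("example_tokens.json")  # hypothetical storage location

def load_tokens() -> dict:
    # Return previously stored tokens, or an empty dict on first run.
    return json.loads(TOKEN_FILE.read_text()) if TOKEN_FILE.exists() else {}

def save_tokens(tokens: dict) -> None:
    TOKEN_FILE.write_text(json.dumps(tokens))

def on_token_refreshed(new_access: str, new_refresh: str, expires_at: float) -> None:
    # Persist whatever the transport just obtained so the next process start
    # does not need to refresh again (mirrors the callback in the diff).
    tokens = load_tokens()
    tokens.update(
        access_token=new_access,
        refresh_token=new_refresh,
        expires_at=expires_at,
    )
    save_tokens(tokens)

on_token_refreshed("new-access", "new-refresh", time.time() + 3600)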
@@ -269,18 +733,23 @@ class ModelFactory:
                  if api_key_config.startswith("$"):
                      # It's an environment variable reference
                      env_var_name = api_key_config[1:]  # Remove the $ prefix
-                     api_key = os.environ.get(env_var_name)
+                     api_key = get_api_key(env_var_name)
                      if api_key is None:
-                         raise ValueError(
-                             f"OpenRouter API key environment variable '{env_var_name}' not found or is empty. "
-                             f"Please set the environment variable: export {env_var_name}=your_value"
+                         emit_warning(
+                             f"OpenRouter API key '{env_var_name}' not found (check config or environment); skipping model '{model_config.get('name')}'."
                          )
+                         return None
                  else:
                      # It's a raw API key value
                      api_key = api_key_config
              else:
-                 # No API key in config, try to get it from the default environment variable
-                 api_key = os.environ.get("OPENROUTER_API_KEY")
+                 # No API key in config, try to get it from config or the default environment variable
+                 api_key = get_api_key("OPENROUTER_API_KEY")
+                 if api_key is None:
+                     emit_warning(
+                         f"OPENROUTER_API_KEY is not set (check config or environment); skipping OpenRouter model '{model_config.get('name')}'."
+                     )
+                     return None

              provider = OpenRouterProvider(api_key=api_key)

@@ -288,6 +757,143 @@ class ModelFactory:
              setattr(model, "provider", provider)
              return model

+         elif model_type == "gemini_oauth":
+             # Gemini OAuth models use the Code Assist API (cloudcode-pa.googleapis.com)
+             # This is a different API than the standard Generative Language API
+             try:
+                 # Try user plugin first, then built-in plugin
+                 try:
+                     from gemini_oauth.config import GEMINI_OAUTH_CONFIG
+                     from gemini_oauth.utils import (
+                         get_project_id,
+                         get_valid_access_token,
+                     )
+                 except ImportError:
+                     from code_puppy.plugins.gemini_oauth.config import (
+                         GEMINI_OAUTH_CONFIG,
+                     )
+                     from code_puppy.plugins.gemini_oauth.utils import (
+                         get_project_id,
+                         get_valid_access_token,
+                     )
+             except ImportError as exc:
+                 emit_warning(
+                     f"Gemini OAuth plugin not available; skipping model '{model_config.get('name')}'. "
+                     f"Error: {exc}"
+                 )
+                 return None
+
+             # Get a valid access token (refreshing if needed)
+             access_token = get_valid_access_token()
+             if not access_token:
+                 emit_warning(
+                     f"Failed to get valid Gemini OAuth token; skipping model '{model_config.get('name')}'. "
+                     "Run /gemini-auth to re-authenticate."
+                 )
+                 return None
+
+             # Get project ID from stored tokens
+             project_id = get_project_id()
+             if not project_id:
+                 emit_warning(
+                     f"No Code Assist project ID found; skipping model '{model_config.get('name')}'. "
+                     "Run /gemini-auth to re-authenticate."
+                 )
+                 return None
+
+             # Import the Code Assist model wrapper
+             from code_puppy.gemini_code_assist import GeminiCodeAssistModel
+
+             # Create the Code Assist model
+             model = GeminiCodeAssistModel(
+                 model_name=model_config["name"],
+                 access_token=access_token,
+                 project_id=project_id,
+                 api_base_url=GEMINI_OAUTH_CONFIG["api_base_url"],
+                 api_version=GEMINI_OAUTH_CONFIG["api_version"],
+             )
+             return model
+
+         elif model_type == "chatgpt_oauth":
+             # ChatGPT OAuth models use the Codex API at chatgpt.com
+             try:
+                 try:
+                     from chatgpt_oauth.config import CHATGPT_OAUTH_CONFIG
+                     from chatgpt_oauth.utils import (
+                         get_valid_access_token,
+                         load_stored_tokens,
+                     )
+                 except ImportError:
+                     from code_puppy.plugins.chatgpt_oauth.config import (
+                         CHATGPT_OAUTH_CONFIG,
+                     )
+                     from code_puppy.plugins.chatgpt_oauth.utils import (
+                         get_valid_access_token,
+                         load_stored_tokens,
+                     )
+             except ImportError as exc:
+                 emit_warning(
+                     f"ChatGPT OAuth plugin not available; skipping model '{model_config.get('name')}'. "
+                     f"Error: {exc}"
+                 )
+                 return None
+
+             # Get a valid access token (refreshing if needed)
+             access_token = get_valid_access_token()
+             if not access_token:
+                 emit_warning(
+                     f"Failed to get valid ChatGPT OAuth token; skipping model '{model_config.get('name')}'. "
+                     "Run /chatgpt-auth to authenticate."
+                 )
+                 return None
+
+             # Get account_id from stored tokens (required for ChatGPT-Account-Id header)
+             tokens = load_stored_tokens()
+             account_id = tokens.get("account_id", "") if tokens else ""
+             if not account_id:
+                 emit_warning(
+                     f"No account_id found in ChatGPT OAuth tokens; skipping model '{model_config.get('name')}'. "
+                     "Run /chatgpt-auth to re-authenticate."
+                 )
+                 return None
+
+             # Build headers for ChatGPT Codex API
+             originator = CHATGPT_OAUTH_CONFIG.get("originator", "codex_cli_rs")
+             client_version = CHATGPT_OAUTH_CONFIG.get("client_version", "0.72.0")
+
+             headers = {
+                 "ChatGPT-Account-Id": account_id,
+                 "originator": originator,
+                 "User-Agent": f"{originator}/{client_version}",
+             }
+             # Merge with any headers from model config
+             config_headers = model_config.get("custom_endpoint", {}).get("headers", {})
+             headers.update(config_headers)
+
+             # Get base URL - Codex API uses chatgpt.com, not api.openai.com
+             base_url = model_config.get("custom_endpoint", {}).get(
+                 "url", CHATGPT_OAUTH_CONFIG["api_base_url"]
+             )
+
+             # Create HTTP client with Codex interceptor for store=false injection
+             from code_puppy.chatgpt_codex_client import create_codex_async_client
+
+             verify = get_cert_bundle_path()
+             client = create_codex_async_client(headers=headers, verify=verify)
+
+             provider = OpenAIProvider(
+                 api_key=access_token,
+                 base_url=base_url,
+                 http_client=client,
+             )
+
+             # ChatGPT Codex API only supports Responses format
+             model = OpenAIResponsesModel(
+                 model_name=model_config["name"], provider=provider
+             )
+             setattr(model, "provider", provider)
+             return model
+
          elif model_type == "round_robin":
              # Get the list of model names to use in the round-robin
              model_names = model_config.get("models")