@sekora_ai/claude-toggle 1.0.0 → 1.1.0

package/PROVIDERS.md CHANGED
@@ -141,6 +141,107 @@ Then use it:
141
141
  }
142
142
  ```
143
143
 
144
+ ## CCR-Routed Providers
145
+
146
+ For providers requiring request/response transformation (like Google Gemini), claude-toggle can use [Claude Code Router (CCR)](https://github.com/musistudio/claude-code-router) as a middleware proxy.
147
+
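+ As a quick orientation before the schema details, a typical session with the bundled `gemini-ccr` provider looks roughly like this (a sketch; the flags are documented in the README and further below):
+
+ ```bash
+ export GEMINI_API_KEY="your-key-here"
+ ./claude-toggle --provider gemini-ccr                        # installs and starts CCR if needed
+ ./claude-toggle --provider gemini-ccr --ccr-mode reasoning   # route "think" traffic to the deep-think model
+ ```
+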
148
+ ### CCR Provider Schema
149
+
150
+ CCR providers use additional fields:
151
+
152
+ ```json
153
+ {
154
+ "version": "2.0",
155
+ "ccr": {
156
+ "enabled": true,
157
+ "host": "127.0.0.1",
158
+ "port": 3456,
159
+ "log_level": "info",
160
+ "api_timeout_ms": 600000,
161
+ "auto_install": true,
162
+ "auto_start": true
163
+ },
164
+ "providers": {
165
+ "gemini-ccr": {
166
+ "description": "Google Gemini 3 via Claude Code Router",
167
+ "enabled": true,
168
+ "ccr_provider": true,
169
+ "ccr_config": {
170
+ "name": "gemini",
171
+ "api_base_url": "https://generativelanguage.googleapis.com/v1beta/models/",
172
+ "api_key_env": "GEMINI_API_KEY",
173
+ "models": {
174
+ "default": "gemini-3-pro",
175
+ "background": "gemini-3-flash",
176
+ "think": "gemini-3-deep-think"
177
+ },
178
+ "transformer": {
179
+ "use": ["gemini"]
180
+ }
181
+ },
182
+ "routing_modes": {
183
+ "standard": {
184
+ "default": "gemini-3-pro",
185
+ "background": "gemini-3-flash"
186
+ },
187
+ "reasoning": {
188
+ "default": "gemini-3-pro",
189
+ "think": "gemini-3-deep-think",
190
+ "background": "gemini-3-flash"
191
+ },
192
+ "eco": {
193
+ "default": "gemini-3-flash",
194
+ "background": "gemini-3-flash"
195
+ }
196
+ },
197
+ "validation": {
198
+ "required_env_vars": ["GEMINI_API_KEY"]
199
+ }
200
+ }
201
+ }
202
+ }
203
+ ```
204
+
205
+ ### CCR-Specific Fields
206
+
207
+ | Field | Type | Description |
208
+ |-------|------|-------------|
209
+ | `ccr_provider` | boolean | Indicates this provider routes through CCR |
210
+ | `ccr_config.name` | string | Provider identifier in CCR config |
211
+ | `ccr_config.api_base_url` | string | API endpoint URL |
212
+ | `ccr_config.api_key_env` | string | Environment variable containing API key |
213
+ | `ccr_config.models` | object | Model mapping for router slots |
214
+ | `ccr_config.transformer` | object | CCR transformer configuration |
215
+ | `routing_modes` | object | Named routing configurations |
216
+
217
+ ### CCR Global Settings (`ccr` section)
218
+
219
+ | Field | Type | Default | Description |
220
+ |-------|------|---------|-------------|
221
+ | `enabled` | boolean | true | Enable CCR integration |
222
+ | `host` | string | "127.0.0.1" | CCR server host |
223
+ | `port` | number | 3456 | CCR server port |
224
+ | `log_level` | string | "info" | CCR log level |
225
+ | `api_timeout_ms` | number | 600000 | API request timeout in milliseconds |
226
+ | `auto_install` | boolean | true | Auto-install CCR via npm if missing |
227
+ | `auto_start` | boolean | true | Auto-start CCR server when needed |
228
+
229
+ ### Routing Modes
230
+
231
+ CCR providers can define multiple routing modes:
232
+
233
+ - **standard**: Default configuration for general tasks
234
+ - **reasoning**: Uses the `think` router for complex reasoning tasks
235
+ - **eco**: Uses fastest/cheapest models for cost-sensitive tasks
236
+
237
+ Usage:
238
+
239
+ ```bash
240
+ ./claude-toggle --provider gemini-ccr --ccr-mode reasoning
241
+ ```
242
+
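+ To see how the selected mode is translated for CCR, inspect the generated router config after launch; each slot maps to a `<provider>,<model>` pair (a sketch based on the schema example above):
+
+ ```bash
+ cat ~/.claude-code-router/config.json
+ # For --ccr-mode reasoning with the example above, the Router section looks like:
+ #   "Router": {
+ #     "default": "gemini,gemini-3-pro",
+ #     "think": "gemini,gemini-3-deep-think",
+ #     "background": "gemini,gemini-3-flash"
+ #   }
+ ```
+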
243
+ See [docs/CCR_INTEGRATION.md](docs/CCR_INTEGRATION.md) for comprehensive CCR documentation.
244
+
144
245
  ## Tips
145
246
 
146
247
  1. **Variable References**: Use `${VAR_NAME}` in `value` fields to reference environment variables. The script will expand these at runtime.
@@ -153,6 +254,96 @@ Then use it:
153
254
 
154
255
  5. **Validation**: Use `required_env_vars` to ensure credentials are set before launching. The script will show helpful setup instructions if they're missing.
155
256
 
257
+ 6. **CCR Providers**: For providers requiring request/response transformation, set `ccr_provider: true` and define `ccr_config`.
258
+
259
+ 7. **LiteLLM Providers**: For routing through [LiteLLM](https://docs.litellm.ai/), point `ANTHROPIC_BASE_URL` to your LiteLLM server and use model names like `gemini/gemini-2.0-flash`. LiteLLM returns actual model names in responses.
260
+
261
+ ## LiteLLM Integration
262
+
263
+ [LiteLLM](https://docs.litellm.ai/) is a unified API gateway for 100+ AI models, with official support documented in [Claude Code's LLM Gateway documentation](https://code.claude.com/docs/en/llm-gateway).
264
+
265
+ ### Why Use LiteLLM?
266
+
267
+ - **Shows actual model names** in Claude Code UI (e.g., "gemini-2.0-flash" instead of "Opus 4.5")
268
+ - **100+ provider support** - Gemini, OpenAI, DeepSeek, Anthropic, and more
269
+ - **Official Anthropic compatibility** - Native Anthropic API format support
270
+ - **Load balancing and fallbacks** - Production-ready features
271
+ - **Single API key** - Manage all providers through one gateway
272
+
273
+ ### LiteLLM Provider Example
274
+
275
+ ```json
276
+ "litellm-gemini": {
277
+ "description": "Google Gemini 2 via LiteLLM (shows model name)",
278
+ "enabled": true,
279
+ "env_vars": {
280
+ "ANTHROPIC_API_KEY": {
281
+ "value": "${GEMINI_API_KEY}",
282
+ "required": true
283
+ },
284
+ "ANTHROPIC_BASE_URL": {
285
+ "value": "${LITELLM_BASE_URL:-http://localhost:4000}"
286
+ },
287
+ "ANTHROPIC_DEFAULT_OPUS_MODEL": {
288
+ "value": "gemini/gemini-2.0-flash"
289
+ }
290
+ },
291
+ "validation": {
292
+ "required_env_vars": ["GEMINI_API_KEY"]
293
+ }
294
+ }
295
+ ```
296
+
297
+ ### Setting Up LiteLLM
298
+
299
+ 1. **Install LiteLLM:**
300
+ ```bash
301
+ pip install 'litellm[proxy]'
302
+ ```
303
+
304
+ 2. **Start the proxy server:**
305
+ ```bash
306
+ # For Gemini (requires --drop_params for Claude Code compatibility)
307
+ litellm --model gemini/gemini-2.0-flash --port 4000 --drop_params
308
+
309
+ # For OpenAI
310
+ litellm --model openai/gpt-4o --port 4000
311
+
312
+ # Background mode
313
+ litellm --model gemini/gemini-2.0-flash --port 4000 --drop_params &
314
+ ```
315
+
316
+ 3. **Set provider API key:**
317
+ ```bash
318
+ export GEMINI_API_KEY="your-gemini-key"
319
+ # or
320
+ export OPENAI_API_KEY="your-openai-key"
321
+ ```
322
+
323
+ 4. **Verify LiteLLM is running:**
324
+ ```bash
325
+ nc -z localhost 4000 && echo "LiteLLM is running" || echo "LiteLLM is not running"
326
+ ```
327
+
328
+ ### Available LiteLLM Providers
329
+
330
+ | Provider | Description | Required Env Var |
331
+ |----------|-------------|------------------|
332
+ | `litellm-gemini` | Google Gemini 2 via LiteLLM | `GEMINI_API_KEY` |
333
+ | `litellm-openai` | OpenAI GPT-4 via LiteLLM | `OPENAI_API_KEY` |
334
+ | `litellm-generic` | Custom model via `LITELLM_MODEL` env var | Provider-specific key |
335
+
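+ The `litellm-openai` entry ships disabled in the bundled `providers.json`, so using it is a config edit plus the usual proxy start (a sketch; model names follow the format described below):
+
+ ```bash
+ # Assumes "enabled": true has been set on litellm-openai in providers.json
+ litellm --model openai/gpt-4o --port 4000 &
+ export OPENAI_API_KEY="your-openai-key"
+ ./claude-toggle --provider litellm-openai
+ ```
+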
336
+ ### Model Name Format
337
+
338
+ LiteLLM uses the format `provider/model`:
339
+ - `gemini/gemini-2.0-flash`
340
+ - `gemini/gemini-1.5-pro`
341
+ - `openai/gpt-4o`
342
+ - `openai/gpt-4o-mini`
343
+ - `deepseek/deepseek-chat`
344
+
345
+ See [LiteLLM providers documentation](https://docs.litellm.ai/docs/providers) for the full list.
346
+
156
347
  ## Testing Your Configuration
157
348
 
158
349
  After adding a new provider, validate it:
package/README.md CHANGED
@@ -9,6 +9,8 @@ A provider-agnostic utility for toggling between different Claude API providers
9
9
  - **Pass-through design** - All Claude Code CLI options work transparently
10
10
  - **Actionable error messages** - Missing credentials trigger detailed setup instructions
11
11
  - **Extensible** - Add new providers by editing a single JSON file
12
+ - **CCR Integration** - Route through [Claude Code Router](https://github.com/musistudio/claude-code-router) for providers like Gemini
13
+ - **Advanced routing modes** - Standard, reasoning (deep think), and eco modes for CCR providers
12
14
 
13
15
  ## Prerequisites
14
16
 
@@ -30,7 +32,7 @@ This works on macOS, Linux, and Windows (via WSL or Git Bash).
30
32
 
31
33
  ```bash
32
34
  # Clone the repository
33
- git clone https://github.com/seansekora/claude-toggle.git
35
+ git clone https://gitlab.com/sekora-ai/claude-code/claude-toggle.git
34
36
  cd claude-toggle
35
37
  chmod +x claude-toggle
36
38
 
@@ -89,6 +91,16 @@ All unrecognized options are forwarded to the Claude Code CLI:
89
91
  | `-h, --help` | Show help message |
90
92
  | `--glm` | Legacy alias for `--provider glm` |
91
93
 
94
+ ### CCR Options (for CCR-routed providers)
95
+
96
+ | Option | Description |
97
+ |--------|-------------|
98
+ | `--ccr-mode <mode>` | Select routing mode: `standard`, `reasoning`, `eco` |
99
+ | `--ccr-status` | Show CCR server status |
100
+ | `--ccr-restart` | Restart CCR server |
101
+ | `--ccr-install` | Install CCR globally via npm |
102
+ | `--ccr-stop` | Stop CCR server |
103
+
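+ For example (see the Examples section below for full walkthroughs):
+
+ ```bash
+ ./claude-toggle --ccr-status                           # is the router up?
+ ./claude-toggle --provider gemini-ccr --ccr-mode eco   # launch with the fast/cheap routing mode
+ ```
+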
92
104
  ## Configuration
93
105
 
94
106
  ### Configuration File Location
@@ -168,10 +180,12 @@ See [PROVIDERS.md](PROVIDERS.md) for detailed instructions on adding new provide
168
180
 
169
181
  ## Built-in Providers
170
182
 
171
- | Provider | Description | Required Env Var |
172
- |----------|-------------|------------------|
173
- | `anthropic` | Anthropic's official API (default) | None (uses default auth) |
174
- | `glm` | Z.AI GLM models via proxy | `ZAI_API_KEY` |
183
+ | Provider | Description | Required Env Var | Routing |
184
+ |----------|-------------|------------------|---------|
185
+ | `anthropic` | Anthropic's official API (default) | None (uses default auth) | Direct |
186
+ | `glm` | Z.AI GLM models via proxy | `ZAI_API_KEY` | Direct |
187
+ | `gemini-ccr` | Google Gemini via CCR | `GEMINI_API_KEY` | CCR |
188
+ | `litellm-gemini` | Google Gemini via LiteLLM | `GEMINI_API_KEY` | LiteLLM |
175
189
 
176
190
  ## Examples
177
191
 
@@ -203,6 +217,43 @@ export ZAI_API_KEY="your-key-here"
203
217
  # Output: Using provider: glm
204
218
  ```
205
219
 
220
+ ### Using Google Gemini via CCR
221
+
222
+ ```bash
223
+ # Set your API key
224
+ export GEMINI_API_KEY="your-key-here"
225
+
226
+ # Launch with standard mode (default)
227
+ ./claude-toggle --provider gemini-ccr
228
+
229
+ # Launch with reasoning mode (uses gemini-3-deep-think)
230
+ ./claude-toggle --provider gemini-ccr --ccr-mode reasoning
231
+
232
+ # Launch with eco mode (uses gemini-3-flash for cost savings)
233
+ ./claude-toggle --provider gemini-ccr --ccr-mode eco
234
+
235
+ # Check CCR status
236
+ ./claude-toggle --ccr-status
237
+ ```
238
+
239
+ ### Using Google Gemini via LiteLLM (Shows Model Name)
240
+
241
+ ```bash
242
+ # Install and start LiteLLM
243
+ pip install 'litellm[proxy]'
244
+ litellm --model gemini/gemini-2.0-flash --port 4000 --drop_params &
245
+
246
+ # Set your API key
247
+ export GEMINI_API_KEY="your-key-here"
248
+
249
+ # Launch (will show "gemini-2.0-flash" in UI)
250
+ ./claude-toggle --provider litellm-gemini
251
+ ```
252
+
253
+ **Note**: The `--drop_params` flag is required for Gemini compatibility with Claude Code. It drops unsupported parameters like `reasoning_effort`.
254
+
255
+ **Note**: LiteLLM displays the actual model name in the Claude Code UI, unlike CCR, which still shows "Opus 4.5".
256
+
206
257
  ## CI/CD Setup (For Maintainers)
207
258
 
208
259
  This project uses GitLab CI/CD to automatically publish to npm when version tags are pushed.
@@ -273,6 +324,7 @@ claude-toggle/
273
324
  │ ├── wrapper.js # Node.js wrapper for npm
274
325
  │ └── postinstall.js # Post-install config setup
275
326
  ├── docs/
327
+ │ ├── CCR_INTEGRATION.md # CCR integration guide
276
328
  │ └── NPM_PUBLISHING.md # Publishing guide
277
329
  ├── .claude/
278
330
  │ └── commands/
@@ -332,12 +384,12 @@ claude-toggle is a bash script and requires a Unix-like environment:
332
384
  1. **Windows Subsystem for Linux (WSL)** - Recommended
333
385
  ```bash
334
386
  # Install WSL, then inside WSL:
335
- npm install -g claude-toggle
387
+ npm install -g @sekora_ai/claude-toggle
336
388
  ```
337
389
 
338
390
  2. **Git Bash** - With Python 3 installed
339
391
  ```bash
340
- npm install -g claude-toggle
392
+ npm install -g @sekora_ai/claude-toggle
341
393
  ```
342
394
 
343
395
  ## Troubleshooting
package/claude-toggle CHANGED
@@ -9,7 +9,7 @@ set -e
9
9
  # =============================================
10
10
  # Script Constants
11
11
  # =============================================
12
- SCRIPT_VERSION="1.0.0"
12
+ SCRIPT_VERSION="1.1.0"
13
13
  SCRIPT_NAME="$(basename "$0")"
14
14
  SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
15
15
 
@@ -27,6 +27,15 @@ CLAUDE_ARGS=()
27
27
  ACTION="launch" # launch, list, validate, help
28
28
  VERBOSE=false
29
29
 
30
+ # CCR (Claude Code Router) Settings
31
+ CCR_CONFIG_DIR="$HOME/.claude-code-router"
32
+ CCR_CONFIG_FILE="$CCR_CONFIG_DIR/config.json"
33
+ CCR_PACKAGE="@musistudio/claude-code-router"
34
+ CCR_DEFAULT_PORT=3456
35
+ CCR_DEFAULT_HOST="127.0.0.1"
36
+ CCR_MODE="" # standard, reasoning, eco
37
+ CCR_ACTION="" # status, restart, install, stop
38
+
30
39
  # =============================================
31
40
  # Usage Information
32
41
  # =============================================
@@ -43,6 +52,13 @@ OPTIONS:
43
52
  -h, --help Show this help message
44
53
  --validate Validate provider configuration without launching
45
54
 
55
+ CCR OPTIONS (for CCR-routed providers like gemini-ccr):
56
+ --ccr-mode <mode> Select routing mode: standard, reasoning, eco
57
+ --ccr-status Show CCR server status
58
+ --ccr-restart Restart CCR server
59
+ --ccr-install Install CCR globally via npm
60
+ --ccr-stop Stop CCR server
61
+
46
62
  EXAMPLES:
47
63
  # Launch with default provider (anthropic)
48
64
  $SCRIPT_NAME
@@ -50,9 +66,21 @@ EXAMPLES:
50
66
  # Launch with specific provider
51
67
  $SCRIPT_NAME --provider glm
52
68
 
69
+ # Use Gemini via CCR with standard mode
70
+ $SCRIPT_NAME --provider gemini-ccr
71
+
72
+ # Use Gemini with reasoning mode (deep think)
73
+ $SCRIPT_NAME --provider gemini-ccr --ccr-mode reasoning
74
+
75
+ # Use Gemini with eco mode (fast/cheap)
76
+ $SCRIPT_NAME --provider gemini-ccr --ccr-mode eco
77
+
53
78
  # List available providers
54
79
  $SCRIPT_NAME --list-providers
55
80
 
81
+ # Check CCR server status
82
+ $SCRIPT_NAME --ccr-status
83
+
56
84
  # Pass additional options to claude
57
85
  $SCRIPT_NAME --provider glm --dangerously-skip-permissions
58
86
 
@@ -221,6 +249,76 @@ To set up your ANTHROPIC_API_KEY:
221
249
  3. Verify the key is set:
222
250
  echo $ANTHROPIC_API_KEY
223
251
 
252
+ EOF
253
+ ;;
254
+ GEMINI_API_KEY)
255
+ cat >&2 << 'EOF'
256
+
257
+ To set up your GEMINI_API_KEY:
258
+
259
+ 1. Get your API key from Google AI Studio:
260
+ - Visit: https://aistudio.google.com/apikey
261
+ - Sign in with your Google account
262
+ - Click "Create API Key"
263
+ - Copy your API key
264
+
265
+ 2. Set the environment variable:
266
+
267
+ # For current session only (temporary):
268
+ export GEMINI_API_KEY="your-api-key-here"
269
+
270
+ # For permanent setup (recommended):
271
+ # Add this line to your shell configuration file:
272
+
273
+ # For bash (~/.bashrc or ~/.bash_profile):
274
+ echo 'export GEMINI_API_KEY="your-api-key-here"' >> ~/.bashrc
275
+ source ~/.bashrc
276
+
277
+ # For zsh (~/.zshrc):
278
+ echo 'export GEMINI_API_KEY="your-api-key-here"' >> ~/.zshrc
279
+ source ~/.zshrc
280
+
281
+ # For fish (~/.config/fish/config.fish):
282
+ echo 'set -x GEMINI_API_KEY "your-api-key-here"' >> ~/.config/fish/config.fish
283
+
284
+ 3. Verify the key is set:
285
+ echo $GEMINI_API_KEY
286
+
287
+ EOF
288
+ ;;
289
+ LITELLM*)
290
+ cat >&2 << 'EOF'
291
+
292
+ LiteLLM is not running. To set up LiteLLM:
293
+
294
+ 1. Install LiteLLM:
295
+ pip install 'litellm[proxy]'
296
+ # Or: pip3 install 'litellm[proxy]'
297
+
298
+ 2. Start the LiteLLM proxy server:
299
+ # For Gemini:
300
+ litellm --model gemini/gemini-2.0-flash --port 4000
301
+
302
+ # For OpenAI:
303
+ litellm --model openai/gpt-4o --port 4000
304
+
305
+ # Or run in background:
306
+ litellm --model gemini/gemini-2.0-flash --port 4000 &
307
+
308
+ 3. Set your provider's API key:
309
+
310
+ # For Gemini:
311
+ export GEMINI_API_KEY="your-gemini-key"
312
+
313
+ # For OpenAI:
314
+ export OPENAI_API_KEY="your-openai-key"
315
+
316
+ 4. Verify LiteLLM is running:
317
+ nc -z localhost 4000 && echo "LiteLLM is running"
318
+
319
+ 5. Optionally set a custom LiteLLM URL:
320
+ export LITELLM_BASE_URL="http://localhost:4000"
321
+
224
322
  EOF
225
323
  ;;
226
324
  *)
@@ -255,6 +353,235 @@ EOF
255
353
  esac
256
354
  }
257
355
 
356
+ # =============================================
357
+ # CCR (Claude Code Router) Management
358
+ # =============================================
359
+
360
+ # Check if CCR is installed
361
+ check_ccr_installed() {
362
+ if command -v ccr &> /dev/null; then
363
+ return 0
364
+ else
365
+ return 1
366
+ fi
367
+ }
368
+
369
+ # Install CCR via npm
370
+ install_ccr() {
371
+ echo "Installing Claude Code Router..." >&2
372
+ if command -v npm &> /dev/null; then
373
+ npm install -g "$CCR_PACKAGE" 2>&1 | sed 's/^/ /' >&2
374
+ # Check npm's own exit status; the pipeline's status is sed's
375
+ if [[ "${PIPESTATUS[0]}" -eq 0 ]]; then
376
+ echo "CCR installed successfully" >&2
377
+ return 0
378
+ else
379
+ echo "Error: Failed to install CCR" >&2
380
+ return 1
381
+ fi
382
+ else
383
+ echo "Error: npm not found. Please install Node.js and npm first." >&2
384
+ return 1
385
+ fi
386
+ }
387
+
388
+ # Check if CCR server is running
389
+ check_ccr_running() {
390
+ local status_output
391
+ status_output=$(ccr status 2>&1)
392
+ if echo "$status_output" | grep -qi "running\|active\|listening\|started"; then
393
+ return 0
394
+ else
395
+ return 1
396
+ fi
397
+ }
398
+
399
+ # Start CCR server
400
+ start_ccr() {
401
+ if check_ccr_running; then
402
+ [[ "$VERBOSE" == true ]] && echo "CCR already running" >&2
403
+ return 0
404
+ fi
405
+
406
+ echo "Starting Claude Code Router..." >&2
407
+ ccr start 2>&1 | while read -r line; do
408
+ [[ "$VERBOSE" == true ]] && echo " $line" >&2
409
+ done
410
+
411
+ # Wait for CCR to become available
412
+ local max_wait=10
413
+ local waited=0
414
+ while [[ $waited -lt $max_wait ]]; do
415
+ if check_ccr_running; then
416
+ echo "CCR started successfully" >&2
417
+ return 0
418
+ fi
419
+ sleep 1
420
+ waited=$((waited + 1))   # ((waited++)) returns non-zero when waited is 0 and would trip set -e
421
+ done
422
+
423
+ echo "Error: CCR failed to start within ${max_wait}s" >&2
424
+ return 1
425
+ }
426
+
427
+ # Stop CCR server
428
+ stop_ccr() {
429
+ if ! check_ccr_running; then
430
+ [[ "$VERBOSE" == true ]] && echo "CCR not running" >&2
431
+ return 0
432
+ fi
433
+
434
+ echo "Stopping Claude Code Router..." >&2
435
+ ccr stop 2>&1
436
+ return $?
437
+ }
438
+
439
+ # Restart CCR server
440
+ restart_ccr() {
441
+ echo "Restarting Claude Code Router..." >&2
442
+ ccr restart 2>&1
443
+ return $?
444
+ }
445
+
446
+ # Get CCR status
447
+ get_ccr_status() {
448
+ if ! check_ccr_installed; then
449
+ echo "CCR Status: Not installed"
450
+ echo " Install with: $SCRIPT_NAME --ccr-install"
451
+ return 1
452
+ fi
453
+
454
+ echo "CCR Status:"
455
+ ccr status 2>&1 | sed 's/^/ /'
456
+
457
+ if [[ -f "$CCR_CONFIG_FILE" ]]; then
458
+ echo ""
459
+ echo "Config: $CCR_CONFIG_FILE"
460
+ else
461
+ echo ""
462
+ echo "Config: Not generated (run with CCR provider to generate)"
463
+ fi
464
+ }
465
+
466
+ # Generate CCR config.json from providers.json
467
+ generate_ccr_config() {
468
+ local provider="$1"
469
+ local routing_mode="${2:-standard}"
470
+ local config_file="$3"
471
+
472
+ # Ensure CCR config directory exists
473
+ mkdir -p "$CCR_CONFIG_DIR"
474
+
475
+ # Generate config using Python
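+ # (the heredoc delimiter is unquoted, so bash expands '$config_file', '$provider',
+ # '$routing_mode' and '$CCR_CONFIG_FILE' inside the Python source before it runs)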
476
+ python3 << PYTHON_SCRIPT
477
+ import json
478
+ import os
479
+ import sys
480
+
481
+ config_file = '$config_file'
482
+ provider_name = '$provider'
483
+ routing_mode = '$routing_mode'
484
+ ccr_config_file = '$CCR_CONFIG_FILE'
485
+
486
+ try:
487
+ with open(config_file, 'r') as f:
488
+ data = json.load(f)
489
+
490
+ provider = data.get('providers', {}).get(provider_name)
491
+ if not provider:
492
+ print(f"Error: Provider '{provider_name}' not found", file=sys.stderr)
493
+ sys.exit(1)
494
+
495
+ if not provider.get('ccr_provider'):
496
+ print(f"Error: Provider '{provider_name}' is not a CCR provider", file=sys.stderr)
497
+ sys.exit(1)
498
+
499
+ ccr_config = provider.get('ccr_config', {})
500
+ routing_modes = provider.get('routing_modes', {})
501
+ global_ccr = data.get('ccr', {})
502
+
503
+ # Get routing configuration for selected mode
504
+ mode_config = routing_modes.get(routing_mode, routing_modes.get('standard', {}))
505
+
506
+ # Build CCR config.json
507
+ ccr_output = {
508
+ "HOST": global_ccr.get('host', '127.0.0.1'),
509
+ "PORT": global_ccr.get('port', 3456),
510
+ "LOG": True,
511
+ "LOG_LEVEL": global_ccr.get('log_level', 'info'),
512
+ "API_TIMEOUT_MS": global_ccr.get('api_timeout_ms', 600000),
513
+ "Providers": [
514
+ {
515
+ "name": ccr_config.get('name', provider_name),
516
+ "api_base_url": ccr_config.get('api_base_url'),
517
+ "api_key": f"\${{{ccr_config.get('api_key_env')}}}",
518
+ "models": list(set(
519
+ list(ccr_config.get('models', {}).values()) +
520
+ list(mode_config.values())
521
+ )),
522
+ "transformer": ccr_config.get('transformer', {})
523
+ }
524
+ ],
525
+ "Router": {}
526
+ }
527
+
528
+ # Build Router section from mode_config
529
+ provider_prefix = ccr_config.get('name', provider_name)
530
+ for slot, model in mode_config.items():
531
+ ccr_output["Router"][slot] = f"{provider_prefix},{model}"
532
+
533
+ # Ensure at least default is set
534
+ if 'default' not in ccr_output["Router"]:
535
+ default_model = ccr_config.get('models', {}).get('default', 'unknown')
536
+ ccr_output["Router"]["default"] = f"{provider_prefix},{default_model}"
537
+
538
+ # Write CCR config
539
+ with open(ccr_config_file, 'w') as f:
540
+ json.dump(ccr_output, f, indent=2)
541
+
542
+ print(f"Generated CCR config at {ccr_config_file}", file=sys.stderr)
543
+
544
+ except Exception as e:
545
+ print(f"Error generating CCR config: {e}", file=sys.stderr)
546
+ sys.exit(1)
547
+ PYTHON_SCRIPT
548
+ }
549
+
550
+ # Ensure CCR is ready (install, generate config, start)
551
+ ensure_ccr_ready() {
552
+ local provider="$1"
553
+ local routing_mode="$2"
554
+ local config_file="$3"
555
+
556
+ # Check CCR global settings
557
+ local auto_install
558
+ auto_install=$(parse_json "$config_file" "ccr.auto_install" 2>/dev/null || echo "true")
559
+
560
+ # Install CCR if needed
561
+ if ! check_ccr_installed; then
562
+ if [[ "$auto_install" == "true" || "$auto_install" == "True" ]]; then
563
+ install_ccr || return 1
564
+ else
565
+ echo "Error: CCR not installed and auto_install is disabled" >&2
566
+ echo "Install manually: npm install -g $CCR_PACKAGE" >&2
567
+ return 1
568
+ fi
569
+ fi
570
+
571
+ # Generate CCR config
572
+ generate_ccr_config "$provider" "$routing_mode" "$config_file" || return 1
573
+
574
+ # Start or restart CCR
575
+ if check_ccr_running; then
576
+ # Config changed, restart
577
+ restart_ccr || return 1
578
+ else
579
+ start_ccr || return 1
580
+ fi
581
+
582
+ return 0
583
+ }
584
+
258
585
  # =============================================
259
586
  # Provider Validation
260
587
  # =============================================
@@ -299,6 +626,39 @@ validate_provider() {
299
626
  fi
300
627
  fi
301
628
 
629
+ # Check if LiteLLM server is running for litellm providers
630
+ if [[ "$provider" == litellm-* ]]; then
631
+ local litellm_url
632
+ litellm_url=$(parse_json "$config_file" "providers.$provider.env_vars.ANTHROPIC_BASE_URL.value" 2>/dev/null || echo "http://localhost:4000")
633
+
634
+ # Expand ${VAR} and ${VAR:-default} references
636
+ if [[ "$litellm_url" == *\$\{* ]]; then
637
+ while [[ "$litellm_url" == *\$\{* ]]; do
638
+ local var_expr="${litellm_url#*\$\{}"
639
+ var_expr="${var_expr%%\}*}"
640
+ local var_name="${var_expr%%:-*}"
641
+ local var_default=""; [[ "$var_expr" == *:-* ]] && var_default="${var_expr#*:-}"
642
+ local var_value="${!var_name:-$var_default}"
643
+ litellm_url="${litellm_url/\$\{$var_expr\}/$var_value}"
643
+ done
644
+ fi
645
+
646
+ # Extract host and port for TCP check
647
+ local check_url="${litellm_url#*://}"
648
+ local host="${check_url%:*}"
649
+ local port="${check_url#*:}"; port="${port%%/*}"   # tolerate a trailing path such as /v1
650
+
651
+ # Check if LiteLLM is running using TCP connection (not HTTP endpoint)
652
+ if ! nc -z -w5 "$host" "$port" 2>/dev/null; then
653
+ echo "Error: LiteLLM server is not running at $litellm_url" >&2
654
+ echo "" >&2
655
+ echo "Tip: LiteLLM should show 'Uvicorn running on http://0.0.0.0:4000' when started" >&2
656
+ echo "" >&2
657
+ show_setup_instructions "LITELLM"
658
+ return 1
659
+ fi
660
+ fi
661
+
302
662
  return 0
303
663
  }
304
664
 
@@ -418,6 +778,27 @@ main() {
418
778
  PROVIDER_NAME="glm"
419
779
  shift
420
780
  ;;
781
+ # CCR options
782
+ --ccr-mode)
783
+ CCR_MODE="$2"
784
+ shift 2
785
+ ;;
786
+ --ccr-status)
787
+ CCR_ACTION="status"
788
+ shift
789
+ ;;
790
+ --ccr-restart)
791
+ CCR_ACTION="restart"
792
+ shift
793
+ ;;
794
+ --ccr-install)
795
+ CCR_ACTION="install"
796
+ shift
797
+ ;;
798
+ --ccr-stop)
799
+ CCR_ACTION="stop"
800
+ shift
801
+ ;;
421
802
  # Passthrough to claude
422
803
  *)
423
804
  CLAUDE_ARGS+=("$1")
@@ -448,6 +829,28 @@ main() {
448
829
  exit 0
449
830
  fi
450
831
 
832
+ # Handle CCR actions (don't require provider validation)
833
+ if [[ -n "$CCR_ACTION" ]]; then
834
+ case "$CCR_ACTION" in
835
+ status)
836
+ get_ccr_status
837
+ exit $?
838
+ ;;
839
+ restart)
840
+ restart_ccr
841
+ exit $?
842
+ ;;
843
+ install)
844
+ install_ccr
845
+ exit $?
846
+ ;;
847
+ stop)
848
+ stop_ccr
849
+ exit $?
850
+ ;;
851
+ esac
852
+ fi
853
+
451
854
  # Validate config file exists for other actions
452
855
  if [[ -z "$config_file" ]]; then
453
856
  echo "Error: Configuration file not found" >&2
@@ -479,8 +882,32 @@ main() {
479
882
  echo "Using provider: $PROVIDER_NAME" >&2
480
883
  fi
481
884
 
482
- # Export the environment variables
483
- eval "$(export_provider_vars "$PROVIDER_NAME" "$config_file")"
885
+ # Check if this is a CCR provider
886
+ local is_ccr_provider
887
+ is_ccr_provider=$(parse_json "$config_file" "providers.$PROVIDER_NAME.ccr_provider" 2>/dev/null || echo "false")
888
+
889
+ if [[ "$is_ccr_provider" == "true" || "$is_ccr_provider" == "True" ]]; then
890
+ # Set default routing mode if not specified
891
+ [[ -z "$CCR_MODE" ]] && CCR_MODE="standard"
892
+
893
+ # Ensure CCR is ready (install, config, start)
894
+ if ! ensure_ccr_ready "$PROVIDER_NAME" "$CCR_MODE" "$config_file"; then
895
+ exit 1
896
+ fi
897
+
898
+ # Get CCR host/port for environment setup
899
+ local ccr_host ccr_port
900
+ ccr_host=$(parse_json "$config_file" "ccr.host" 2>/dev/null || echo "127.0.0.1")
901
+ ccr_port=$(parse_json "$config_file" "ccr.port" 2>/dev/null || echo "3456")
902
+
903
+ # Set environment to route through CCR
904
+ export ANTHROPIC_BASE_URL="http://${ccr_host}:${ccr_port}"
905
+
906
+ [[ "$VERBOSE" == true ]] && echo "Routing through CCR at $ANTHROPIC_BASE_URL (mode: $CCR_MODE)" >&2
907
+ else
908
+ # Standard provider - export env vars directly
909
+ eval "$(export_provider_vars "$PROVIDER_NAME" "$config_file")"
910
+ fi
484
911
 
485
912
  # Launch claude with passthrough arguments
486
913
  exec claude "${CLAUDE_ARGS[@]}"
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sekora_ai/claude-toggle",
3
- "version": "1.0.0",
3
+ "version": "1.1.0",
4
4
  "description": "Provider-agnostic toggle for Claude Code API providers",
5
5
  "keywords": [
6
6
  "claude",
package/providers.json CHANGED
@@ -1,7 +1,16 @@
1
1
  {
2
2
  "$schema": "http://json-schema.org/draft-07/schema#",
3
- "version": "1.0",
3
+ "version": "2.0",
4
4
  "default_provider": "anthropic",
5
+ "ccr": {
6
+ "enabled": true,
7
+ "host": "127.0.0.1",
8
+ "port": 3456,
9
+ "log_level": "info",
10
+ "api_timeout_ms": 600000,
11
+ "auto_install": true,
12
+ "auto_start": true
13
+ },
5
14
  "providers": {
6
15
  "anthropic": {
7
16
  "description": "Anthropic's official API",
@@ -36,6 +45,92 @@
36
45
  "validation": {
37
46
  "required_env_vars": ["ZAI_API_KEY"]
38
47
  }
48
+ },
49
+ "litellm-gemini": {
50
+ "description": "Google Gemini 3 via LiteLLM (shows model name)",
51
+ "enabled": true,
52
+ "env_vars": {
53
+ "ANTHROPIC_API_KEY": {
54
+ "value": "${GEMINI_API_KEY}",
55
+ "required": true
56
+ },
57
+ "ANTHROPIC_BASE_URL": {
58
+ "value": "http://localhost:4000"
59
+ },
60
+ "ANTHROPIC_DEFAULT_HAIKU_MODEL": {
61
+ "value": "gemini/gemini-3.0-pro"
62
+ },
63
+ "ANTHROPIC_DEFAULT_SONNET_MODEL": {
64
+ "value": "gemini/gemini-3.0-pro"
65
+ },
66
+ "ANTHROPIC_DEFAULT_OPUS_MODEL": {
67
+ "value": "gemini/gemini-3.0-pro"
68
+ }
69
+ },
70
+ "validation": {
71
+ "required_env_vars": ["GEMINI_API_KEY"]
72
+ }
73
+ },
74
+ "litellm-openai": {
75
+ "description": "OpenAI GPT-4 via LiteLLM",
76
+ "enabled": false,
77
+ "env_vars": {
78
+ "ANTHROPIC_API_KEY": {
79
+ "value": "${OPENAI_API_KEY}",
80
+ "required": true
81
+ },
82
+ "ANTHROPIC_BASE_URL": {
83
+ "value": "http://localhost:4000"
84
+ },
85
+ "ANTHROPIC_DEFAULT_HAIKU_MODEL": {
86
+ "value": "openai/gpt-4o-mini"
87
+ },
88
+ "ANTHROPIC_DEFAULT_SONNET_MODEL": {
89
+ "value": "openai/gpt-4o"
90
+ },
91
+ "ANTHROPIC_DEFAULT_OPUS_MODEL": {
92
+ "value": "openai/gpt-4o"
93
+ }
94
+ },
95
+ "validation": {
96
+ "required_env_vars": ["OPENAI_API_KEY"]
97
+ }
98
+ },
99
+ "gemini-ccr": {
100
+ "description": "Google Gemini 2 via Claude Code Router",
101
+ "enabled": true,
102
+ "ccr_provider": true,
103
+ "ccr_config": {
104
+ "name": "gemini",
105
+ "api_base_url": "https://generativelanguage.googleapis.com/v1beta/models/",
106
+ "api_key_env": "GEMINI_API_KEY",
107
+ "models": {
108
+ "default": "gemini-2.0-flash",
109
+ "background": "gemini-2.0-flash",
110
+ "think": "gemini-2.0-flash-thinking-exp"
111
+ },
112
+ "transformer": {
113
+ "use": ["gemini", "enhancetool", "cleancache"]
114
+ }
115
+ },
116
+ "routing_modes": {
117
+ "standard": {
118
+ "default": "gemini-2.0-flash",
119
+ "background": "gemini-2.0-flash"
120
+ },
121
+ "reasoning": {
122
+ "default": "gemini-2.0-flash",
123
+ "think": "gemini-2.0-flash-thinking-exp",
124
+ "background": "gemini-2.0-flash"
125
+ },
126
+ "eco": {
127
+ "default": "gemini-2.0-flash",
128
+ "background": "gemini-2.0-flash"
129
+ }
130
+ },
131
+ "validation": {
132
+ "required_env_vars": ["GEMINI_API_KEY"]
133
+ }
39
134
  }
40
135
  }
41
136
  }