cc-x10ded 3.0.17 → 3.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/PLUGINS.md ADDED
@@ -0,0 +1,173 @@
1
+ # ccx Plugin System
2
+
3
+ Extend ccx with custom model providers.
4
+
5
+ ## Overview
6
+
7
+ The plugin system allows you to add any OpenAI-compatible API as a provider in ccx. Plugins are discovered from `~/.config/claude-glm/plugins/` and automatically loaded at runtime.
8
+
9
+ ## Quick Start
10
+
11
+ ### 1. Create Plugin Directory
12
+
13
+ ```bash
14
+ mkdir -p ~/.config/claude-glm/plugins/my-provider
15
+ ```
16
+
17
+ ### 2. Create Plugin Manifest
18
+
19
+ Create `plugin.json` in your plugin directory:
20
+
21
+ ```json
22
+ {
23
+ "id": "my-provider",
24
+ "name": "My Custom Provider",
25
+ "version": "1.0.0",
26
+ "description": "A custom model provider",
27
+ "entry": "./dist/index.js"
28
+ }
29
+ ```
30
+
31
+ ### 3. Write Plugin Implementation
32
+
33
+ Create `index.ts` with your plugin code (see Example below).
34
+
35
+ ### 4. Build Plugin
36
+
37
+ ```bash
38
+ bun build ./index.ts --outdir ./dist --target bun
39
+ ```
40
+
41
+ ### 5. Verify Installation
42
+
43
+ ```bash
44
+ ccx models
45
+ ```
46
+
47
+ Your provider should now appear in the list.
48
+
49
+ ## Plugin API
50
+
51
+ ### ProviderPlugin Interface
52
+
53
+ ```typescript
54
+ import type { ProviderPlugin } from "@ccx/plugin-types";
55
+
56
+ export default {
57
+ id: "my-provider",
58
+ name: "My Provider",
59
+ version: "1.0.0",
60
+ description: "Description of your provider",
61
+ models: [
62
+ { id: "model-1", name: "Model 1", contextWindow: 128000 },
63
+ { id: "model-2", name: "Model 2", contextWindow: 64000, default: true }
64
+ ],
65
+ createClient(config) {
66
+ return new MyProviderClient(config);
67
+ }
68
+ } satisfies ProviderPlugin;
69
+ ```
70
+
71
+ ### ProviderClient Interface
72
+
73
+ Your client must implement:
74
+
75
+ ```typescript
76
+ interface ProviderClient {
77
+ readonly provider: string;
78
+
79
+ // Stream completions (required)
80
+ streamComplete(request: AnthropicRequest): AsyncGenerator<SSEMessage>;
81
+
82
+ // Get model info
83
+ getModelInfo(): ModelInfo | undefined;
84
+
85
+ // Health check
86
+ healthCheck(): Promise<HealthStatus>;
87
+ }
88
+ ```
89
+
90
+ ### SSE Message Format
91
+
92
+ Messages must be in Anthropic's SSE format:
93
+
94
+ ```typescript
95
+ interface SSEMessage {
96
+ type: string; // "message_start", "content_block_delta", "message_delta", "message_stop"
97
+ data: {
98
+ type?: string;
99
+ index?: number;
100
+ delta?: { type: "text_delta"; text: string };
101
+ stop_reason?: string;
102
+ usage?: { input_tokens: number; output_tokens: number };
103
+ };
104
+ }
105
+ ```
106
+
107
+ ## Example: Ollama Plugin
108
+
109
+ See `examples/plugins/ollama/` for a complete working example.
110
+
111
+ ## Type Definitions
112
+
113
+ Install the official type definitions:
114
+
115
+ ```bash
116
+ bun add -D @ccx/plugin-types
117
+ ```
118
+
119
+ ## Configuration
120
+
121
+ Plugins can read configuration from the main config file:
122
+
123
+ ```json
124
+ {
125
+ "plugins": {
126
+ "my-provider": {
127
+ "apiKey": "your-api-key",
128
+ "baseUrl": "https://api.example.com/v1",
129
+ "extra": {
130
+ "customOption": true
131
+ }
132
+ }
133
+ }
134
+ }
135
+ ```
136
+
137
+ ## Best Practices
138
+
139
+ 1. **Handle errors gracefully** - Return meaningful error messages
140
+ 2. **Implement health checks** - Helps with `ccx doctor` diagnostics
141
+ 3. **Support streaming** - Users expect real-time responses
142
+ 4. **Respect rate limits** - Implement backoff on 429 errors
143
+ 5. **Use environment variables** - Allow API keys to be supplied via environment variables
144
+
145
+ ## Publishing
146
+
147
+ Want to share your plugin?
148
+
149
+ 1. Create a GitHub repository with your plugin code
150
+ 2. Add installation instructions to your README
151
+ 3. Submit a PR to add your plugin to the examples directory
152
+
153
+ ## Troubleshooting
154
+
155
+ ### Plugin Not Loading
156
+
157
+ Check `ccx doctor` for plugin errors. Common issues:
158
+ - Missing `plugin.json`
159
+ - Entry file not found
160
+ - TypeScript compilation errors
161
+
162
+ ### Model Not Found
163
+
164
+ Ensure your model's `id` matches what users will type:
165
+ - `ccx --model=my-provider:model-1`
166
+
167
+ ### API Errors
168
+
169
+ Implement proper error handling in your plugin to surface meaningful errors to users.
170
+
171
+ ## API Reference
172
+
173
+ See [@ccx/plugin-types](https://www.npmjs.com/package/@ccx/plugin-types) for complete type definitions.
@@ -0,0 +1,67 @@
1
+ # Ollama Plugin for ccx
2
+
3
+ This example plugin demonstrates how to integrate local Ollama models with ccx.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ # 1. Ensure Ollama is running (default: http://localhost:11434)
9
+ ollama serve
10
+
11
+ # 2. Pull a model
12
+ ollama pull llama3.1
13
+
14
+ # 3. Create plugin directory
15
+ mkdir -p ~/.config/claude-glm/plugins/ollama
16
+
17
+ # 4. Copy plugin files
18
+ cp plugin.json ~/.config/claude-glm/plugins/ollama/
19
+ cp dist/index.js ~/.config/claude-glm/plugins/ollama/
20
+
21
+ # 5. Verify installation
22
+ ccx models
23
+ ```
24
+
25
+ ## Building
26
+
27
+ ```bash
28
+ # From the examples/plugins/ollama directory:
29
+ bun build ./index.ts --outdir ./dist --target bun
30
+ ```
31
+
32
+ ## Configuration
33
+
34
+ No API key required for local Ollama. The plugin connects to `http://localhost:11434` by default.
35
+
36
+ You can override the base URL by adding to your config:
37
+
38
+ ```json
39
+ {
40
+ "plugins": {
41
+ "ollama": {
42
+ "baseUrl": "http://ollama:11434"
43
+ }
44
+ }
45
+ }
46
+ ```
47
+
48
+ ## Supported Models
49
+
50
+ - Llama 3.1 8B (131K context)
51
+ - Qwen 2.5 72B (131K context)
52
+ - Mistral 7B (32K context)
53
+ - CodeLlama 7B (16K context)
54
+ - DeepSeek Coder 6.7B (16K context)
55
+
56
+ ## Adding Custom Models
57
+
58
+ Edit `index.ts` to add more models:
59
+
60
+ ```typescript
61
+ const MODELS: ModelInfo[] = [
62
+ { id: "your-model", name: "Your Model Name", contextWindow: 32768 },
63
+ // Add more models...
64
+ ];
65
+ ```
66
+
67
+ Then rebuild and reinstall.
@@ -0,0 +1,138 @@
1
+ import type {
2
+ ProviderPlugin,
3
+ ProviderClient,
4
+ PluginConfig,
5
+ ModelInfo,
6
+ HealthStatus,
7
+ SSEMessage,
8
+ AnthropicRequest
9
+ } from "../../../packages/plugin-types/index";
10
+
11
+ const MODELS: ModelInfo[] = [
12
+ { id: "llama3.1", name: "Llama 3.1 8B", contextWindow: 131072 },
13
+ { id: "qwen2.5", name: "Qwen 2.5 72B", contextWindow: 131072 },
14
+ { id: "mistral", name: "Mistral 7B", contextWindow: 32768 },
15
+ { id: "codellama", name: "CodeLlama 7B", contextWindow: 16384 },
16
+ { id: "deepseek-coder", name: "DeepSeek Coder 6.7B", contextWindow: 16384 }
17
+ ];
18
+
19
+ export default {
20
+ id: "ollama",
21
+ name: "Ollama (Local)",
22
+ version: "1.0.0",
23
+ description: "Run local Ollama models with Claude Code",
24
+ models: MODELS,
25
+
26
+ createClient(config: PluginConfig): ProviderClient {
27
+ return new OllamaClient(config);
28
+ }
29
+ } satisfies ProviderPlugin;
30
+
31
+ class OllamaClient implements ProviderClient {
32
+ readonly provider = "ollama";
33
+ private baseUrl: string;
34
+
35
+ constructor(config: PluginConfig) {
36
+ this.baseUrl = config.baseUrl || "http://localhost:11434";
37
+ }
38
+
39
+ async *streamComplete(request: AnthropicRequest): AsyncGenerator<SSEMessage> {
40
+ const prompt = this.buildPrompt(request);
41
+
42
+ const response = await fetch(`${this.baseUrl}/api/generate`, {
43
+ method: "POST",
44
+ headers: { "Content-Type": "application/json" },
45
+ body: JSON.stringify({
46
+ model: request.model,
47
+ prompt,
48
+ stream: true,
49
+ options: {
50
+ temperature: request.temperature ?? 0.7,
51
+ num_predict: request.max_tokens
52
+ }
53
+ })
54
+ });
55
+
56
+ if (!response.ok) {
57
+ const errorText = await response.text();
58
+ throw new Error(`Ollama error: ${response.status} ${errorText}`);
59
+ }
60
+
61
+ const reader = response.body?.getReader();
62
+ if (!reader) throw new Error("No response body from Ollama");
63
+
64
+ const decoder = new TextDecoder();
65
+
66
+ while (true) {
67
+ const { done, value } = await reader.read();
68
+ if (done) break;
69
+
70
+ const chunk = decoder.decode(value);
71
+ const lines = chunk.split("\n");
72
+
73
+ for (const line of lines) {
74
+ if (!line.trim()) continue;
75
+
76
+ try {
77
+ const data = JSON.parse(line);
78
+
79
+ if (data.response) {
80
+ yield {
81
+ type: "content_block_delta",
82
+ data: { text: data.response }
83
+ };
84
+ }
85
+
86
+ if (data.done) {
87
+ yield {
88
+ type: "message_delta",
89
+ data: { stop_reason: "end_turn" }
90
+ };
91
+ yield {
92
+ type: "message_stop",
93
+ data: {}
94
+ };
95
+ break;
96
+ }
97
+ } catch {
98
+ continue;
99
+ }
100
+ }
101
+ }
102
+ }
103
+
104
+ getModelInfo(): ModelInfo | undefined {
105
+ return MODELS.find(m => m.id === "llama3.1");
106
+ }
107
+
108
+ async healthCheck(): Promise<HealthStatus> {
109
+ try {
110
+ const start = Date.now();
111
+ const response = await fetch(`${this.baseUrl}/api/tags`);
112
+ if (!response.ok) {
113
+ return { healthy: false, error: `HTTP ${response.status}` };
114
+ }
115
+ return { healthy: true, latencyMs: Date.now() - start };
116
+ } catch (error) {
117
+ return { healthy: false, error: (error as Error).message };
118
+ }
119
+ }
120
+
121
+ private buildPrompt(request: AnthropicRequest): string {
122
+ const parts: string[] = [];
123
+
124
+ if (request.system) {
125
+ parts.push(`<system>${request.system}</system>`);
126
+ }
127
+
128
+ for (const message of request.messages) {
129
+ const role = message.role === "assistant" ? "assistant" : "user";
130
+ const content = typeof message.content === "string"
131
+ ? message.content
132
+ : message.content.map(c => c.type === "text" ? c.text : "").join("");
133
+ parts.push(`<${role}>${content}</${role}>`);
134
+ }
135
+
136
+ return parts.join("\n");
137
+ }
138
+ }
@@ -0,0 +1,7 @@
1
+ {
2
+ "id": "ollama",
3
+ "name": "Ollama (Local)",
4
+ "version": "1.0.0",
5
+ "description": "Use local Ollama models with Claude Code",
6
+ "entry": "./dist/index.js"
7
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "cc-x10ded",
3
- "version": "3.0.17",
3
+ "version": "3.0.18",
4
4
  "description": "Extend Claude Code with custom OpenAI-compatible model providers",
5
5
  "repository": {
6
6
  "type": "git",
@@ -0,0 +1,60 @@
1
+ export interface ProviderPlugin {
2
+ readonly id: string;
3
+ readonly name: string;
4
+ readonly version: string;
5
+ readonly description?: string;
6
+ readonly models: ModelInfo[];
7
+
8
+ createClient(config: PluginConfig): ProviderClient;
9
+ }
10
+
11
+ export interface ModelInfo {
12
+ readonly id: string;
13
+ readonly name: string;
14
+ readonly contextWindow?: number;
15
+ readonly maxOutputTokens?: number;
16
+ readonly capabilities?: readonly ("text" | "vision" | "tools")[];
17
+ readonly default?: boolean;
18
+ }
19
+
20
+ export interface ProviderClient {
21
+ readonly provider: string;
22
+
23
+ streamComplete(request: AnthropicRequest): AsyncGenerator<SSEMessage>;
24
+
25
+ getModelInfo(): ModelInfo | undefined;
26
+
27
+ healthCheck(): Promise<HealthStatus>;
28
+ }
29
+
30
+ export interface PluginConfig {
31
+ readonly apiKey?: string;
32
+ readonly baseUrl: string;
33
+ readonly extra?: Record<string, unknown>;
34
+ }
35
+
36
+ export interface HealthStatus {
37
+ readonly healthy: boolean;
38
+ readonly latencyMs?: number;
39
+ readonly error?: string;
40
+ }
41
+
42
+ export interface SSEMessage {
43
+ type: string;
44
+ data: unknown;
45
+ }
46
+
47
+ export interface AnthropicRequest {
48
+ model: string;
49
+ messages: AnthropicMessage[];
50
+ max_tokens: number;
51
+ temperature?: number;
52
+ system?: string;
53
+ tools?: unknown[];
54
+ stream?: boolean;
55
+ }
56
+
57
+ export interface AnthropicMessage {
58
+ role: "user" | "assistant" | "system";
59
+ content: string | Array<{ type: "text"; text: string } | { type: "tool_result"; content: string | unknown }>;
60
+ }
@@ -0,0 +1,13 @@
1
+ {
2
+ "name": "@ccx/plugin-types",
3
+ "version": "1.0.0",
4
+ "description": "TypeScript types for ccx plugins",
5
+ "main": "index.ts",
6
+ "types": "index.ts",
7
+ "exports": {
8
+ ".": "./index.ts"
9
+ },
10
+ "keywords": ["ccx", "claude", "plugin", "types"],
11
+ "author": "",
12
+ "license": "MIT"
13
+ }
@@ -1,6 +1,9 @@
1
1
  import { intro, outro, spinner } from "@clack/prompts";
2
2
  import { ShellIntegrator } from "../core/shell";
3
3
  import { ConfigManager } from "../core/config";
4
+ import { telemetry } from "../core/telemetry";
5
+ import { circuitBreaker } from "../core/circuit-breaker";
6
+ import { pluginManager } from "../core/plugins";
4
7
  import * as pc from "picocolors";
5
8
  import { existsSync } from "fs";
6
9
  import { join } from "path";
@@ -30,7 +33,9 @@ export async function doctorCommand() {
30
33
 
31
34
  if (foundOld.length > 0) {
32
35
  issues.push("❌ Old ccx binaries found that may shadow the new one:");
33
- foundOld.forEach(p => issues.push(` ${p}`));
36
+ for (const p of foundOld) {
37
+ issues.push(` ${p}`);
38
+ }
34
39
  issues.push(" 👉 Run 'ccx update' to remove them automatically");
35
40
  } else {
36
41
  checks.push("✅ No conflicting binaries");
@@ -58,7 +63,7 @@ export async function doctorCommand() {
58
63
  if (!config.zaiApiKey && !config.minimaxApiKey && Object.keys(config.providers).length === 0) {
59
64
  issues.push("❌ No API keys configured. Run 'ccx setup'.");
60
65
  } else {
61
- const keys = [];
66
+ const keys: string[] = [];
62
67
  if (config.zaiApiKey) keys.push("Z.AI");
63
68
  if (config.minimaxApiKey) keys.push("Minimax");
64
69
  if (config.providers.openai?.apiKey) keys.push("OpenAI");
@@ -91,7 +96,6 @@ export async function doctorCommand() {
91
96
  if (profile && existsSync(profile)) {
92
97
  const content = await Bun.file(profile).text();
93
98
  if (content.includes("claude-glm-wrapper")) {
94
- // Check if using old bunx-based aliases
95
99
  if (content.includes("bunx cc-x10ded")) {
96
100
  warnings.push("⚠️ Old bunx-based aliases detected");
97
101
  warnings.push(" 👉 Run 'ccx update' to migrate to faster direct aliases");
@@ -105,18 +109,90 @@ export async function doctorCommand() {
105
109
  }
106
110
  s.stop("Shell check complete");
107
111
 
112
+ // 6. Telemetry (Local)
113
+ s.start("Checking telemetry...");
114
+ const sessionDuration = telemetry.getSessionDuration();
115
+ const requestCount = telemetry.getRequestCount();
116
+ const providerStats = telemetry.getProviderStats();
117
+ const errors = telemetry.getErrors();
118
+ const fallbacks = telemetry.getFallbacks();
119
+
120
+ console.log("\n" + pc.bold("Telemetry (this session):"));
121
+ console.log(` Session: ${Math.round(sessionDuration / 1000)}s | Requests: ${requestCount}`);
122
+
123
+ if (Object.keys(providerStats).length > 0) {
124
+ console.log("\n Provider Usage:");
125
+ for (const [provider, stats] of Object.entries(providerStats)) {
126
+ const statusIcon = stats.errors > 0 ? "🔴" : "🟢";
127
+ console.log(` ${statusIcon} ${provider}: ${stats.count} requests (avg ${stats.avgLatency}ms)${stats.errors > 0 ? `, ${stats.errors} errors` : ""}`);
128
+ }
129
+ }
130
+
131
+ if (errors.length > 0) {
132
+ console.log("\n Errors:");
133
+ for (const error of errors) {
134
+ console.log(` ${error.provider}: ${error.error} (${error.count})`);
135
+ }
136
+ }
137
+
138
+ if (fallbacks.length > 0) {
139
+ console.log("\n Fallbacks:");
140
+ for (const fallback of fallbacks) {
141
+ console.log(` ${fallback.fromProvider} → ${fallback.toProvider} (${fallback.reason})`);
142
+ }
143
+ }
144
+
145
+ if (requestCount === 0) {
146
+ console.log(" No requests yet in this session.");
147
+ }
148
+ s.stop("Telemetry check complete");
149
+
150
+ // 7. Circuit Breaker Status
151
+ s.start("Checking circuit breaker...");
152
+ const circuitStates = circuitBreaker.getStates();
153
+ if (circuitStates.length > 0) {
154
+ console.log("\n" + pc.bold("Circuit Breaker Status:"));
155
+ for (const state of circuitStates) {
156
+ const icon = state.state === "closed" ? "🟢" : state.state === "half-open" ? "🟡" : "🔴";
157
+ console.log(` ${icon} ${state.provider}: ${state.state} (${state.failures} failures)`);
158
+ }
159
+ } else {
160
+ console.log(" No circuit breaker activity yet.");
161
+ }
162
+ s.stop("Circuit breaker check complete");
163
+
164
+ // 8. Plugins
165
+ s.start("Checking plugins...");
166
+ const pluginCount = pluginManager.getPluginCount();
167
+ if (pluginCount > 0) {
168
+ const plugins = pluginManager.getPlugins();
169
+ console.log(`\n Installed Plugins: ${pluginCount}`);
170
+ for (const plugin of plugins) {
171
+ console.log(` - ${plugin.name} v${plugin.version}`);
172
+ }
173
+ } else {
174
+ console.log(" No plugins installed.");
175
+ }
176
+ s.stop("Plugin check complete");
177
+
108
178
  // Report
109
179
  console.log("\n" + pc.bold("Diagnostic Report:"));
110
- checks.forEach(c => console.log(c));
180
+ for (const c of checks) {
181
+ console.log(c);
182
+ }
111
183
 
112
184
  if (warnings.length > 0) {
113
185
  console.log("");
114
- warnings.forEach(w => console.log(pc.yellow(w)));
186
+ for (const w of warnings) {
187
+ console.log(pc.yellow(w));
188
+ }
115
189
  }
116
190
 
117
191
  if (issues.length > 0) {
118
192
  console.log("");
119
- issues.forEach(i => console.log(pc.red(i)));
193
+ for (const i of issues) {
194
+ console.log(pc.red(i));
195
+ }
120
196
  outro(pc.red("Issues found. Please resolve them above."));
121
197
  process.exit(1);
122
198
  } else if (warnings.length > 0) {
@@ -0,0 +1,71 @@
1
+ import { providerRegistry } from "../core/registry";
2
+ import { pluginManager } from "../core/plugins";
3
+ import { createLogger } from "../core/logger";
4
+
5
+ const logger = createLogger();
6
+
7
+ export async function modelsCommand(): Promise<void> {
8
+ console.log("\n");
9
+
10
+ const providers = providerRegistry.listProviders();
11
+ const plugins = pluginManager.getPlugins();
12
+
13
+ const allModels = providerRegistry.getAllModels();
14
+
15
+ const maxProviderWidth = Math.max(
16
+ ...providers.map(p => p.name.length),
17
+ ...plugins.map(p => p.name.length)
18
+ );
19
+ const maxModelWidth = Math.max(
20
+ ...allModels.map(m => `${m.provider.id}:${m.model.id}`.length),
21
+ 30
22
+ );
23
+
24
+ console.log("╔" + "═".repeat(maxProviderWidth + maxModelWidth + 7) + "╗");
25
+ console.log("║" + " ".repeat(Math.floor((maxProviderWidth + maxModelWidth + 7 - 26) / 2)) + "ccx Available Models" + " ".repeat(Math.ceil((maxProviderWidth + maxModelWidth + 7 - 26) / 2)) + "║");
26
+ console.log("╠" + "═".repeat(maxProviderWidth + maxModelWidth + 7) + "╣");
27
+
28
+ for (const provider of providers) {
29
+ const statusIcon = provider.isNative ? "🔵" : "🟢";
30
+ const keyHint = provider.isNative ? "[Native - No proxy needed]" : `[Requires: ${provider.requiresKey.split(".").pop()?.replace("ApiKey", "_KEY") || "key"}]`;
31
+
32
+ console.log("║");
33
+ console.log(`║ ${statusIcon} ${provider.name.padEnd(maxProviderWidth)} ${keyHint}`);
34
+
35
+ for (const model of provider.models) {
36
+ const defaultMark = model.default ? " (default)" : "";
37
+ const modelLine = ` ├── ${model.id}${defaultMark}`;
38
+ console.log(`║ ${modelLine.padEnd(maxProviderWidth + maxModelWidth + 3)}║`);
39
+ }
40
+ }
41
+
42
+ console.log("║");
43
+ console.log("╠" + "═".repeat(maxProviderWidth + maxModelWidth + 7) + "╣");
44
+
45
+ if (plugins.length > 0) {
46
+ console.log("║");
47
+ console.log("║ Installed Plugins (" + plugins.length + ")");
48
+
49
+ for (const plugin of plugins) {
50
+ console.log(`║ ├── ${plugin.name} v${plugin.version}`);
51
+ for (const model of plugin.models) {
52
+ const modelLine = ` │ ├── ${model.id}`;
53
+ console.log(`║ ${modelLine.padEnd(maxProviderWidth + maxModelWidth + 3)}║`);
54
+ }
55
+ }
56
+ } else {
57
+ console.log("║");
58
+ console.log("║ Plugins: 0 installed");
59
+ console.log("║ To add a plugin, create: ~/.config/claude-glm/plugins/<name>/");
60
+ }
61
+
62
+ console.log("║");
63
+ console.log("╚" + "═".repeat(maxProviderWidth + maxModelWidth + 7) + "╝");
64
+ console.log("\n");
65
+ console.log("Usage:");
66
+ console.log(" ccx # Interactive selection");
67
+ console.log(" ccx --model=glm-4.7 # Use specific model");
68
+ console.log(" ccx --list # Show this list");
69
+ console.log(" ccx setup # Configure API keys");
70
+ console.log("\n");
71
+ }