cc-x10ded 3.0.17 → 3.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,172 @@
1
+ import type { ProviderInfo, ModelInfo } from "../types";
2
+
3
+ const BUILTIN_PROVIDERS: ProviderInfo[] = [
4
+ {
5
+ id: "glm",
6
+ name: "GLM (Z.AI)",
7
+ models: [
8
+ { id: "glm-4.7", name: "GLM-4.7", default: true },
9
+ { id: "glm-4.6", name: "GLM-4.6" },
10
+ { id: "glm-4.5", name: "GLM-4.5" },
11
+ { id: "glm-4.5-air", name: "GLM-4.5-Air" }
12
+ ],
13
+ isNative: false,
14
+ requiresKey: "zaiApiKey"
15
+ },
16
+ {
17
+ id: "minimax",
18
+ name: "Minimax",
19
+ models: [
20
+ { id: "MiniMax-M2.1", name: "MiniMax-M2.1", default: true },
21
+ { id: "MiniMax-M2.1-32k", name: "MiniMax-M2.1-32k" }
22
+ ],
23
+ isNative: false,
24
+ requiresKey: "minimaxApiKey"
25
+ },
26
+ {
27
+ id: "openai",
28
+ name: "OpenAI",
29
+ models: [
30
+ { id: "gpt-4o", name: "GPT-4o", default: true },
31
+ { id: "gpt-4o-mini", name: "GPT-4o Mini" },
32
+ { id: "gpt-4-turbo", name: "GPT-4 Turbo" }
33
+ ],
34
+ isNative: false,
35
+ requiresKey: "providers.openai.apiKey"
36
+ },
37
+ {
38
+ id: "anthropic",
39
+ name: "Anthropic (Claude Code)",
40
+ models: [
41
+ { id: "claude-sonnet-4-20250514", name: "Claude Sonnet 4", default: true },
42
+ { id: "claude-haiku-4-20250514", name: "Claude Haiku 4" },
43
+ { id: "claude-opus-4-20250514", name: "Claude Opus 4" }
44
+ ],
45
+ isNative: true,
46
+ requiresKey: "providers.anthropic.apiKey"
47
+ },
48
+ {
49
+ id: "gemini",
50
+ name: "Google Gemini",
51
+ models: [
52
+ { id: "gemini-2.0-flash", name: "Gemini 2.0 Flash", default: true },
53
+ { id: "gemini-1.5-pro", name: "Gemini 1.5 Pro" }
54
+ ],
55
+ isNative: false,
56
+ requiresKey: "providers.gemini.apiKey"
57
+ },
58
+ {
59
+ id: "openrouter",
60
+ name: "OpenRouter",
61
+ models: [
62
+ { id: "openrouter.auto", name: "Auto-Select", default: true },
63
+ { id: "anthropic/claude-sonnet-4", name: "Anthropic Sonnet" }
64
+ ],
65
+ isNative: false,
66
+ requiresKey: "providers.openrouter.apiKey"
67
+ }
68
+ ];
69
+
70
+ const DEFAULT_PROVIDER_ORDER = ["glm", "minimax", "openai", "gemini", "openrouter", "anthropic"];
71
+
72
+ export class ProviderRegistry {
73
+ private providers: Map<string, ProviderInfo> = new Map();
74
+ private plugins: Map<string, ProviderInfo> = new Map();
75
+
76
+ constructor() {
77
+ for (const provider of BUILTIN_PROVIDERS) {
78
+ this.providers.set(provider.id, provider);
79
+ }
80
+ }
81
+
82
+ registerPlugin(plugin: ProviderInfo): void {
83
+ this.plugins.set(plugin.id, plugin);
84
+ }
85
+
86
+ unregisterPlugin(pluginId: string): void {
87
+ this.plugins.delete(pluginId);
88
+ }
89
+
90
+ listProviders(): ProviderInfo[] {
91
+ const all: ProviderInfo[] = [];
92
+ for (const id of DEFAULT_PROVIDER_ORDER) {
93
+ const builtin = this.providers.get(id);
94
+ const plugin = this.plugins.get(id);
95
+ if (builtin) all.push(builtin);
96
+ if (plugin) all.push(plugin);
97
+ }
98
+ for (const [id, plugin] of this.plugins) {
99
+ if (!DEFAULT_PROVIDER_ORDER.includes(id)) {
100
+ all.push(plugin);
101
+ }
102
+ }
103
+ return all;
104
+ }
105
+
106
+ getProvider(id: string): ProviderInfo | null {
107
+ return this.providers.get(id) || this.plugins.get(id) || null;
108
+ }
109
+
110
+ getProviderOrder(): string[] {
111
+ const order: string[] = [];
112
+ for (const id of DEFAULT_PROVIDER_ORDER) {
113
+ if (this.providers.has(id) || this.plugins.has(id)) {
114
+ order.push(id);
115
+ }
116
+ }
117
+ for (const id of this.plugins.keys()) {
118
+ if (!DEFAULT_PROVIDER_ORDER.includes(id) && !order.includes(id)) {
119
+ order.push(id);
120
+ }
121
+ }
122
+ return order;
123
+ }
124
+
125
+ getDefaultProvider(): ProviderInfo | null {
126
+ const order = this.getProviderOrder();
127
+ for (const id of order) {
128
+ const provider = this.getProvider(id);
129
+ if (provider) {
130
+ const defaultModel = provider.models.find(m => m.default);
131
+ if (defaultModel) {
132
+ return provider;
133
+ }
134
+ }
135
+ }
136
+ return null;
137
+ }
138
+
139
+ getDefaultModelForProvider(providerId: string): ModelInfo | null {
140
+ const provider = this.getProvider(providerId);
141
+ if (!provider) return null;
142
+ return provider.models.find(m => m.default) || provider.models[0] || null;
143
+ }
144
+
145
+ getModel(providerId: string, modelId: string): ModelInfo | null {
146
+ const provider = this.getProvider(providerId);
147
+ if (!provider) return null;
148
+ return provider.models.find(m => m.id === modelId) || null;
149
+ }
150
+
151
+ getAllModels(): Array<{ provider: ProviderInfo; model: ModelInfo }> {
152
+ const all: Array<{ provider: ProviderInfo; model: ModelInfo }> = [];
153
+ for (const provider of this.listProviders()) {
154
+ for (const model of provider.models) {
155
+ all.push({ provider, model });
156
+ }
157
+ }
158
+ return all;
159
+ }
160
+
161
+ isNative(providerId: string): boolean {
162
+ const provider = this.getProvider(providerId);
163
+ return provider?.isNative ?? false;
164
+ }
165
+
166
+ requiresKey(providerId: string): string | null {
167
+ const provider = this.getProvider(providerId);
168
+ return provider?.requiresKey ?? null;
169
+ }
170
+ }
171
+
172
+ export const providerRegistry = new ProviderRegistry();
@@ -0,0 +1,253 @@
1
import { join } from "path";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
import { homedir } from "os";
import type { TelemetryEvent } from "../types";
import { createLogger } from "./logger";
6
+
7
+ const logger = createLogger();
8
+
9
+ export interface TelemetryData {
10
+ sessionId: string;
11
+ sessionStart: number;
12
+ requests: TelemetryRequest[];
13
+ errors: TelemetryError[];
14
+ fallbacks: TelemetryFallback[];
15
+ }
16
+
17
+ interface TelemetryRequest {
18
+ provider: string;
19
+ model: string;
20
+ latencyMs: number;
21
+ success: boolean;
22
+ timestamp: number;
23
+ errorCode?: string;
24
+ }
25
+
26
+ interface TelemetryError {
27
+ provider: string;
28
+ error: string;
29
+ count: number;
30
+ }
31
+
32
+ interface TelemetryFallback {
33
+ fromProvider: string;
34
+ toProvider: string;
35
+ reason: string;
36
+ timestamp: number;
37
+ }
38
+
39
+ export class Telemetry {
40
+ private data: TelemetryData;
41
+ private telemetryDir: string;
42
+ private telemetryFile: string;
43
+ private enabled: boolean = true;
44
+ private static instance: Telemetry | null = null;
45
+
46
+ private constructor() {
47
+ this.telemetryDir = join(homedir(), ".config", "claude-glm");
48
+ this.telemetryFile = join(this.telemetryDir, "telemetry.json");
49
+ this.data = this.loadOrCreate();
50
+ }
51
+
52
+ static getInstance(): Telemetry {
53
+ if (!Telemetry.instance) {
54
+ Telemetry.instance = new Telemetry();
55
+ }
56
+ return Telemetry.instance;
57
+ }
58
+
59
+ private loadOrCreate(): TelemetryData {
60
+ if (!existsSync(this.telemetryDir)) {
61
+ return this.createNewSession();
62
+ }
63
+
64
+ try {
65
+ if (existsSync(this.telemetryFile)) {
66
+ const content = readFileSync(this.telemetryFile, "utf-8");
67
+ const data = JSON.parse(content) as TelemetryData;
68
+ const sessionAge = Date.now() - data.sessionStart;
69
+ const ONE_DAY = 24 * 60 * 60 * 1000;
70
+
71
+ if (sessionAge > ONE_DAY) {
72
+ return this.createNewSession();
73
+ }
74
+ return data;
75
+ }
76
+ } catch (error) {
77
+ logger.warn("Failed to load telemetry data", { error: (error as Error).message });
78
+ }
79
+
80
+ return this.createNewSession();
81
+ }
82
+
83
+ private createNewSession(): TelemetryData {
84
+ return {
85
+ sessionId: this.generateSessionId(),
86
+ sessionStart: Date.now(),
87
+ requests: [],
88
+ errors: [],
89
+ fallbacks: []
90
+ };
91
+ }
92
+
93
+ private generateSessionId(): string {
94
+ const array = new Uint8Array(16);
95
+ crypto.getRandomValues(array);
96
+ return Array.from(array, byte => byte.toString(16).padStart(2, "0")).join("");
97
+ }
98
+
99
+ private save(): void {
100
+ try {
101
+ writeFileSync(this.telemetryFile, JSON.stringify(this.data, null, 2));
102
+ } catch (error) {
103
+ logger.warn("Failed to save telemetry data", { error: (error as Error).message });
104
+ }
105
+ }
106
+
107
+ disable(): void {
108
+ this.enabled = false;
109
+ }
110
+
111
+ isEnabled(): boolean {
112
+ return this.enabled;
113
+ }
114
+
115
+ trackRequest(
116
+ provider: string,
117
+ model: string,
118
+ latencyMs: number,
119
+ success: boolean,
120
+ errorCode?: string
121
+ ): void {
122
+ if (!this.enabled) return;
123
+
124
+ const request: TelemetryRequest = {
125
+ provider,
126
+ model,
127
+ latencyMs,
128
+ success,
129
+ timestamp: Date.now()
130
+ };
131
+
132
+ if (errorCode) {
133
+ request.errorCode = errorCode;
134
+ this.trackError(provider, errorCode);
135
+ }
136
+
137
+ this.data.requests.push(request);
138
+ this.save();
139
+ }
140
+
141
+ private trackError(provider: string, error: string): void {
142
+ const existing = this.data.errors.find(e => e.provider === provider && e.error === error);
143
+ if (existing) {
144
+ existing.count++;
145
+ } else {
146
+ this.data.errors.push({ provider, error, count: 1 });
147
+ }
148
+ }
149
+
150
+ trackFallback(fromProvider: string, toProvider: string, reason: string): void {
151
+ if (!this.enabled) return;
152
+
153
+ this.data.fallbacks.push({
154
+ fromProvider,
155
+ toProvider,
156
+ reason,
157
+ timestamp: Date.now()
158
+ });
159
+ this.save();
160
+ }
161
+
162
+ trackEvent(event: Omit<TelemetryEvent, "timestamp">): void {
163
+ if (!this.enabled) return;
164
+
165
+ const fullEvent: TelemetryEvent = {
166
+ ...event,
167
+ timestamp: Date.now()
168
+ };
169
+
170
+ switch (event.type) {
171
+ case "request_complete":
172
+ if (event.provider && event.model && event.latencyMs !== undefined) {
173
+ this.trackRequest(
174
+ event.provider,
175
+ event.model,
176
+ event.latencyMs,
177
+ event.success ?? true,
178
+ event.errorCode
179
+ );
180
+ }
181
+ break;
182
+ case "fallback":
183
+ if (event.fromProvider && event.toProvider) {
184
+ this.trackFallback(event.fromProvider, event.toProvider, event.reason || "unknown");
185
+ }
186
+ break;
187
+ }
188
+ }
189
+
190
+ getSessionId(): string {
191
+ return this.data.sessionId;
192
+ }
193
+
194
+ getSessionStart(): number {
195
+ return this.data.sessionStart;
196
+ }
197
+
198
+ getRequestCount(): number {
199
+ return this.data.requests.length;
200
+ }
201
+
202
+ getRequests(): TelemetryRequest[] {
203
+ return [...this.data.requests];
204
+ }
205
+
206
+ getErrors(): TelemetryError[] {
207
+ return [...this.data.errors];
208
+ }
209
+
210
+ getFallbacks(): TelemetryFallback[] {
211
+ return [...this.data.fallbacks];
212
+ }
213
+
214
+ getProviderStats(): Record<string, { count: number; avgLatency: number; errors: number }> {
215
+ const stats: Record<string, { count: number; totalLatency: number; errors: number }> = {};
216
+
217
+ for (const request of this.data.requests) {
218
+ if (!stats[request.provider]) {
219
+ stats[request.provider] = { count: 0, totalLatency: 0, errors: 0 };
220
+ }
221
+ const providerStats = stats[request.provider];
222
+ if (providerStats) {
223
+ providerStats.count++;
224
+ providerStats.totalLatency += request.latencyMs;
225
+ if (!request.success) {
226
+ providerStats.errors++;
227
+ }
228
+ }
229
+ }
230
+
231
+ const result: Record<string, { count: number; avgLatency: number; errors: number }> = {};
232
+ for (const [provider, stat] of Object.entries(stats)) {
233
+ result[provider] = {
234
+ count: stat.count,
235
+ avgLatency: Math.round(stat.totalLatency / stat.count),
236
+ errors: stat.errors
237
+ };
238
+ }
239
+
240
+ return result;
241
+ }
242
+
243
+ getSessionDuration(): number {
244
+ return Date.now() - this.data.sessionStart;
245
+ }
246
+
247
+ clear(): void {
248
+ this.data = this.createNewSession();
249
+ this.save();
250
+ }
251
+ }
252
+
253
+ export const telemetry = Telemetry.getInstance();
package/src/index.ts CHANGED
@@ -4,7 +4,11 @@ import { runCommand } from "./commands/run";
4
4
  import { setupCommand } from "./commands/setup";
5
5
  import { configCommand } from "./commands/config";
6
6
  import { doctorCommand } from "./commands/doctor";
7
+ import { modelsCommand } from "./commands/models";
7
8
  import packageJson from "../package.json";
9
+ import { createLogger } from "./core/logger";
10
+
11
+ const logger = createLogger();
8
12
 
9
13
  const cli = cac("ccx");
10
14
 
@@ -20,6 +24,12 @@ cli
20
24
  .command("doctor", "Run self-diagnostics")
21
25
  .action(doctorCommand);
22
26
 
27
+ cli
28
+ .command("models", "List all available models")
29
+ .action(async () => {
30
+ await modelsCommand();
31
+ });
32
+
23
33
  cli
24
34
  .command("update", "Update ccx to the latest version")
25
35
  .option("--skip-aliases", "Skip alias installation")
@@ -32,16 +42,16 @@ cli
32
42
  const shellInt = new ShellIntegrator();
33
43
  const shell = shellInt.detectShell();
34
44
 
35
- // 1. Clean up old binaries first
36
45
  if (!options.skipCleanup) {
37
46
  const removed = await shellInt.cleanupOldBinaries();
38
47
  if (removed.length > 0) {
39
48
  console.log(pc.default.yellow("🧹 Removed old ccx binaries:"));
40
- removed.forEach(p => console.log(pc.default.dim(` ${p}`)));
49
+ for (const p of removed) {
50
+ console.log(pc.default.dim(` ${p}`));
51
+ }
41
52
  }
42
53
  }
43
54
 
44
- // 2. Update via bun global install
45
55
  console.log(pc.default.blue("📦 Updating ccx..."));
46
56
  const proc = spawn(["bun", "install", "-g", "cc-x10ded@latest"], {
47
57
  stdio: ["inherit", "inherit", "inherit"]
@@ -55,7 +65,6 @@ cli
55
65
 
56
66
  console.log(pc.default.green("✅ ccx updated!"));
57
67
 
58
- // 3. Ensure bun bin is in PATH (and prioritized)
59
68
  if (shell !== "unknown") {
60
69
  await shellInt.ensureBunBinInPath(shell);
61
70
 
@@ -66,7 +75,6 @@ cli
66
75
  }
67
76
  }
68
77
 
69
- // 4. Reinstall aliases (unless skipped)
70
78
  if (!options.skipAliases && shell !== "unknown") {
71
79
  const success = await shellInt.installAliases(shell);
72
80
  if (success) {
@@ -84,8 +92,18 @@ cli
84
92
  .command("[...args]", "Run Claude Code with proxy (default)")
85
93
  .option("-m, --model <model>", "Override the model (e.g., glm-4.5, openai:gpt-4o)")
86
94
  .option("-p, --port <port>", "Port for the local proxy (default: 17870)")
95
+ .option("--json-log", "Output logs in JSON format")
87
96
  .action((args, options) => {
88
- runCommand(args, options);
97
+ if (options.jsonLog) {
98
+ logger.setJsonMode(true);
99
+ }
100
+ runCommand(args, { model: options.model, port: options.port });
101
+ });
102
+
103
+ cli
104
+ .command("--list", "List all available models (alias for 'models')")
105
+ .action(async () => {
106
+ await modelsCommand();
89
107
  });
90
108
 
91
109
  cli.help();
package/src/proxy/map.ts CHANGED
@@ -80,8 +80,26 @@ export function toOpenAIMessages(messages: AnthropicMessage[]) {
80
80
  }
81
81
 
82
82
  export function toGeminiContents(messages: AnthropicMessage[]) {
83
- return messages.map((m) => ({
84
- role: m.role === "assistant" ? "model" : "user",
85
- parts: [{ text: toPlainText(m.content) }]
86
- }));
83
+ let systemInstruction: string | null = null;
84
+ const mappedMessages: Array<{ role: string; parts: Array<{ text: string }> }> = [];
85
+
86
+ for (const m of messages) {
87
+ if (m.role === "system") {
88
+ systemInstruction = toPlainText(m.content);
89
+ } else {
90
+ mappedMessages.push({
91
+ role: m.role === "assistant" ? "model" : "user",
92
+ parts: [{ text: toPlainText(m.content) }]
93
+ });
94
+ }
95
+ }
96
+
97
+ if (systemInstruction && mappedMessages.length > 0) {
98
+ const firstMessage = mappedMessages[0];
99
+ if (firstMessage) {
100
+ firstMessage.parts.unshift({ text: `[System Instruction: ${systemInstruction}] ` });
101
+ }
102
+ }
103
+
104
+ return mappedMessages;
87
105
  }
@@ -1,7 +1,9 @@
1
1
  import { createParser } from "eventsource-parser";
2
2
  import type { AnthropicRequest } from "./types";
3
3
  import { toOpenAIMessages, toGeminiContents } from "./map";
4
- import { createStartMessage, createDelta, createStopMessage, ApiError } from "./utils";
4
+ import { createStartMessage, createDelta, createStopMessage, ApiError, parseErrorResponse } from "./utils";
5
+
6
+ const MAX_BUFFER_SIZE = 65536;
5
7
 
6
8
  // OpenAI
7
9
  export async function* streamOpenAI(
@@ -27,14 +29,14 @@ export async function* streamOpenAI(
27
29
  body: JSON.stringify(reqBody)
28
30
  });
29
31
 
30
- if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
32
+ if (!resp.ok) throw new ApiError(parseErrorResponse(await resp.text()), resp.status);
31
33
  if (!resp.body) throw new ApiError("No response body", 500);
32
34
 
33
35
  yield createStartMessage(model);
34
36
 
35
37
  const reader = resp.body.getReader();
36
38
  const decoder = new TextDecoder();
37
- let buffer = ""; // Store partial chunks if needed, but parser handles it
39
+ let buffer = "";
38
40
 
39
41
  const parser = createParser(((event: any) => {
40
42
  if (event.type !== "event") return;
@@ -51,7 +53,10 @@ export async function* streamOpenAI(
51
53
  const { value, done } = await reader.read();
52
54
  if (done) break;
53
55
  parser.feed(decoder.decode(value));
54
- if (buffer) {
56
+ if (buffer.length >= MAX_BUFFER_SIZE) {
57
+ yield buffer;
58
+ buffer = "";
59
+ } else if (buffer) {
55
60
  yield buffer;
56
61
  buffer = "";
57
62
  }
@@ -82,7 +87,7 @@ export async function* streamGemini(
82
87
  body: JSON.stringify(reqBody)
83
88
  });
84
89
 
85
- if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
90
+ if (!resp.ok) throw new ApiError(parseErrorResponse(await resp.text()), resp.status);
86
91
  if (!resp.body) throw new ApiError("No response body", 500);
87
92
 
88
93
  yield createStartMessage(model);
@@ -106,7 +111,10 @@ export async function* streamGemini(
106
111
  const { value, done } = await reader.read();
107
112
  if (done) break;
108
113
  parser.feed(decoder.decode(value));
109
- if (buffer) {
114
+ if (buffer.length >= MAX_BUFFER_SIZE) {
115
+ yield buffer;
116
+ buffer = "";
117
+ } else if (buffer) {
110
118
  yield buffer;
111
119
  buffer = "";
112
120
  }
@@ -130,7 +138,7 @@ export async function* streamPassThrough(
130
138
  body: JSON.stringify(body)
131
139
  });
132
140
 
133
- if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
141
+ if (!resp.ok) throw new ApiError(parseErrorResponse(await resp.text()), resp.status);
134
142
  if (!resp.body) throw new ApiError("No response body", 500);
135
143
 
136
144
  const reader = resp.body.getReader();
@@ -6,7 +6,7 @@ import type { Config } from "../core/config";
6
6
  import type { AnthropicRequest } from "./types";
7
7
 
8
8
  export function startProxyServer(config: Config, port: number = 17870) {
9
- return serve({
9
+ const server = serve({
10
10
  port,
11
11
  hostname: "127.0.0.1",
12
12
  async fetch(req) {
@@ -85,4 +85,14 @@ export function startProxyServer(config: Config, port: number = 17870) {
85
85
  }
86
86
  },
87
87
  });
88
+
89
+ const shutdown = () => {
90
+ server.stop();
91
+ process.exit(0);
92
+ };
93
+
94
+ process.on("SIGINT", shutdown);
95
+ process.on("SIGTERM", shutdown);
96
+
97
+ return server;
88
98
  }
@@ -6,7 +6,7 @@ export interface ProviderModel {
6
6
  }
7
7
 
8
8
  export interface AnthropicMessage {
9
- role: "user" | "assistant";
9
+ role: "user" | "assistant" | "system";
10
10
  content: string | Array<{ type: "text"; text: string } | { type: "tool_result"; content: string | any }>;
11
11
  }
12
12
 
@@ -53,6 +53,16 @@ export class ApiError extends Error {
53
53
  }
54
54
  }
55
55
 
56
+ export function parseErrorResponse(text: string): string {
57
+ try {
58
+ const json = JSON.parse(text);
59
+ if (json.error?.message) return json.error.message;
60
+ if (json.message) return json.message;
61
+ if (json.error) return typeof json.error === "string" ? json.error : JSON.stringify(json.error);
62
+ } catch {}
63
+ return text;
64
+ }
65
+
56
66
  // Convert an async generator to a ReadableStream
57
67
  export function toReadableStream<T>(gen: AsyncGenerator<T>): ReadableStream<T> {
58
68
  return new ReadableStream({