cc-x10ded 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,125 @@
1
+ import { intro, outro, text, confirm, select, spinner, isCancel, cancel, note } from "@clack/prompts";
2
+ import { ConfigManager } from "../core/config";
3
+ import { ShellIntegrator } from "../core/shell";
4
+ import { spawn } from "bun";
5
+ import pc from "picocolors";
6
+
7
+ export async function setupCommand() {
8
+ intro(pc.bgBlue(pc.white(" Claude-GLM Setup ")));
9
+
10
+ const configManager = new ConfigManager();
11
+ const config = await configManager.read();
12
+ const shellInt = new ShellIntegrator();
13
+
14
+ // 1. Check for Claude Binary (Self-Healing)
15
+ const claudePath = await shellInt.findClaudeBinary();
16
+ if (!claudePath) {
17
+ note(pc.yellow("Claude Code is not installed or not found."));
18
+ const installClaude = await confirm({
19
+ message: "Would you like to install Claude Code now? (npm required)",
20
+ initialValue: true
21
+ });
22
+
23
+ if (isCancel(installClaude)) { cancel("Setup cancelled"); process.exit(0); }
24
+
25
+ if (installClaude) {
26
+ const s = spinner();
27
+ s.start("Installing @anthropic-ai/claude-code...");
28
+ try {
29
+ const proc = spawn(["npm", "install", "-g", "@anthropic-ai/claude-code"], {
30
+ stdio: ["ignore", "ignore", "ignore"]
31
+ });
32
+ await proc.exited;
33
+ if (proc.exitCode === 0) {
34
+ s.stop(pc.green("Claude Code installed successfully!"));
35
+ } else {
36
+ s.stop(pc.red("Installation failed. Please install manually: npm install -g @anthropic-ai/claude-code"));
37
+ }
38
+ } catch {
39
+ s.stop(pc.red("npm not found or failed."));
40
+ }
41
+ }
42
+ }
43
+
44
+ // 2. Z.AI Config
45
+ if (!config.zaiApiKey) {
46
+ const openBrowser = await confirm({
47
+ message: "No Z.AI API key found. Open dashboard to get one?",
48
+ initialValue: true
49
+ });
50
+
51
+ if (openBrowser && !isCancel(openBrowser)) {
52
+ spawn(["open", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {});
53
+ spawn(["xdg-open", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {}); // Linux
54
+ spawn(["explorer", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {}); // Windows
55
+ }
56
+
57
+ const zaiKey = await text({
58
+ message: "Enter your Z.AI API Key:",
59
+ placeholder: "sk-...",
60
+ validate: (value) => {
61
+ if (!value) return "API Key is required";
62
+ },
63
+ });
64
+
65
+ if (isCancel(zaiKey)) {
66
+ cancel("Setup cancelled");
67
+ process.exit(0);
68
+ }
69
+
70
+ config.zaiApiKey = zaiKey as string;
71
+ }
72
+
73
+ // 3. Minimax Config
74
+ const minimax = await confirm({
75
+ message: "Do you want to configure Minimax (M2.1)?",
76
+ initialValue: !!config.minimaxApiKey,
77
+ });
78
+
79
+ if (isCancel(minimax)) { cancel("Setup cancelled"); process.exit(0); }
80
+
81
+ if (minimax) {
82
+ const mmKey = await text({
83
+ message: "Enter Minimax API Key:",
84
+ initialValue: config.minimaxApiKey,
85
+ placeholder: "Enter key or leave empty to skip",
86
+ });
87
+ if (isCancel(mmKey)) { cancel("Setup cancelled"); process.exit(0); }
88
+ if (mmKey) config.minimaxApiKey = mmKey as string;
89
+ }
90
+
91
+ // 4. Shell Config
92
+ const detectedShell = shellInt.detectShell();
93
+
94
+ if (detectedShell !== "unknown") {
95
+ const installAliases = await select({
96
+ message: `Install shell aliases for ${detectedShell}? (cc, ccg, ccm...)`,
97
+ options: [
98
+ { value: "yes", label: "Yes, install standard aliases", hint: "Recommended" },
99
+ { value: "no", label: "No, I will use 'ccx' directly" },
100
+ ],
101
+ });
102
+
103
+ if (isCancel(installAliases)) { cancel("Setup cancelled"); process.exit(0); }
104
+
105
+ if (installAliases === "yes") {
106
+ const s = spinner();
107
+ s.start("Installing aliases...");
108
+ const success = await shellInt.installAliases(detectedShell);
109
+
110
+ // Attempt to ensure local bin is in path
111
+ await shellInt.ensureLocalBinInPath(detectedShell);
112
+
113
+ if (success) {
114
+ s.stop("Aliases installed!");
115
+ config.aliases = true;
116
+ } else {
117
+ s.stop("Failed to install aliases (check permissions)");
118
+ }
119
+ }
120
+ }
121
+
122
+ await configManager.write(config);
123
+
124
+ outro(pc.green("Setup complete! Run 'ccx' to start."));
125
+ }
@@ -0,0 +1,111 @@
1
+ import { join } from "path";
2
+ import { homedir } from "os";
3
+ import { existsSync, mkdirSync } from "fs";
4
+
5
+ export interface Config {
6
+ zaiApiKey?: string;
7
+ minimaxApiKey?: string;
8
+ providers: {
9
+ openai?: { apiKey: string; baseUrl?: string };
10
+ openrouter?: { apiKey: string; baseUrl?: string; referer?: string; title?: string };
11
+ gemini?: { apiKey: string; baseUrl?: string };
12
+ anthropic?: { apiKey: string; baseUrl?: string };
13
+ };
14
+ defaults: {
15
+ model: string; // e.g., "glm-4.7"
16
+ provider: string; // e.g., "glm"
17
+ };
18
+ aliases: boolean; // whether to install aliases
19
+ }
20
+
21
+ const DEFAULT_CONFIG: Config = {
22
+ providers: {},
23
+ defaults: {
24
+ model: "glm-4.7",
25
+ provider: "glm",
26
+ },
27
+ aliases: false,
28
+ };
29
+
30
+ export class ConfigManager {
31
+ private configDir: string;
32
+ private configFile: string;
33
+
34
+ constructor() {
35
+ const home = homedir();
36
+ // Use XDG config standard if possible, but fallback to ~/.config
37
+ this.configDir = join(home, ".config", "claude-glm");
38
+ this.configFile = join(this.configDir, "config.json");
39
+ }
40
+
41
+ ensureConfigDir() {
42
+ if (!existsSync(this.configDir)) {
43
+ mkdirSync(this.configDir, { recursive: true });
44
+ }
45
+ }
46
+
47
+ async read(): Promise<Config> {
48
+ let config = { ...DEFAULT_CONFIG };
49
+
50
+ // 1. Read from file
51
+ try {
52
+ const file = Bun.file(this.configFile);
53
+ if (await file.exists()) {
54
+ const json = await file.json();
55
+ config = { ...config, ...json };
56
+ }
57
+ } catch (e) {
58
+ // ignore error
59
+ }
60
+
61
+ // 2. Env Var Fallback (Auto-Discovery)
62
+ if (!config.zaiApiKey && process.env.ZAI_API_KEY) {
63
+ config.zaiApiKey = process.env.ZAI_API_KEY;
64
+ }
65
+ if (!config.zaiApiKey && process.env.GLM_API_KEY) {
66
+ config.zaiApiKey = process.env.GLM_API_KEY;
67
+ }
68
+
69
+ if (!config.minimaxApiKey && process.env.MINIMAX_API_KEY) {
70
+ config.minimaxApiKey = process.env.MINIMAX_API_KEY;
71
+ }
72
+
73
+ if (!config.providers.openai?.apiKey && process.env.OPENAI_API_KEY) {
74
+ config.providers.openai = {
75
+ apiKey: process.env.OPENAI_API_KEY,
76
+ baseUrl: process.env.OPENAI_BASE_URL
77
+ };
78
+ }
79
+
80
+ if (!config.providers.anthropic?.apiKey && process.env.ANTHROPIC_API_KEY) {
81
+ config.providers.anthropic = {
82
+ apiKey: process.env.ANTHROPIC_API_KEY,
83
+ baseUrl: process.env.ANTHROPIC_BASE_URL
84
+ };
85
+ }
86
+
87
+ if (!config.providers.gemini?.apiKey && process.env.GEMINI_API_KEY) {
88
+ config.providers.gemini = { apiKey: process.env.GEMINI_API_KEY };
89
+ }
90
+
91
+ if (!config.providers.openrouter?.apiKey && process.env.OPENROUTER_API_KEY) {
92
+ config.providers.openrouter = {
93
+ apiKey: process.env.OPENROUTER_API_KEY,
94
+ baseUrl: process.env.OPENROUTER_BASE_URL,
95
+ referer: process.env.OPENROUTER_REFERER,
96
+ title: process.env.OPENROUTER_TITLE
97
+ };
98
+ }
99
+
100
+ return config;
101
+ }
102
+
103
+ async write(config: Config) {
104
+ this.ensureConfigDir();
105
+ await Bun.write(this.configFile, JSON.stringify(config, null, 2));
106
+ }
107
+
108
+ getPath() {
109
+ return this.configFile;
110
+ }
111
+ }
@@ -0,0 +1,166 @@
1
+ import { join } from "path";
2
+ import { homedir, platform } from "os";
3
+ import { existsSync, mkdirSync, readFileSync, appendFileSync, writeFileSync } from "fs";
4
+
5
+ export type ShellType = "bash" | "zsh" | "fish" | "powershell" | "unknown";
6
+
7
+ export class ShellIntegrator {
8
+ private home = homedir();
9
+
10
+ detectShell(): ShellType {
11
+ const shellPath = process.env.SHELL;
12
+ if (platform() === "win32") return "powershell";
13
+ if (!shellPath) return "unknown";
14
+ if (shellPath.includes("zsh")) return "zsh";
15
+ if (shellPath.includes("bash")) return "bash";
16
+ if (shellPath.includes("fish")) return "fish";
17
+ return "unknown";
18
+ }
19
+
20
+ getProfilePath(shell: ShellType): string | null {
21
+ if (shell === "zsh") return join(this.home, ".zshrc");
22
+ if (shell === "bash") {
23
+ // Prefer .bashrc, fallback to .bash_profile
24
+ const bashrc = join(this.home, ".bashrc");
25
+ const bashProfile = join(this.home, ".bash_profile");
26
+ if (existsSync(bashrc)) return bashrc;
27
+ if (existsSync(bashProfile)) return bashProfile;
28
+ return bashrc; // default
29
+ }
30
+ if (shell === "fish") return join(this.home, ".config", "fish", "config.fish");
31
+ if (shell === "powershell") {
32
+ // Standard PowerShell profile paths
33
+ // We try to find Documents folder first
34
+ const docs = join(this.home, "Documents");
35
+ if (existsSync(docs)) {
36
+ const psDir = join(docs, "PowerShell");
37
+ if (!existsSync(psDir)) {
38
+ try { mkdirSync(psDir, { recursive: true }); } catch {}
39
+ }
40
+ return join(psDir, "Microsoft.PowerShell_profile.ps1");
41
+ }
42
+ // Fallback to OneDrive/Documents if needed or just home/Documents
43
+ return join(this.home, "Documents", "PowerShell", "Microsoft.PowerShell_profile.ps1");
44
+ }
45
+ return null;
46
+ }
47
+
48
+ async installAliases(shell: ShellType) {
49
+ const profile = this.getProfilePath(shell);
50
+ if (!profile) return false;
51
+
52
+ // Ensure directory exists for the profile
53
+ const dir = join(profile, "..");
54
+ if (!existsSync(dir)) {
55
+ try { mkdirSync(dir, { recursive: true }); } catch {}
56
+ }
57
+
58
+ // Check if file exists, if not create empty
59
+ if (!existsSync(profile)) {
60
+ await Bun.write(profile, "");
61
+ }
62
+
63
+ const aliasBlock = this.generateAliasBlock(shell);
64
+ if (!aliasBlock) return false;
65
+
66
+ let content = "";
67
+ if (existsSync(profile)) {
68
+ content = await Bun.file(profile).text();
69
+ }
70
+
71
+ const startMarker = "# >>> claude-glm-wrapper >>>";
72
+ const endMarker = "# <<< claude-glm-wrapper <<<";
73
+
74
+ // Remove existing block
75
+ const regex = new RegExp(`${startMarker}[\\s\\S]*?${endMarker}`, "g");
76
+ content = content.replace(regex, "").trim();
77
+
78
+ // Append new block
79
+ const newContent = `${content}\n\n${startMarker}\n${aliasBlock}\n${endMarker}\n`;
80
+
81
+ await Bun.write(profile, newContent);
82
+ return true;
83
+ }
84
+
85
+ private generateAliasBlock(shell: ShellType): string {
86
+ if (shell === "zsh" || shell === "bash") {
87
+ return `
88
+ alias cc='ccx'
89
+ alias ccg='ccx --model=glm-4.7'
90
+ alias ccg46='ccx --model=glm-4.6'
91
+ alias ccg45='ccx --model=glm-4.5'
92
+ alias ccf='ccx --model=glm-4.5-air'
93
+ alias ccm='ccx --model=MiniMax-M2.1'
94
+ `.trim();
95
+ }
96
+ if (shell === "fish") {
97
+ return `
98
+ alias cc 'ccx'
99
+ alias ccg 'ccx --model=glm-4.7'
100
+ alias ccg46 'ccx --model=glm-4.6'
101
+ alias ccg45 'ccx --model=glm-4.5'
102
+ alias ccf 'ccx --model=glm-4.5-air'
103
+ alias ccm 'ccx --model=MiniMax-M2.1'
104
+ `.trim();
105
+ }
106
+ if (shell === "powershell") {
107
+ return `
108
+ Function cc { ccx @args }
109
+ Function ccg { ccx --model=glm-4.7 @args }
110
+ Function ccg46 { ccx --model=glm-4.6 @args }
111
+ Function ccg45 { ccx --model=glm-4.5 @args }
112
+ Function ccf { ccx --model=glm-4.5-air @args }
113
+ Function ccm { ccx --model=MiniMax-M2.1 @args }
114
+ `.trim();
115
+ }
116
+ return "";
117
+ }
118
+
119
+ async ensureLocalBinInPath(shell: ShellType) {
120
+ if (platform() === "win32") return; // Windows handles PATH differently (usually handled by installer or user)
121
+
122
+ const profile = this.getProfilePath(shell);
123
+ if (!profile || !existsSync(profile)) return;
124
+
125
+ const localBin = join(this.home, ".local", "bin");
126
+ const content = await Bun.file(profile).text();
127
+
128
+ // Heuristic check
129
+ if (!content.includes(localBin) && !process.env.PATH?.includes(localBin)) {
130
+ const exportCmd = `export PATH="$HOME/.local/bin:$PATH"`;
131
+ if (!content.includes(exportCmd)) {
132
+ await Bun.write(profile, `${content}\n\n# Added by claude-glm-wrapper\n${exportCmd}\n`);
133
+ }
134
+ }
135
+ }
136
+
137
+ /**
138
+ * Hunt for the 'claude' binary in common locations
139
+ */
140
+ async findClaudeBinary(): Promise<string | null> {
141
+ // 1. Check PATH
142
+ try {
143
+ const path = Bun.which("claude");
144
+ if (path) return path;
145
+ } catch {}
146
+
147
+ // 2. Common Locations
148
+ const locations = [
149
+ join(this.home, ".npm-global", "bin", "claude"),
150
+ "/usr/local/bin/claude",
151
+ "/opt/homebrew/bin/claude",
152
+ join(this.home, "bin", "claude"),
153
+ join(this.home, ".local", "bin", "claude"),
154
+ // Windows
155
+ join(process.env.APPDATA || "", "npm", "claude.cmd"),
156
+ join(process.env.APPDATA || "", "npm", "claude.ps1"),
157
+ ];
158
+
159
+ for (const loc of locations) {
160
+ if (existsSync(loc)) return loc;
161
+ }
162
+
163
+ return null;
164
+ }
165
+
166
+ }
package/src/index.ts ADDED
@@ -0,0 +1,53 @@
1
+ #!/usr/bin/env bun
2
+ import { cac } from "cac";
3
+ import { runCommand } from "./commands/run";
4
+ import { setupCommand } from "./commands/setup";
5
+ import { configCommand } from "./commands/config";
6
+ import { doctorCommand } from "./commands/doctor";
7
+ import packageJson from "../package.json";
8
+
9
+ const cli = cac("ccx");
10
+
11
+ cli
12
+ .command("setup", "Run the interactive setup wizard")
13
+ .action(setupCommand);
14
+
15
+ cli
16
+ .command("config", "Edit configuration file")
17
+ .action(configCommand);
18
+
19
+ cli
20
+ .command("doctor", "Run self-diagnostics")
21
+ .action(doctorCommand);
22
+
23
+ cli
24
+ .command("update", "Update ccx to the latest version")
25
+ .action(async () => {
26
+ console.log("Updating ccx...");
27
+ const { spawn } = await import("bun");
28
+ const proc = spawn(["npm", "install", "-g", "claude-glm-wrapper"], { stdio: "inherit" });
29
+ await proc.exited;
30
+ if (proc.exitCode === 0) {
31
+ console.log("✅ Update complete!");
32
+ } else {
33
+ console.error("❌ Update failed.");
34
+ }
35
+ });
36
+
37
+ cli
38
+ .command("[...args]", "Run Claude Code with proxy (default)")
39
+ .option("-m, --model <model>", "Override the model (e.g., glm-4.5, openai:gpt-4o)")
40
+ .option("-p, --port <port>", "Port for the local proxy (default: 17870)")
41
+ .action((args, options) => {
42
+ runCommand(args, options);
43
+ });
44
+
45
+ cli.help();
46
+ cli.version(packageJson.version);
47
+
48
+ try {
49
+ cli.parse();
50
+ } catch (error: any) {
51
+ console.error(error.message);
52
+ process.exit(1);
53
+ }
@@ -0,0 +1,86 @@
1
+ import { AnthropicMessage, AnthropicRequest, ProviderKey, ProviderModel } from "./types";
2
+
3
+ const PROVIDER_PREFIXES: ProviderKey[] = ["openai", "openrouter", "gemini", "glm", "anthropic", "minimax"];
4
+
5
+ const PROVIDER_ALIASES: Record<string, ProviderKey> = {
6
+ "gpt": "openai",
7
+ "gpt4": "openai",
8
+ "oai": "openai",
9
+ "or": "openrouter",
10
+ "router": "openrouter",
11
+ "google": "gemini",
12
+ "bard": "gemini",
13
+ "ant": "anthropic",
14
+ "sonnet": "anthropic",
15
+ "claude": "anthropic",
16
+ "z": "glm",
17
+ "zai": "glm",
18
+ "mini": "minimax",
19
+ "mm": "minimax"
20
+ };
21
+
22
+ export function parseProviderModel(modelField: string, defaults?: ProviderModel): ProviderModel {
23
+ if (!modelField) {
24
+ if (defaults) return defaults;
25
+ throw new Error("Missing 'model' in request");
26
+ }
27
+
28
+ const sep = modelField.includes(":") ? ":" : modelField.includes("/") ? "/" : null;
29
+ if (!sep) {
30
+ // Try to auto-detect common model names without prefix
31
+ const lower = modelField.toLowerCase();
32
+ if (lower.startsWith("gpt")) return { provider: "openai", model: modelField };
33
+ if (lower.startsWith("gemini")) return { provider: "gemini", model: modelField };
34
+ if (lower.startsWith("claude")) return { provider: "anthropic", model: modelField };
35
+ if (lower.startsWith("glm")) return { provider: "glm", model: modelField };
36
+
37
+ return defaults ?? { provider: "glm", model: modelField };
38
+ }
39
+
40
+ const [maybeProv, ...rest] = modelField.split(sep);
41
+ let prov = maybeProv.toLowerCase();
42
+
43
+ // Resolve alias
44
+ if (PROVIDER_ALIASES[prov]) {
45
+ prov = PROVIDER_ALIASES[prov];
46
+ }
47
+
48
+ if (!PROVIDER_PREFIXES.includes(prov as ProviderKey)) {
49
+ // If prefix unknown, treat whole string as model for default provider
50
+ return defaults ?? { provider: "glm", model: modelField };
51
+ }
52
+
53
+ return { provider: prov as ProviderKey, model: rest.join(sep) };
54
+ }
55
+
56
+ export function toPlainText(content: AnthropicMessage["content"]): string {
57
+ if (typeof content === "string") return content;
58
+ return content
59
+ .map((c) => {
60
+ if (typeof c === "string") return c;
61
+ if (c.type === "text") return c.text;
62
+ // @ts-ignore
63
+ if (c.type === "tool_result") {
64
+ // @ts-ignore
65
+ if (typeof c.content === "string") return c.content;
66
+ // @ts-ignore
67
+ return JSON.stringify(c.content);
68
+ }
69
+ return "";
70
+ })
71
+ .join("");
72
+ }
73
+
74
+ export function toOpenAIMessages(messages: AnthropicMessage[]) {
75
+ return messages.map((m) => ({
76
+ role: m.role,
77
+ content: toPlainText(m.content)
78
+ }));
79
+ }
80
+
81
+ export function toGeminiContents(messages: AnthropicMessage[]) {
82
+ return messages.map((m) => ({
83
+ role: m.role === "assistant" ? "model" : "user",
84
+ parts: [{ text: toPlainText(m.content) }]
85
+ }));
86
+ }
@@ -0,0 +1,142 @@
1
+ import { createParser } from "eventsource-parser";
2
+ import { AnthropicRequest } from "./types";
3
+ import { toOpenAIMessages, toGeminiContents } from "./map";
4
+ import { createStartMessage, createDelta, createStopMessage, ApiError } from "./utils";
5
+
6
+ // OpenAI
7
+ export async function* streamOpenAI(
8
+ body: AnthropicRequest,
9
+ model: string,
10
+ key: string,
11
+ baseUrl: string
12
+ ) {
13
+ const url = `${baseUrl}/chat/completions`;
14
+ const reqBody: any = {
15
+ model,
16
+ messages: toOpenAIMessages(body.messages),
17
+ stream: true,
18
+ temperature: body.temperature ?? 0.7,
19
+ max_tokens: body.max_tokens
20
+ };
21
+
22
+ if (body.tools?.length) reqBody.tools = body.tools;
23
+
24
+ const resp = await fetch(url, {
25
+ method: "POST",
26
+ headers: { Authorization: `Bearer ${key}`, "Content-Type": "application/json" },
27
+ body: JSON.stringify(reqBody)
28
+ });
29
+
30
+ if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
31
+ if (!resp.body) throw new ApiError("No response body", 500);
32
+
33
+ yield createStartMessage(model);
34
+
35
+ const reader = resp.body.getReader();
36
+ const decoder = new TextDecoder();
37
+ let buffer = ""; // Store partial chunks if needed, but parser handles it
38
+
39
+ const parser = createParser((event) => {
40
+ if (event.type !== "event") return;
41
+ const data = event.data;
42
+ if (!data || data === "[DONE]") return;
43
+ try {
44
+ const json = JSON.parse(data);
45
+ const chunk = json.choices?.[0]?.delta?.content ?? "";
46
+ if (chunk) buffer += createDelta(chunk);
47
+ } catch {}
48
+ });
49
+
50
+ while (true) {
51
+ const { value, done } = await reader.read();
52
+ if (done) break;
53
+ parser.feed(decoder.decode(value));
54
+ if (buffer) {
55
+ yield buffer;
56
+ buffer = "";
57
+ }
58
+ }
59
+
60
+ yield createStopMessage();
61
+ }
62
+
63
+ // Gemini
64
+ export async function* streamGemini(
65
+ body: AnthropicRequest,
66
+ model: string,
67
+ key: string,
68
+ baseUrl: string
69
+ ) {
70
+ const url = `${baseUrl}/models/${encodeURIComponent(model)}:streamGenerateContent?alt=sse&key=${key}`;
71
+ const reqBody = {
72
+ contents: toGeminiContents(body.messages),
73
+ generationConfig: {
74
+ temperature: body.temperature ?? 0.7,
75
+ maxOutputTokens: body.max_tokens
76
+ }
77
+ };
78
+
79
+ const resp = await fetch(url, {
80
+ method: "POST",
81
+ headers: { "Content-Type": "application/json" },
82
+ body: JSON.stringify(reqBody)
83
+ });
84
+
85
+ if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
86
+ if (!resp.body) throw new ApiError("No response body", 500);
87
+
88
+ yield createStartMessage(model);
89
+
90
+ const reader = resp.body.getReader();
91
+ const decoder = new TextDecoder();
92
+ let buffer = "";
93
+
94
+ const parser = createParser((event) => {
95
+ if (event.type !== "event") return;
96
+ const data = event.data;
97
+ if (!data) return;
98
+ try {
99
+ const json = JSON.parse(data);
100
+ const text = json?.candidates?.[0]?.content?.parts?.map((p: any) => p?.text || "").join("") || "";
101
+ if (text) buffer += createDelta(text);
102
+ } catch {}
103
+ });
104
+
105
+ while (true) {
106
+ const { value, done } = await reader.read();
107
+ if (done) break;
108
+ parser.feed(decoder.decode(value));
109
+ if (buffer) {
110
+ yield buffer;
111
+ buffer = "";
112
+ }
113
+ }
114
+
115
+ yield createStopMessage();
116
+ }
117
+
118
+ // PassThrough (GLM, Anthropic, Minimax)
119
+ export async function* streamPassThrough(
120
+ body: AnthropicRequest,
121
+ baseUrl: string,
122
+ headers: Record<string, string>
123
+ ) {
124
+ const url = `${baseUrl.endsWith("/") ? baseUrl.slice(0, -1) : baseUrl}/v1/messages`;
125
+ body.stream = true;
126
+
127
+ const resp = await fetch(url, {
128
+ method: "POST",
129
+ headers,
130
+ body: JSON.stringify(body)
131
+ });
132
+
133
+ if (!resp.ok) throw new ApiError(await resp.text(), resp.status);
134
+ if (!resp.body) throw new ApiError("No response body", 500);
135
+
136
+ const reader = resp.body.getReader();
137
+ while (true) {
138
+ const { value, done } = await reader.read();
139
+ if (done) break;
140
+ yield value;
141
+ }
142
+ }