cc-x10ded 3.0.0 → 3.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -5,6 +5,31 @@ All notable changes to this project will be documented in this file.
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+ ## [3.0.0] - 2025-12-24
+
+ ### 🚀 Major Rewrite (The "30x" Release)
+ This release is a complete reimagining of the project, rewriting the entire codebase from Bash/PowerShell to **Bun/TypeScript**.
+
+ ### Added
+ - **Single Binary (`ccx`)**: Replaced 5+ wrapper scripts with a single, standalone executable. No dependencies required.
+ - **Interactive Setup (`ccx setup`)**: Beautiful CLI wizard to configure keys and aliases safely.
+ - **Self-Healing Diagnostics (`ccx doctor`)**: Checks for API keys, Claude installation, shell paths, and configuration health.
+ - **Auto-Discovery**: Automatically detects API keys (`ZAI_API_KEY`, `OPENAI_API_KEY`, etc.) from your environment.
+ - **Smart Proxy**: `ccx` acts as a transparent proxy for OpenAI, Gemini, Minimax, and GLM models.
+ - **Port Hunting**: Automatically finds an available port if 17870 is busy.
+ - **Binary Hunting**: Robustly locates the `claude` binary in standard locations (Homebrew, NVM, npm global) even if it's missing from PATH.
+ - **PowerShell Integration**: Native support for PowerShell profiles and aliases.
+
+ ### Changed
+ - **Renamed**: Project package name changed to `cc-x10ded`.
+ - **Runtime**: Switched from Node.js to **Bun** for instant startup performance.
+ - **Config**: Moved from hardcoded script variables to `~/.config/claude-glm/config.json`.
+ - **Proxy**: Migrated from Fastify to `Bun.serve()` for lower latency streaming.
+
+ ### Removed
+ - Removed legacy `install.sh` and `install.ps1` scripts.
+ - Removed dependency on user having Node.js installed (for binary usage).
+
  ## [Unreleased]

  ### Added
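The "Port Hunting" entry above refers to the proxy falling back to another port when the default 17870 is taken. A minimal sketch of that idea in Bun/TypeScript; the `findOpenPort` helper and the retry window are illustrative assumptions, not the package's actual implementation:

```ts
// Hypothetical sketch: probe ports upward from the default until one binds.
// Bun.serve() throws (EADDRINUSE) when the port is busy, which drives the retry.
function findOpenPort(start = 17870, attempts = 20): number {
  for (let port = start; port < start + attempts; port++) {
    try {
      const probe = Bun.serve({ port, fetch: () => new Response("ok") });
      probe.stop(true); // the port was free; release it right away
      return port;
    } catch {
      // Port already in use; try the next candidate.
    }
  }
  throw new Error(`No free port found in ${start}-${start + attempts - 1}`);
}

console.log(`proxy would listen on port ${findOpenPort()}`);
```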
package/CONTRIBUTING.md CHANGED
@@ -1,6 +1,6 @@
  # Contributing to Claude-GLM Wrapper

- Thank you for your interest in contributing to the **Bun-based** version of cc-x10ded!
+ Thank you for your interest in contributing to the **Bun-based** version of Claude-GLM Wrapper!

  ## Development Setup

package/README.md CHANGED
@@ -5,7 +5,7 @@
  > This is an actively maintained community fork of the original [claude-glm-wrapper](https://github.com/JoeInnsp23/claude-glm-wrapper).
  > Now rewritten in **Bun** for 10x speed and single-binary simplicity.
  >
- > Install via: `bunx cc-x10ded` or download the binary.
+ > Install via: `bunx claude-glm-wrapper` or download the binary.

  ---

@@ -27,7 +27,7 @@ Use [Z.AI's GLM models](https://z.ai), [Minimax](https://minimax.io), [OpenAI](h
 
  **If you have Bun:**
  ```bash
- bunx cc-x10ded setup
+ bunx claude-glm-wrapper setup
  ```

  **Manual Download (Mac/Linux/Windows):**
package/package.json CHANGED
@@ -1,7 +1,11 @@
  {
    "name": "cc-x10ded",
-   "version": "3.0.0",
-   "description": "Cross-platform wrapper for Claude Code with Z.AI GLM models (Bun Edition)",
+   "version": "3.0.9",
+   "description": "Extend Claude Code with custom OpenAI-compatible model providers",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/MohMaya/claude-glm-wrapper"
+   },
    "type": "module",
    "bin": {
      "ccx": "./src/index.ts"
@@ -12,7 +16,9 @@
    "build:all": "bun run build:linux && bun run build:windows && bun run build:darwin",
    "build:linux": "bun build --compile --minify --sourcemap --target=bun-linux-x64 ./src/index.ts --outfile dist/ccx-linux",
    "build:windows": "bun build --compile --minify --sourcemap --target=bun-windows-x64 ./src/index.ts --outfile dist/ccx.exe",
-   "build:darwin": "bun build --compile --minify --sourcemap --target=bun-darwin-arm64 ./src/index.ts --outfile dist/ccx-darwin"
+   "build:darwin": "bun build --compile --minify --sourcemap --target=bun-darwin-arm64 ./src/index.ts --outfile dist/ccx-darwin",
+   "typecheck": "tsc --noEmit --skipLibCheck",
+   "prepare": "husky"
  },
  "dependencies": {
    "@clack/prompts": "^0.11.0",
@@ -22,6 +28,9 @@
    "zod": "^3.22.4"
  },
  "devDependencies": {
-   "@types/bun": "latest"
+   "@types/bun": "latest",
+   "husky": "^9.1.7",
+   "lint-staged": "^16.2.7",
+   "typescript": "^5.9.3"
  }
  }
@@ -1,5 +1,5 @@
  import { ConfigManager } from "../core/config";
- import pc from "picocolors";
+ import * as pc from "picocolors";
  import { spawn } from "bun";

  export async function configCommand() {
@@ -14,7 +14,7 @@ export async function configCommand() {

    try {
      const proc = spawn([editor, path], {
-       stdio: "inherit"
+       stdio: ["inherit", "inherit", "inherit"]
      });
      await proc.exited;
    } catch (e) {
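The `stdio` change above (repeated in the run and update commands further down) matches Bun's `spawn` API, which takes a three-element `[stdin, stdout, stderr]` tuple rather than Node's single-string shorthand. A hedged sketch of that call shape; the `echo` child is illustrative only:

```ts
// Illustrative only: Bun.spawn with an explicit [stdin, stdout, stderr] tuple.
import { spawn } from "bun";

const proc = spawn(["echo", "hello from a child process"], {
  // Forward all three standard streams to the parent terminal.
  stdio: ["inherit", "inherit", "inherit"],
});

await proc.exited; // resolves once the child exits
console.log("exit code:", proc.exitCode);
```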
@@ -1,7 +1,7 @@
  import { intro, outro, spinner } from "@clack/prompts";
  import { ShellIntegrator } from "../core/shell";
  import { ConfigManager } from "../core/config";
- import pc from "picocolors";
+ import * as pc from "picocolors";
  import { existsSync } from "fs";

  export async function doctorCommand() {
@@ -2,7 +2,7 @@ import { spawn } from "bun";
  import { ConfigManager } from "../core/config";
  import { startProxyServer } from "../proxy/server";
  import { ShellIntegrator } from "../core/shell";
- import pc from "picocolors";
+ import * as pc from "picocolors";

  export async function runCommand(args: string[], options: { model?: string; port?: number }) {
    const configManager = new ConfigManager();
@@ -2,7 +2,7 @@ import { intro, outro, text, confirm, select, spinner, isCancel, cancel, note }
  import { ConfigManager } from "../core/config";
  import { ShellIntegrator } from "../core/shell";
  import { spawn } from "bun";
- import pc from "picocolors";
+ import * as pc from "picocolors";

  export async function setupCommand() {
    intro(pc.bgBlue(pc.white(" Claude-GLM Setup ")));
@@ -49,9 +49,9 @@ export async function setupCommand() {
    });

    if (openBrowser && !isCancel(openBrowser)) {
-     spawn(["open", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {});
-     spawn(["xdg-open", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {}); // Linux
-     spawn(["explorer", "https://z.ai/manage-apikey/apikey-list"]).catch(() => {}); // Windows
+     try { spawn(["open", "https://z.ai/manage-apikey/apikey-list"]); } catch {}
+     try { spawn(["xdg-open", "https://z.ai/manage-apikey/apikey-list"]); } catch {} // Linux
+     try { spawn(["explorer", "https://z.ai/manage-apikey/apikey-list"]); } catch {} // Windows
    }

    const zaiKey = await text({
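On the browser-opening change: Bun's `spawn` returns a `Subprocess` rather than a promise, so the old `.catch()` chains had nothing to attach to; the new code guards the synchronous throw instead. A hedged alternative that picks one opener per platform rather than firing all three commands; the `openUrl` helper is illustrative, not the package's code:

```ts
// Illustrative sketch: choose the platform's URL opener instead of trying all three.
import { spawn } from "bun";
import { platform } from "os";

function openUrl(url: string) {
  const opener =
    platform() === "darwin" ? "open" :
    platform() === "win32" ? "explorer" :
    "xdg-open"; // Linux and most other Unixes
  try {
    spawn([opener, url]); // may throw synchronously if the opener binary is missing
  } catch {
    console.log(`Please open ${url} manually.`);
  }
}

openUrl("https://z.ai/manage-apikey/apikey-list");
```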
package/src/core/shell.ts CHANGED
@@ -1,6 +1,6 @@
  import { join } from "path";
  import { homedir, platform } from "os";
- import { existsSync, readFileSync, appendFileSync, writeFileSync } from "fs";
+ import { existsSync, mkdirSync } from "fs";

  export type ShellType = "bash" | "zsh" | "fish" | "powershell" | "unknown";

package/src/index.ts CHANGED
@@ -25,7 +25,7 @@ cli
  .action(async () => {
    console.log("Updating ccx...");
    const { spawn } = await import("bun");
-   const proc = spawn(["npm", "install", "-g", "claude-glm-wrapper"], { stdio: "inherit" });
+   const proc = spawn(["npm", "install", "-g", "claude-glm-wrapper"], { stdio: ["inherit", "inherit", "inherit"] });
    await proc.exited;
    if (proc.exitCode === 0) {
      console.log("✅ Update complete!");
package/src/proxy/map.ts CHANGED
@@ -1,4 +1,4 @@
- import { AnthropicMessage, AnthropicRequest, ProviderKey, ProviderModel } from "./types";
+ import type { AnthropicMessage, AnthropicRequest, ProviderKey, ProviderModel } from "./types";

  const PROVIDER_PREFIXES: ProviderKey[] = ["openai", "openrouter", "gemini", "glm", "anthropic", "minimax"];

@@ -38,11 +38,11 @@ export function parseProviderModel(modelField: string, defaults?: ProviderModel)
  }

  const [maybeProv, ...rest] = modelField.split(sep);
- let prov = maybeProv.toLowerCase();
+ let prov = (maybeProv || "").toLowerCase();

  // Resolve alias
  if (PROVIDER_ALIASES[prov]) {
-   prov = PROVIDER_ALIASES[prov];
+   prov = PROVIDER_ALIASES[prov] || prov;
  }

  if (!PROVIDER_PREFIXES.includes(prov as ProviderKey)) {
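The `map.ts` edits above are type-only imports plus fallbacks that look like guards for stricter TypeScript index-access checks in `parseProviderModel`, the function that routes `provider/model` strings to a backend. A standalone, hedged sketch of that prefix-plus-alias scheme; the alias table, return shape, and model names are assumptions for illustration:

```ts
// Standalone sketch of the provider-prefix parsing idea; names are illustrative.
type ProviderKey = "openai" | "openrouter" | "gemini" | "glm" | "anthropic" | "minimax";

const PROVIDER_PREFIXES: ProviderKey[] = ["openai", "openrouter", "gemini", "glm", "anthropic", "minimax"];
const PROVIDER_ALIASES: Record<string, ProviderKey> = { zai: "glm", google: "gemini" }; // assumed aliases

function parseProviderModel(modelField: string): { provider: ProviderKey; model: string } | undefined {
  const [maybeProv, ...rest] = modelField.split("/");
  let prov = (maybeProv || "").toLowerCase();
  prov = PROVIDER_ALIASES[prov] || prov; // resolve an alias, otherwise keep the prefix as-is
  if (!PROVIDER_PREFIXES.includes(prov as ProviderKey)) return undefined;
  return { provider: prov as ProviderKey, model: rest.join("/") };
}

console.log(parseProviderModel("glm/glm-4-latest"));   // { provider: "glm", model: "glm-4-latest" }
console.log(parseProviderModel("google/gemini-demo")); // alias resolves to the gemini provider
```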
@@ -1,5 +1,5 @@
  import { createParser } from "eventsource-parser";
- import { AnthropicRequest } from "./types";
+ import type { AnthropicRequest } from "./types";
  import { toOpenAIMessages, toGeminiContents } from "./map";
  import { createStartMessage, createDelta, createStopMessage, ApiError } from "./utils";

@@ -36,7 +36,7 @@ export async function* streamOpenAI(
  const decoder = new TextDecoder();
  let buffer = ""; // Store partial chunks if needed, but parser handles it

- const parser = createParser((event) => {
+ const parser = createParser(((event: any) => {
    if (event.type !== "event") return;
    const data = event.data;
    if (!data || data === "[DONE]") return;
@@ -45,7 +45,7 @@
      const chunk = json.choices?.[0]?.delta?.content ?? "";
      if (chunk) buffer += createDelta(chunk);
    } catch {}
- });
+ }) as any);

  while (true) {
    const { value, done } = await reader.read();
@@ -91,7 +91,7 @@ export async function* streamGemini(
  const decoder = new TextDecoder();
  let buffer = "";

- const parser = createParser((event) => {
+ const parser = createParser(((event: any) => {
    if (event.type !== "event") return;
    const data = event.data;
    if (!data) return;
@@ -100,7 +100,7 @@
      const text = json?.candidates?.[0]?.content?.parts?.map((p: any) => p?.text || "").join("") || "";
      if (text) buffer += createDelta(text);
    } catch {}
- });
+ }) as any);

  while (true) {
    const { value, done } = await reader.read();
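The `as any` casts around `createParser` appear to paper over a signature change: the deleted lockfile pins eventsource-parser 3.x, whose `createParser` takes a callbacks object rather than a bare function. A hedged sketch of the 3.x style, with an OpenAI-style delta payload used purely as sample input:

```ts
// Hedged sketch of the eventsource-parser 3.x callbacks-object API.
import { createParser } from "eventsource-parser";

let buffer = "";
const parser = createParser({
  onEvent(event) {
    const data = event.data;
    if (!data || data === "[DONE]") return;
    try {
      const json = JSON.parse(data);
      const chunk = json.choices?.[0]?.delta?.content ?? "";
      if (chunk) buffer += chunk;
    } catch {
      // Ignore partial or malformed JSON frames.
    }
  },
});

parser.feed('data: {"choices":[{"delta":{"content":"Hello"}}]}\n\n');
parser.feed("data: [DONE]\n\n");
console.log(buffer); // "Hello"
```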
@@ -1,8 +1,9 @@
  import { serve } from "bun";
  import { parseProviderModel } from "./map";
  import { streamOpenAI, streamGemini, streamPassThrough } from "./providers";
- import { Config } from "../core/config";
- import { AnthropicRequest } from "./types";
+ import { toReadableStream } from "./utils";
+ import type { Config } from "../core/config";
+ import type { AnthropicRequest } from "./types";

  export function startProxyServer(config: Config, port: number = 17870) {
    return serve({
@@ -32,19 +33,19 @@ export function startProxyServer(config: Config, port: number = 17870) {
    if (provider === "openai") {
      const conf = providers.openai;
      if (!conf?.apiKey) throw new Error("Missing OpenAI API Key");
-     return new Response(streamOpenAI(body, model, conf.apiKey, conf.baseUrl || "https://api.openai.com/v1"), { headers });
+     return new Response(toReadableStream(streamOpenAI(body, model, conf.apiKey, conf.baseUrl || "https://api.openai.com/v1")), { headers });
    }

    if (provider === "openrouter") {
      const conf = providers.openrouter;
      if (!conf?.apiKey) throw new Error("Missing OpenRouter API Key");
-     return new Response(streamOpenAI(body, model, conf.apiKey, conf.baseUrl || "https://openrouter.ai/api/v1"), { headers });
+     return new Response(toReadableStream(streamOpenAI(body, model, conf.apiKey, conf.baseUrl || "https://openrouter.ai/api/v1")), { headers });
    }

    if (provider === "gemini") {
      const conf = providers.gemini;
      if (!conf?.apiKey) throw new Error("Missing Gemini API Key");
-     return new Response(streamGemini(body, model, conf.apiKey, conf.baseUrl || "https://generativelanguage.googleapis.com/v1beta"), { headers });
+     return new Response(toReadableStream(streamGemini(body, model, conf.apiKey, conf.baseUrl || "https://generativelanguage.googleapis.com/v1beta")), { headers });
    }

    // Anthropic-compatible handlers (Passthrough)
@@ -77,7 +78,7 @@ export function startProxyServer(config: Config, port: number = 17870) {
      ...extraHeaders
    };

-   return new Response(streamPassThrough(body, baseUrl, apiHeaders), { headers });
+   return new Response(toReadableStream(streamPassThrough(body, baseUrl, apiHeaders)), { headers });

  } catch (e: any) {
    return new Response(JSON.stringify({ error: e.message }), { status: 500, headers: { "Content-Type": "application/json" } });
@@ -48,7 +48,24 @@ export function createStopMessage() {
  }

  export class ApiError extends Error {
-   constructor(public message: string, public statusCode: number = 500) {
+   constructor(public override message: string, public statusCode: number = 500) {
      super(message);
    }
  }
+
+ // Convert an async generator to a ReadableStream
+ export function toReadableStream<T>(gen: AsyncGenerator<T>): ReadableStream<T> {
+   return new ReadableStream({
+     async pull(controller) {
+       const { value, done } = await gen.next();
+       if (done) {
+         controller.close();
+       } else {
+         controller.enqueue(value);
+       }
+     },
+     cancel() {
+       gen.return(undefined);
+     }
+   });
+ }
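The new `toReadableStream` helper is what lets the proxy hand each provider's async generator to `Response` in the server hunk above, since a streamed `Response` body is a `ReadableStream`. A small hedged usage sketch of the same pattern in isolation; the demo generator and its chunks are illustrative:

```ts
// Illustrative only: the same generator-to-ReadableStream pattern, served by Bun.
function toReadableStream<T>(gen: AsyncGenerator<T>): ReadableStream<T> {
  return new ReadableStream({
    async pull(controller) {
      const { value, done } = await gen.next();
      if (done) controller.close();
      else controller.enqueue(value);
    },
    cancel() {
      gen.return(undefined); // lets the generator run its cleanup (finally blocks)
    },
  });
}

// Demo generator; chunks are pre-encoded so the Response body is plain bytes.
async function* demoChunks() {
  const enc = new TextEncoder();
  yield enc.encode("data: hello\n\n");
  yield enc.encode("data: world\n\n");
}

const server = Bun.serve({
  port: 0, // any free port, demo only
  fetch: () => new Response(toReadableStream(demoChunks()), {
    headers: { "Content-Type": "text/event-stream" },
  }),
});
console.log(`demo SSE stream at http://localhost:${server.port}/`);
```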
package/tsconfig.json CHANGED
@@ -13,6 +13,7 @@
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,
+   "resolveJsonModule": true,

    // Best practices
    "strict": true,
@@ -1,84 +0,0 @@
- # list of languages for which language servers are started; choose from:
- #   al bash clojure cpp csharp csharp_omnisharp
- #   dart elixir elm erlang fortran go
- #   haskell java julia kotlin lua markdown
- #   nix perl php python python_jedi r
- #   rego ruby ruby_solargraph rust scala swift
- #   terraform typescript typescript_vts yaml zig
- # Note:
- #  - For C, use cpp
- #  - For JavaScript, use typescript
- # Special requirements:
- #  - csharp: Requires the presence of a .sln file in the project folder.
- # When using multiple languages, the first language server that supports a given file will be used for that file.
- # The first language is the default language and the respective language server will be used as a fallback.
- # Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored.
- languages:
-   - typescript
-
- # the encoding used by text files in the project
- # For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
- encoding: "utf-8"
-
- # whether to use the project's gitignore file to ignore files
- # Added on 2025-04-07
- ignore_all_files_in_gitignore: true
-
- # list of additional paths to ignore
- # same syntax as gitignore, so you can use * and **
- # Was previously called `ignored_dirs`, please update your config if you are using that.
- # Added (renamed) on 2025-04-07
- ignored_paths: []
-
- # whether the project is in read-only mode
- # If set to true, all editing tools will be disabled and attempts to use them will result in an error
- # Added on 2025-04-18
- read_only: false
-
- # list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
- # Below is the complete list of tools for convenience.
- # To make sure you have the latest list of tools, and to view their descriptions,
- # execute `uv run scripts/print_tool_overview.py`.
- #
- # * `activate_project`: Activates a project by name.
- # * `check_onboarding_performed`: Checks whether project onboarding was already performed.
- # * `create_text_file`: Creates/overwrites a file in the project directory.
- # * `delete_lines`: Deletes a range of lines within a file.
- # * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
- # * `execute_shell_command`: Executes a shell command.
- # * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
- # * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
- # * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
- # * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
- # * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
- # * `initial_instructions`: Gets the initial instructions for the current project.
- #    Should only be used in settings where the system prompt cannot be set,
- #    e.g. in clients you have no control over, like Claude Desktop.
- # * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
- # * `insert_at_line`: Inserts content at a given line in a file.
- # * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
- # * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
- # * `list_memories`: Lists memories in Serena's project-specific memory store.
- # * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
- # * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
- # * `read_file`: Reads a file within the project directory.
- # * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
- # * `remove_project`: Removes a project from the Serena configuration.
- # * `replace_lines`: Replaces a range of lines within a file with new content.
- # * `replace_symbol_body`: Replaces the full definition of a symbol.
- # * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
- # * `search_for_pattern`: Performs a search for a pattern in the project.
- # * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
- # * `switch_modes`: Activates modes by providing a list of their names
- # * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
- # * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
- # * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
- # * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
- excluded_tools: []
-
- # initial prompt for the project. It will always be given to the LLM upon activating the project
- # (contrary to the memories, which are loaded on demand).
- initial_prompt: ""
-
- project_name: "claude-glm-wrapper"
- included_optional_tools: []
package/bun.lock DELETED
@@ -1,47 +0,0 @@
- {
-   "lockfileVersion": 1,
-   "configVersion": 1,
-   "workspaces": {
-     "": {
-       "name": "claude-glm-wrapper",
-       "dependencies": {
-         "@clack/prompts": "^0.11.0",
-         "cac": "^6.7.14",
-         "eventsource-parser": "^3.0.6",
-         "picocolors": "^1.1.1",
-         "zod": "^4.2.1",
-       },
-       "devDependencies": {
-         "@types/bun": "latest",
-       },
-       "peerDependencies": {
-         "typescript": "^5",
-       },
-     },
-   },
-   "packages": {
-     "@clack/core": ["@clack/core@0.5.0", "", { "dependencies": { "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow=="],
-
-     "@clack/prompts": ["@clack/prompts@0.11.0", "", { "dependencies": { "@clack/core": "0.5.0", "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw=="],
-
-     "@types/bun": ["@types/bun@1.3.5", "", { "dependencies": { "bun-types": "1.3.5" } }, "sha512-RnygCqNrd3srIPEWBd5LFeUYG7plCoH2Yw9WaZGyNmdTEei+gWaHqydbaIRkIkcbXwhBT94q78QljxN0Sk838w=="],
-
-     "@types/node": ["@types/node@25.0.3", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA=="],
-
-     "bun-types": ["bun-types@1.3.5", "", { "dependencies": { "@types/node": "*" } }, "sha512-inmAYe2PFLs0SUbFOWSVD24sg1jFlMPxOjOSSCYqUgn4Hsc3rDc7dFvfVYjFPNHtov6kgUeulV4SxbuIV/stPw=="],
-
-     "cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
-
-     "eventsource-parser": ["eventsource-parser@3.0.6", "", {}, "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg=="],
-
-     "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
-
-     "sisteransi": ["sisteransi@1.0.5", "", {}, "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg=="],
-
-     "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
-
-     "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
-
-     "zod": ["zod@4.2.1", "", {}, "sha512-0wZ1IRqGGhMP76gLqz8EyfBXKk0J2qo2+H3fi4mcUP/KtTocoX08nmIAHl1Z2kJIZbZee8KOpBCSNPRgauucjw=="],
-   }
- }
package/dist/ccx DELETED
Binary file