@vacbo/opencode-anthropic-fix 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/backoff.ts CHANGED
@@ -4,6 +4,26 @@ const QUOTA_EXHAUSTED_BACKOFFS = [60_000, 300_000, 1_800_000, 7_200_000];
4
4
  const AUTH_FAILED_BACKOFF = 5_000;
5
5
  const RATE_LIMIT_EXCEEDED_BACKOFF = 30_000;
6
6
  const MIN_BACKOFF_MS = 2_000;
7
// Platform/undici error codes that signal a transient transport failure worth
// retrying: connection reset/refused, broken pipe, timeout, undici socket error.
const RETRIABLE_NETWORK_ERROR_CODES = new Set(["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT", "UND_ERR_SOCKET"]);
// Error names that represent deliberate cancellation or a caller-side timeout;
// these are never retried, even if a retriable cause is nested underneath.
const NON_RETRIABLE_ERROR_NAMES = new Set(["AbortError", "TimeoutError", "APIUserAbortError"]);
// Lower-cased fragments matched as substrings against error messages when no
// error code is available. Keep entries lower-case: callers lower-case the
// message before comparing.
const RETRIABLE_NETWORK_ERROR_MESSAGES = [
  "bun proxy upstream error",
  "connection reset by peer",
  "connection reset by server",
  "econnreset",
  "econnrefused",
  "epipe",
  "etimedout",
  "fetch failed",
  "network connection lost",
  "socket hang up",
  "und_err_socket",
];

// Error augmented with the optional Node.js `code` property and an ES2022-style
// `cause` used for chained errors.
interface ErrorWithCode extends Error {
  code?: string;
  cause?: unknown;
}
7
27
 
8
28
  /**
9
29
  * Parse the Retry-After header from a response.
@@ -132,6 +152,69 @@ function bodyHasAccountError(body: string | object | null | undefined): boolean
132
152
  );
133
153
  }
134
154
 
155
+ function collectErrorChain(error: unknown): ErrorWithCode[] {
156
+ const queue: unknown[] = [error];
157
+ const visited = new Set<unknown>();
158
+ const chain: ErrorWithCode[] = [];
159
+
160
+ while (queue.length > 0) {
161
+ const candidate = queue.shift();
162
+ if (candidate == null || visited.has(candidate)) {
163
+ continue;
164
+ }
165
+
166
+ visited.add(candidate);
167
+
168
+ if (candidate instanceof Error) {
169
+ const typedCandidate = candidate as ErrorWithCode;
170
+ chain.push(typedCandidate);
171
+ if (typedCandidate.cause !== undefined) {
172
+ queue.push(typedCandidate.cause);
173
+ }
174
+ continue;
175
+ }
176
+
177
+ if (typeof candidate === "object" && "cause" in candidate) {
178
+ queue.push((candidate as { cause?: unknown }).cause);
179
+ }
180
+ }
181
+
182
+ return chain;
183
+ }
184
+
185
+ /**
186
+ * Check whether an error represents a transient transport/network failure.
187
+ */
188
+ export function isRetriableNetworkError(error: unknown): boolean {
189
+ if (typeof error === "string") {
190
+ const text = error.toLowerCase();
191
+ return RETRIABLE_NETWORK_ERROR_MESSAGES.some((signal) => text.includes(signal));
192
+ }
193
+
194
+ const chain = collectErrorChain(error);
195
+ if (chain.length === 0) {
196
+ return false;
197
+ }
198
+
199
+ for (const candidate of chain) {
200
+ if (NON_RETRIABLE_ERROR_NAMES.has(candidate.name)) {
201
+ return false;
202
+ }
203
+
204
+ const code = candidate.code?.toUpperCase();
205
+ if (code && RETRIABLE_NETWORK_ERROR_CODES.has(code)) {
206
+ return true;
207
+ }
208
+
209
+ const message = candidate.message.toLowerCase();
210
+ if (RETRIABLE_NETWORK_ERROR_MESSAGES.some((signal) => message.includes(signal))) {
211
+ return true;
212
+ }
213
+ }
214
+
215
+ return false;
216
+ }
217
+
135
218
  /**
136
219
  * Check whether an HTTP response represents an account-specific error
137
220
  * that would benefit from switching to a different account.
@@ -379,3 +379,106 @@ describe("createBunFetch runtime lifecycle (RED until T20)", () => {
379
379
  expect(proxyA.child.killSignals).toEqual([]);
380
380
  });
381
381
  });
382
+
383
// Verifies createBunFetch's debug request-dump behavior: with debug=true each
// eligible request is written to a uniquely-named /tmp file AND mirrored to a
// stable "latest" alias; count_tokens requests and debug=false produce no
// artifacts. Relies on the suite-level spawn/fetch/writeFileSync mocks
// installed by the surrounding harness (createMockBunProxy, spawnMock,
// installMockFetch, writeFileSyncMock).
describe("createBunFetch debug request dumping", () => {
  // Unique dump names embed a filesystem-safe ISO timestamp plus 8 hex chars.
  const UNIQUE_REQUEST_PATTERN =
    /^\/tmp\/opencode-request-\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z-[0-9a-f]{8}\.json$/;
  const UNIQUE_HEADERS_PATTERN =
    /^\/tmp\/opencode-headers-\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z-[0-9a-f]{8}\.json$/;
  const LATEST_REQUEST_PATH = "/tmp/opencode-last-request.json";
  const LATEST_HEADERS_PATH = "/tmp/opencode-last-headers.json";

  // Every path handed to the mocked writeFileSync, in call order.
  function writtenPaths(): string[] {
    return writeFileSyncMock.mock.calls.map((call) => String(call[0]));
  }

  it("writes a uniquely-named request file AND a latest-alias file when debug=true", async () => {
    const proxy = createMockBunProxy();
    spawnMock.mockImplementation(proxy.mockSpawn);
    installMockFetch();

    const moduleNs = await loadBunFetchModule();
    const createBunFetch = getCreateBunFetch(moduleNs);
    const instance = createBunFetch({ debug: true });

    // The stdout banner announces the proxy port and unblocks the fetch path.
    proxy.simulateStdoutBanner(42001);

    await instance.fetch("https://api.anthropic.com/v1/messages?beta=true", {
      method: "POST",
      body: JSON.stringify({ hello: "world" }),
    });

    const paths = writtenPaths();
    const uniqueRequest = paths.find((path) => UNIQUE_REQUEST_PATTERN.test(path));
    const uniqueHeaders = paths.find((path) => UNIQUE_HEADERS_PATTERN.test(path));

    expect(uniqueRequest, "expected a uniquely-named request dump").toBeDefined();
    expect(uniqueHeaders, "expected a uniquely-named headers dump").toBeDefined();
    expect(paths).toContain(LATEST_REQUEST_PATH);
    expect(paths).toContain(LATEST_HEADERS_PATH);
  });

  it("produces a different unique path for each sequential debug request", async () => {
    const proxy = createMockBunProxy();
    spawnMock.mockImplementation(proxy.mockSpawn);
    installMockFetch();

    const moduleNs = await loadBunFetchModule();
    const createBunFetch = getCreateBunFetch(moduleNs);
    const instance = createBunFetch({ debug: true });

    proxy.simulateStdoutBanner(42002);

    // Two back-to-back requests must never collide on the unique dump path,
    // even within the same millisecond (random suffix in the id).
    await instance.fetch("https://api.anthropic.com/v1/messages?beta=true", {
      method: "POST",
      body: JSON.stringify({ n: 1 }),
    });
    await instance.fetch("https://api.anthropic.com/v1/messages?beta=true", {
      method: "POST",
      body: JSON.stringify({ n: 2 }),
    });

    const uniquePaths = writtenPaths().filter((path) => UNIQUE_REQUEST_PATTERN.test(path));

    expect(uniquePaths).toHaveLength(2);
    expect(uniquePaths[0]).not.toBe(uniquePaths[1]);
  });

  it("does not dump any artifact for count_tokens requests even when debug=true", async () => {
    const proxy = createMockBunProxy();
    spawnMock.mockImplementation(proxy.mockSpawn);
    installMockFetch();

    const moduleNs = await loadBunFetchModule();
    const createBunFetch = getCreateBunFetch(moduleNs);
    const instance = createBunFetch({ debug: true });

    proxy.simulateStdoutBanner(42003);

    // count_tokens is explicitly excluded from dumping by writeDebugArtifacts.
    await instance.fetch("https://api.anthropic.com/v1/messages/count_tokens", {
      method: "POST",
      body: JSON.stringify({ hello: "world" }),
    });

    expect(writeFileSyncMock).not.toHaveBeenCalled();
  });

  it("does not dump any artifact when debug=false", async () => {
    const proxy = createMockBunProxy();
    spawnMock.mockImplementation(proxy.mockSpawn);
    installMockFetch();

    const moduleNs = await loadBunFetchModule();
    const createBunFetch = getCreateBunFetch(moduleNs);
    const instance = createBunFetch({ debug: false });

    proxy.simulateStdoutBanner(42004);

    await instance.fetch("https://api.anthropic.com/v1/messages?beta=true", {
      method: "POST",
      body: JSON.stringify({ hello: "world" }),
    });

    expect(writeFileSyncMock).not.toHaveBeenCalled();
  });
});
package/src/bun-fetch.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import { execFileSync, spawn, type ChildProcess } from "node:child_process";
2
+ import { randomUUID } from "node:crypto";
2
3
  import { existsSync } from "node:fs";
3
4
  import { dirname, join } from "node:path";
4
5
  import * as readline from "node:readline";
@@ -119,23 +120,52 @@ function buildProxyRequestInit(input: FetchInput, init?: RequestInit): RequestIn
119
120
  };
120
121
  }
121
122
 
122
- async function writeDebugArtifacts(url: string, init: RequestInit): Promise<void> {
123
+ const DEBUG_DUMP_DIR = "/tmp";
124
+ const DEBUG_LATEST_REQUEST_PATH = `${DEBUG_DUMP_DIR}/opencode-last-request.json`;
125
+ const DEBUG_LATEST_HEADERS_PATH = `${DEBUG_DUMP_DIR}/opencode-last-headers.json`;
126
+
127
+ interface DebugDumpPaths {
128
+ requestPath: string;
129
+ headersPath: string;
130
+ latestRequestPath: string;
131
+ latestHeadersPath: string;
132
+ }
133
+
134
+ function makeDebugDumpId(): string {
135
+ const filesystemSafeTimestamp = new Date().toISOString().replace(/[:.]/g, "-");
136
+ const subMillisecondCollisionGuard = randomUUID().slice(0, 8);
137
+ return `${filesystemSafeTimestamp}-${subMillisecondCollisionGuard}`;
138
+ }
139
+
140
+ async function writeDebugArtifacts(url: string, init: RequestInit): Promise<DebugDumpPaths | null> {
123
141
  if (!init.body || !url.includes("/v1/messages") || url.includes("count_tokens")) {
124
- return;
142
+ return null;
125
143
  }
126
144
 
127
145
  const { writeFileSync } = await import("node:fs");
128
- writeFileSync(
129
- "/tmp/opencode-last-request.json",
130
- typeof init.body === "string" ? init.body : JSON.stringify(init.body),
131
- );
146
+ const id = makeDebugDumpId();
147
+ const requestPath = `${DEBUG_DUMP_DIR}/opencode-request-${id}.json`;
148
+ const headersPath = `${DEBUG_DUMP_DIR}/opencode-headers-${id}.json`;
149
+
150
+ const bodyText = typeof init.body === "string" ? init.body : JSON.stringify(init.body);
132
151
 
133
152
  const logHeaders: Record<string, string> = {};
134
153
  toHeaders(init.headers).forEach((value, key) => {
135
154
  logHeaders[key] = key === "authorization" ? "Bearer ***" : value;
136
155
  });
156
+ const headersText = JSON.stringify(logHeaders, null, 2);
157
+
158
+ writeFileSync(requestPath, bodyText);
159
+ writeFileSync(headersPath, headersText);
160
+ writeFileSync(DEBUG_LATEST_REQUEST_PATH, bodyText);
161
+ writeFileSync(DEBUG_LATEST_HEADERS_PATH, headersText);
137
162
 
138
- writeFileSync("/tmp/opencode-last-headers.json", JSON.stringify(logHeaders, null, 2));
163
+ return {
164
+ requestPath,
165
+ headersPath,
166
+ latestRequestPath: DEBUG_LATEST_REQUEST_PATH,
167
+ latestHeadersPath: DEBUG_LATEST_HEADERS_PATH,
168
+ };
139
169
  }
140
170
 
141
171
  export function createBunFetch(options: BunFetchOptions = {}): BunFetchInstance {
@@ -400,10 +430,12 @@ export function createBunFetch(options: BunFetchOptions = {}): BunFetchInstance
400
430
 
401
431
  if (resolveDebug(debugOverride)) {
402
432
  try {
403
- await writeDebugArtifacts(url, init ?? {});
404
- if ((init?.body ?? null) !== null && url.includes("/v1/messages") && !url.includes("count_tokens")) {
433
+ const dumped = await writeDebugArtifacts(url, init ?? {});
434
+ if (dumped) {
405
435
  // eslint-disable-next-line no-console -- debug-gated diagnostic; confirms request artifact dump location
406
- console.error("[opencode-anthropic-auth] Dumped request to /tmp/opencode-last-request.json");
436
+ console.error(
437
+ `[opencode-anthropic-auth] Dumped request to ${dumped.requestPath} (latest alias: ${dumped.latestRequestPath})`,
438
+ );
407
439
  }
408
440
  } catch (error) {
409
441
  // eslint-disable-next-line no-console -- error-path diagnostic surfaced to stderr for operator visibility
@@ -0,0 +1,37 @@
1
+ import { describe, expect, it, vi } from "vitest";
2
+
3
+ import { createProxyRequestHandler } from "./bun-proxy.js";
4
+
5
+ function makeProxyRequest(headers?: HeadersInit): Request {
6
+ const requestHeaders = new Headers(headers);
7
+ requestHeaders.set("x-proxy-url", "https://api.anthropic.com/v1/messages");
8
+ requestHeaders.set("content-type", "application/json");
9
+
10
+ return new Request("http://127.0.0.1/proxy", {
11
+ method: "POST",
12
+ headers: requestHeaders,
13
+ body: JSON.stringify({ ok: true }),
14
+ });
15
+ }
16
+
17
// Verifies the proxy's handling of the x-proxy-disable-keepalive control
// header: the forwarded upstream request must have keepalive disabled, carry
// an explicit `Connection: close`, and NOT leak the control header upstream.
describe("createProxyRequestHandler", () => {
  it("forwards retry requests with keepalive disabled to the upstream fetch", async () => {
    // The mock asserts the forwarding contract from inside the upstream call.
    const upstreamFetch = vi.fn(async (_input, init?: RequestInit) => {
      expect(init?.keepalive).toBe(false);
      const forwardedHeaders = init?.headers instanceof Headers ? init.headers : new Headers(init?.headers);
      expect(forwardedHeaders.get("connection")).toBe("close");
      expect(forwardedHeaders.get("x-proxy-disable-keepalive")).toBeNull();
      return new Response("ok", { status: 200 });
    });
    const handler = createProxyRequestHandler({
      fetchImpl: upstreamFetch as typeof fetch,
      allowHosts: ["api.anthropic.com"],
      requestTimeoutMs: 50,
    });

    const response = await handler(makeProxyRequest({ "x-proxy-disable-keepalive": "true" }));

    await expect(response.text()).resolves.toBe("ok");
    expect(upstreamFetch).toHaveBeenCalledTimes(1);
  });
});
package/src/bun-proxy.ts CHANGED
@@ -8,6 +8,7 @@ const DEFAULT_REQUEST_TIMEOUT_MS = 600_000;
8
8
  const DEFAULT_PARENT_EXIT_CODE = 1;
9
9
  const DEFAULT_PARENT_POLL_INTERVAL_MS = 5_000;
10
10
  const HEALTH_PATH = "/__health";
11
+ const PROXY_DISABLE_KEEPALIVE_HEADER = "x-proxy-disable-keepalive";
11
12
  const DEBUG_ENABLED = process.env.OPENCODE_ANTHROPIC_DEBUG === "1";
12
13
 
13
14
  interface ProxyRequestHandlerOptions {
@@ -85,11 +86,16 @@ function createDefaultParentWatcherFactory(): ParentWatcherFactory {
85
86
  });
86
87
  }
87
88
 
88
- function sanitizeForwardHeaders(source: Headers): Headers {
89
+ function sanitizeForwardHeaders(source: Headers, forceFreshConnection = false): Headers {
89
90
  const headers = new Headers(source);
90
- ["x-proxy-url", "host", "connection", "content-length"].forEach((headerName) => {
91
+ [PROXY_DISABLE_KEEPALIVE_HEADER, "x-proxy-url", "host", "connection", "content-length"].forEach((headerName) => {
91
92
  headers.delete(headerName);
92
93
  });
94
+
95
+ if (forceFreshConnection) {
96
+ headers.set("connection", "close");
97
+ }
98
+
93
99
  return headers;
94
100
  }
95
101
 
@@ -161,11 +167,13 @@ async function createUpstreamInit(req: Request, signal: AbortSignal): Promise<Re
161
167
  const method = req.method || "GET";
162
168
  const hasBody = method !== "GET" && method !== "HEAD";
163
169
  const bodyText = hasBody ? await req.text() : "";
170
+ const forceFreshConnection = req.headers.get(PROXY_DISABLE_KEEPALIVE_HEADER) === "true";
164
171
 
165
172
  return {
166
173
  method,
167
- headers: sanitizeForwardHeaders(req.headers),
174
+ headers: sanitizeForwardHeaders(req.headers, forceFreshConnection),
168
175
  signal,
176
+ ...(forceFreshConnection ? { keepalive: false } : {}),
169
177
  ...(hasBody && bodyText.length > 0 ? { body: bodyText } : {}),
170
178
  };
171
179
  }
@@ -0,0 +1,133 @@
1
// Expected literal constants scanned for during drift detection: if any of
// these stop appearing in the target artifact, upstream changed the cch
// scheme and this package's reimplementation must be re-verified.
export const EXPECTED_CCH_PLACEHOLDER = "00000";
export const EXPECTED_CCH_SALT = "59cf53e54c78";
export const EXPECTED_CCH_SEED = 0x6e52_736a_c806_831en;

// The five xxHash64 accumulator primes; scanned for in little-endian byte
// form (standalone binaries only — see scanCchConstants).
export const EXPECTED_XXHASH64_PRIMES = [
  0x9e37_79b1_85eb_ca87n,
  0xc2b2_ae3d_27d4_eb4fn,
  0x1656_67b1_9e37_79f9n,
  0x85eb_ca77_c2b2_ae63n,
  0x27d4_eb2f_1656_67c5n,
] as const;
12
+
13
// Severity of a drift finding: "critical" means a constant the cch scheme
// depends on is missing; "warning" is a softer signal (e.g. hash primes).
export type DriftSeverity = "critical" | "warning";

// One missing-constant observation produced by scanCchConstants.
export interface DriftFinding {
  name: string; // human-readable name of the scanned constant
  severity: DriftSeverity;
  expected: string; // the value that was scanned for
  actual: string; // observed value ("not found" when absent)
  count: number; // raw occurrence count (0 when a finding is recorded)
}

// Full drift report for one scanned artifact.
export interface DriftScanReport {
  target: string; // artifact identifier supplied by the caller
  mode: "standalone" | "bundle"; // bundles skip the native seed/prime checks
  findings: DriftFinding[];
  checked: {
    // Occurrence counts for every constant that was scanned.
    placeholder: number;
    salt: number;
    seed: number;
    primes: number[];
  };
  passed: boolean; // true when no findings were recorded
}
35
+
36
+ function encodeAscii(value: string): Uint8Array {
37
+ return new TextEncoder().encode(value);
38
+ }
39
+
40
+ export function bigintToLittleEndianBytes(value: bigint): Uint8Array {
41
+ const bytes = new Uint8Array(8);
42
+ let remaining = value;
43
+ for (let index = 0; index < bytes.length; index += 1) {
44
+ bytes[index] = Number(remaining & 0xffn);
45
+ remaining >>= 8n;
46
+ }
47
+ return bytes;
48
+ }
49
+
50
+ export function findAllOccurrences(haystack: Uint8Array, needle: Uint8Array): number[] {
51
+ if (needle.length === 0 || haystack.length < needle.length) {
52
+ return [];
53
+ }
54
+
55
+ const matches: number[] = [];
56
+ outer: for (let start = 0; start <= haystack.length - needle.length; start += 1) {
57
+ for (let offset = 0; offset < needle.length; offset += 1) {
58
+ if (haystack[start + offset] !== needle[offset]) {
59
+ continue outer;
60
+ }
61
+ }
62
+ matches.push(start);
63
+ }
64
+ return matches;
65
+ }
66
+
67
+ function addFinding(
68
+ findings: DriftFinding[],
69
+ count: number,
70
+ name: string,
71
+ severity: DriftSeverity,
72
+ expected: string,
73
+ actual: string,
74
+ ): void {
75
+ if (count > 0) {
76
+ return;
77
+ }
78
+ findings.push({ name, severity, expected, actual, count });
79
+ }
80
+
81
/**
 * Scan a build artifact for the cch-related constants this package relies on
 * and report any that have drifted (i.e. are no longer present).
 *
 * The placeholder and salt are required in every mode (critical findings).
 * The native seed (critical) and the five xxHash64 primes (warning) are only
 * expected in "standalone" artifacts; "bundle" mode skips those checks.
 *
 * @param bytes raw artifact contents
 * @param target human-readable artifact name echoed into the report
 * @param mode scan profile — "standalone" native binary or JS "bundle"
 * @returns report with per-constant occurrence counts; passed === no findings
 */
export function scanCchConstants(bytes: Uint8Array, target: string, mode: "standalone" | "bundle"): DriftScanReport {
  // Text constants are matched as ASCII; numeric constants are matched in
  // their little-endian byte encoding.
  const placeholderMatches = findAllOccurrences(bytes, encodeAscii(`cch=${EXPECTED_CCH_PLACEHOLDER}`));
  const saltMatches = findAllOccurrences(bytes, encodeAscii(EXPECTED_CCH_SALT));
  const seedMatches = findAllOccurrences(bytes, bigintToLittleEndianBytes(EXPECTED_CCH_SEED));
  const primeMatches = EXPECTED_XXHASH64_PRIMES.map(
    (prime) => findAllOccurrences(bytes, bigintToLittleEndianBytes(prime)).length,
  );

  // addFinding only records a finding when the match count is zero.
  const findings: DriftFinding[] = [];
  addFinding(
    findings,
    placeholderMatches.length,
    "cch placeholder",
    "critical",
    `cch=${EXPECTED_CCH_PLACEHOLDER}`,
    "not found",
  );
  addFinding(findings, saltMatches.length, "cc_version salt", "critical", EXPECTED_CCH_SALT, "not found");

  if (mode === "standalone") {
    addFinding(
      findings,
      seedMatches.length,
      "native cch seed",
      "critical",
      `0x${EXPECTED_CCH_SEED.toString(16)}`,
      "not found",
    );
    // Primes are soft signals: the hash may survive even if a prime is inlined
    // differently, hence "warning" severity.
    for (const [index, count] of primeMatches.entries()) {
      addFinding(
        findings,
        count,
        `xxHash64 prime ${index + 1}`,
        "warning",
        `0x${EXPECTED_XXHASH64_PRIMES[index].toString(16)}`,
        "not found",
      );
    }
  }

  return {
    target,
    mode,
    findings,
    checked: {
      placeholder: placeholderMatches.length,
      salt: saltMatches.length,
      seed: seedMatches.length,
      primes: primeMatches,
    },
    passed: findings.length === 0,
  };
}
@@ -1,12 +1,14 @@
1
1
  import { createHash } from "node:crypto";
2
+ import { CCH_PLACEHOLDER } from "./cch.js";
2
3
  import { isFalsyEnv } from "../env.js";
3
4
 
4
5
  export function buildAnthropicBillingHeader(claudeCliVersion: string, messages: unknown[]): string {
5
6
  if (isFalsyEnv(process.env.CLAUDE_CODE_ATTRIBUTION_HEADER)) return "";
6
7
 
7
- // CC derives a 3-char hash from the first user message content using SHA-256
8
- // with salt "59cf53e54c78", extracting chars at positions [4,7,20] and appending
9
- // the CLI version, then taking the first 3 hex chars of that combined string.
8
+ // CC derives the 3-char cc_version suffix from the first user message using
9
+ // SHA-256 with salt "59cf53e54c78" and positions [4,7,20]. The cch field is
10
+ // emitted here as the literal placeholder "00000" and replaced later, after
11
+ // full-body serialization, by replaceNativeStyleCch() in src/headers/cch.ts.
10
12
  let versionSuffix = "";
11
13
  if (Array.isArray(messages)) {
12
14
  // Find first user message (CC uses first non-meta user turn)
@@ -38,34 +40,5 @@ export function buildAnthropicBillingHeader(claudeCliVersion: string, messages:
38
40
 
39
41
  const entrypoint = process.env.CLAUDE_CODE_ENTRYPOINT ?? "cli";
40
42
 
41
- // ---------------------------------------------------------------------------
42
- // Billing header construction — mimics CC's mk_() function with two deliberate gaps:
43
- // 1. cc_workload field: CC tracks this via AsyncLocalStorage for session-level
44
- // workload attribution. Not applicable to the plugin (no workload tracking).
45
- // See .omc/research/cch-source-analysis.md:124-131
46
- // 2. cch value: CC uses placeholder "00000". Plugin computes a deterministic hash
47
- // from prompt content for consistent routing. See cch-source-analysis.md:28-39
48
- // ---------------------------------------------------------------------------
49
-
50
- // CC's Bun binary computes a 5-char hex attestation hash via Attestation.zig
51
- // and overwrites the "00000" placeholder before sending. On Node.js (npm CC)
52
- // the placeholder is sent as-is. The server may reject literal "00000" and
53
- // route to extra usage. Generate a body-derived 5-char hex hash to mimic
54
- // the attestation without the Zig layer.
55
- let cchValue: string;
56
- if (Array.isArray(messages) && messages.length > 0) {
57
- const bodyHint = JSON.stringify(messages).slice(0, 512);
58
- const cchHash = createHash("sha256")
59
- .update(bodyHint + claudeCliVersion + Date.now().toString(36))
60
- .digest("hex");
61
- cchValue = cchHash.slice(0, 5);
62
- } else {
63
- // Fallback: random 5-char hex
64
- const buf = createHash("sha256")
65
- .update(Date.now().toString(36) + Math.random().toString(36))
66
- .digest("hex");
67
- cchValue = buf.slice(0, 5);
68
- }
69
-
70
- return `x-anthropic-billing-header: cc_version=${claudeCliVersion}${versionSuffix}; cc_entrypoint=${entrypoint}; cch=${cchValue};`;
43
+ return `x-anthropic-billing-header: cc_version=${claudeCliVersion}${versionSuffix}; cc_entrypoint=${entrypoint}; cch=${CCH_PLACEHOLDER};`;
71
44
  }
@@ -0,0 +1,120 @@
1
+ const MASK64 = 0xffff_ffff_ffff_ffffn;
2
+ const CCH_MASK = 0x0f_ffffn;
3
+ const PRIME1 = 0x9e37_79b1_85eb_ca87n;
4
+ const PRIME2 = 0xc2b2_ae3d_27d4_eb4fn;
5
+ const PRIME3 = 0x1656_67b1_9e37_79f9n;
6
+ const PRIME4 = 0x85eb_ca77_c2b2_ae63n;
7
+ const PRIME5 = 0x27d4_eb2f_1656_67c5n;
8
+ const CCH_FIELD_PREFIX = "cch=";
9
+
10
+ export const CCH_PLACEHOLDER = "00000";
11
+ export const CCH_SEED = 0x6e52_736a_c806_831en;
12
+
13
+ const encoder = new TextEncoder();
14
+
15
+ function toUint64(value: bigint): bigint {
16
+ return value & MASK64;
17
+ }
18
+
19
+ function rotateLeft64(value: bigint, bits: number): bigint {
20
+ const shift = BigInt(bits);
21
+ return toUint64((value << shift) | (value >> (64n - shift)));
22
+ }
23
+
24
+ function readUint32LE(view: DataView, offset: number): bigint {
25
+ return BigInt(view.getUint32(offset, true));
26
+ }
27
+
28
+ function readUint64LE(view: DataView, offset: number): bigint {
29
+ return view.getBigUint64(offset, true);
30
+ }
31
+
32
+ function round64(acc: bigint, input: bigint): bigint {
33
+ const mixed = toUint64(acc + toUint64(input * PRIME2));
34
+ return toUint64(rotateLeft64(mixed, 31) * PRIME1);
35
+ }
36
+
37
+ function mergeRound64(acc: bigint, value: bigint): bigint {
38
+ const mixed = acc ^ round64(0n, value);
39
+ return toUint64(toUint64(mixed) * PRIME1 + PRIME4);
40
+ }
41
+
42
+ function avalanche64(hash: bigint): bigint {
43
+ let mixed = hash ^ (hash >> 33n);
44
+ mixed = toUint64(mixed * PRIME2);
45
+ mixed ^= mixed >> 29n;
46
+ mixed = toUint64(mixed * PRIME3);
47
+ mixed ^= mixed >> 32n;
48
+ return toUint64(mixed);
49
+ }
50
+
51
+ export function xxHash64(input: Uint8Array, seed: bigint = CCH_SEED): bigint {
52
+ const view = new DataView(input.buffer, input.byteOffset, input.byteLength);
53
+ const length = input.byteLength;
54
+ let offset = 0;
55
+ let hash: bigint;
56
+
57
+ if (length >= 32) {
58
+ let v1 = toUint64(seed + PRIME1 + PRIME2);
59
+ let v2 = toUint64(seed + PRIME2);
60
+ let v3 = toUint64(seed);
61
+ let v4 = toUint64(seed - PRIME1);
62
+
63
+ while (offset <= length - 32) {
64
+ v1 = round64(v1, readUint64LE(view, offset));
65
+ v2 = round64(v2, readUint64LE(view, offset + 8));
66
+ v3 = round64(v3, readUint64LE(view, offset + 16));
67
+ v4 = round64(v4, readUint64LE(view, offset + 24));
68
+ offset += 32;
69
+ }
70
+
71
+ hash = toUint64(rotateLeft64(v1, 1) + rotateLeft64(v2, 7) + rotateLeft64(v3, 12) + rotateLeft64(v4, 18));
72
+ hash = mergeRound64(hash, v1);
73
+ hash = mergeRound64(hash, v2);
74
+ hash = mergeRound64(hash, v3);
75
+ hash = mergeRound64(hash, v4);
76
+ } else {
77
+ hash = toUint64(seed + PRIME5);
78
+ }
79
+
80
+ hash = toUint64(hash + BigInt(length));
81
+
82
+ while (offset <= length - 8) {
83
+ const lane = round64(0n, readUint64LE(view, offset));
84
+ hash ^= lane;
85
+ hash = toUint64(rotateLeft64(hash, 27) * PRIME1 + PRIME4);
86
+ offset += 8;
87
+ }
88
+
89
+ if (offset <= length - 4) {
90
+ hash ^= toUint64(readUint32LE(view, offset) * PRIME1);
91
+ hash = toUint64(rotateLeft64(hash, 23) * PRIME2 + PRIME3);
92
+ offset += 4;
93
+ }
94
+
95
+ while (offset < length) {
96
+ hash ^= toUint64(BigInt(view.getUint8(offset)) * PRIME5);
97
+ hash = toUint64(rotateLeft64(hash, 11) * PRIME1);
98
+ offset += 1;
99
+ }
100
+
101
+ return avalanche64(hash);
102
+ }
103
+
104
+ export function computeNativeStyleCch(serializedBody: string): string {
105
+ const hash = xxHash64(encoder.encode(serializedBody), CCH_SEED);
106
+ return (hash & CCH_MASK).toString(16).padStart(5, "0");
107
+ }
108
+
109
+ export function replaceNativeStyleCch(serializedBody: string): string {
110
+ const sentinel = `${CCH_FIELD_PREFIX}${CCH_PLACEHOLDER}`;
111
+ const fieldIndex = serializedBody.indexOf(sentinel);
112
+ if (fieldIndex === -1) {
113
+ return serializedBody;
114
+ }
115
+
116
+ const valueStart = fieldIndex + CCH_FIELD_PREFIX.length;
117
+ const valueEnd = valueStart + CCH_PLACEHOLDER.length;
118
+ const cch = computeNativeStyleCch(serializedBody);
119
+ return `${serializedBody.slice(0, valueStart)}${cch}${serializedBody.slice(valueEnd)}`;
120
+ }