@mariozechner/pi-ai 0.48.0 → 0.49.0

This diff shows the content of publicly available package versions released to one of the supported registries and is provided for informational purposes only.
@@ -1 +1 @@
[anthropic.d.ts.map: the single-line declaration source map is regenerated; only the VLQ "mappings" and the embedded sourcesContent for src/providers/anthropic.ts change, mirroring the source edit shown in the compiled anthropic.js hunk below]
@@ -7,17 +7,31 @@ import { sanitizeSurrogates } from "../utils/sanitize-unicode.js";
  import { transformMessages } from "./transform-messages.js";
  // Stealth mode: Mimic Claude Code's tool naming exactly
  const claudeCodeVersion = "2.1.2";
- // Map pi! tool names to Claude Code's exact tool names
- const claudeCodeToolNames = {
-     read: "Read",
-     write: "Write",
-     edit: "Edit",
-     bash: "Bash",
-     grep: "Grep",
-     find: "Glob",
-     ls: "Ls",
- };
- const toClaudeCodeName = (name) => claudeCodeToolNames[name] || name;
+ // Claude Code 2.x tool names (canonical casing)
+ // Source: https://cchistory.mariozechner.at/data/prompts-2.1.11.md
+ // To update: https://github.com/badlogic/cchistory
+ const claudeCodeTools = [
+     "Read",
+     "Write",
+     "Edit",
+     "Bash",
+     "Grep",
+     "Glob",
+     "AskUserQuestion",
+     "EnterPlanMode",
+     "ExitPlanMode",
+     "KillShell",
+     "NotebookEdit",
+     "Skill",
+     "Task",
+     "TaskOutput",
+     "TodoWrite",
+     "WebFetch",
+     "WebSearch",
+ ];
+ const ccToolLookup = new Map(claudeCodeTools.map((t) => [t.toLowerCase(), t]));
+ // Convert tool name to CC canonical casing if it matches (case-insensitive)
+ const toClaudeCodeName = (name) => ccToolLookup.get(name.toLowerCase()) ?? name;
  const fromClaudeCodeName = (name, tools) => {
      if (tools && tools.length > 0) {
          const lowerName = name.toLowerCase();
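The behavioral effect of this hunk: the old lookup only renamed a fixed set of lower-case pi tool names, while the new lookup canonicalizes any name that matches a Claude Code tool name case-insensitively and passes unknown names through unchanged. A minimal standalone sketch contrasting the two (not part of the package; it inlines both versions from this diff, abbreviates the new tool list, and renames the two functions only so they can sit side by side):

// Old: fixed pi-name -> Claude Code name record (exact-match keys only).
const claudeCodeToolNames: Record<string, string> = {
    read: "Read",
    write: "Write",
    edit: "Edit",
    bash: "Bash",
    grep: "Grep",
    find: "Glob",
    ls: "Ls",
};
const oldToClaudeCodeName = (name: string) => claudeCodeToolNames[name] || name;

// New: canonical Claude Code tool names, matched case-insensitively (abbreviated list).
const claudeCodeTools = ["Read", "Write", "Edit", "Bash", "Grep", "Glob", "WebFetch", "TodoWrite"];
const ccToolLookup = new Map(claudeCodeTools.map((t) => [t.toLowerCase(), t]));
const newToClaudeCodeName = (name: string) => ccToolLookup.get(name.toLowerCase()) ?? name;

console.log(oldToClaudeCodeName("webfetch")); // "webfetch" -- not in the old record, passed through as-is
console.log(newToClaudeCodeName("webfetch")); // "WebFetch" -- canonical casing recovered
console.log(oldToClaudeCodeName("find"));     // "Glob"     -- old code remapped pi's "find" to Claude Code's "Glob"
console.log(newToClaudeCodeName("find"));     // "find"     -- new code only normalizes casing; it no longer remaps names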
@@ -1 +1 @@
[anthropic.js.map: the single-line source map for the compiled anthropic.js; the old map's VLQ "mappings" and embedded sourcesContent are truncated here, and the regenerated replacement line is not included in this hunk]
AAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBACjC,CAAC;oBACD,OAAO,IAAI,CAAC;gBAAA,CACZ,CAAC,CAAC;gBACH,IAAI,cAAc,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBAC1C,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,cAAc;iBACvB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,MAAM,MAAM,GAAwB,EAAE,CAAC;YAEvC,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,OAAO,EAAE,CAAC;gBACjC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC;wBAAE,SAAS;oBAC7C,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,MAAM;wBACZ,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,IAAI,CAAC;qBACpC,CAAC,CAAC;gBACJ,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,IAAI,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC;wBAAE,SAAS;oBACjD,sEAAsE;oBACtE,6EAA6E;oBAC7E,0DAA0D;oBAC1D,IAAI,CAAC,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;wBAC7E,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;yBACxC,CAAC,CAAC;oBACJ,CAAC;yBAAM,CAAC;wBACP,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,UAAU;4BAChB,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;4BAC5C,SAAS,EAAE,KAAK,CAAC,iBAAiB;yBAClC,CAAC,CAAC;oBACJ,CAAC;gBACF,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,UAAU;wBAChB,EAAE,EAAE,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC;wBAChC,IAAI,EAAE,YAAY,CAAC,CAAC,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI;wBAC9D,KAAK,EAAE,KAAK,CAAC,SAAS;qBACtB,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YACD,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YAClC,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,MAAM;aACf,CAAC,CAAC;QACJ,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,kFAAkF;YAClF,MAAM,WAAW,GAAwB,EAAE,CAAC;YAE5C,8BAA8B;YAC9B,WAAW,CAAC,IAAI,CAAC;gBAChB,IAAI,EAAE,aAAa;gBACnB,WAAW,EAAE,kBAAkB,CAAC,GAAG,CAAC,UAAU,CAAC;gBAC/C,OAAO,EAAE,oBAAoB,CAAC,GAAG,CAAC,OAAO,CAAC;gBAC1C,QAAQ,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC,CAAC;YAEH,iDAAiD;YACjD,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACd,OAAO,CAAC,GAAG,mBAAmB,CAAC,MAAM,IAAI,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;gBACvF,MAAM,OAAO,GAAG,mBAAmB,CAAC,CAAC,CAAsB,CAAC,CAAC,4BAA4B;gBACzF,WAAW,CAAC,IAAI,CAAC;oBAChB,IAAI,EAAE,aAAa;oBACnB,WAAW,EAAE,kBAAkB,CAAC,OAAO,CAAC,UAAU,CAAC;oBACnD,OAAO,EAAE,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC;oBAC9C,QAAQ,EAAE,OAAO,CAAC,OAAO;iBACzB,CAAC,CAAC;gBACH,CAAC,EAAE,CAAC;YACL,CAAC;YAED,4CAA4C;YAC5C,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEV,kDAAkD;YAClD,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,WAAW;aACpB,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,2EAA2E;IAC3E,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACvB,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC9C,IAAI,WAAW,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACjC,8CAA8C;YAC9C,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,EAAE,CAAC;gBACxC,MAAM,SAAS,GAAG,WAAW,CAAC,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;gBACtE,IACC,SAAS;oBACT,CAAC,SAAS,CAAC,IAAI,KAAK,MAAM,IAAI,SAAS,CAAC,IAAI,KAAK,OAAO,IAAI,SAAS,CAAC,IAAI,KAAK,aAAa,CAAC,EAC5F,CAAC;oBACD,SAAiB,CAAC,aAAa,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC;gBAC1D,CAAC;YACF,CAAC;QACF,CAAC;IACF,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,YAAY,CAAC,KAAa,EAAE,YAAqB,EAA6B;IACtF,IAAI,CAAC,KAAK;QAAE,OAAO,EAAE,CAAC;IAEtB,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QAC1B,MAAM,UAAU,GAAG,IAAI,CAAC,UAAiB,CAAC,CAAC,wCAAwC;QAEnF,OAAO;YACN,IAAI,EAAE,YAAY,CAAC,CAAC,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI;YAC5D,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,YAAY,EAAE;gBACb,IAAI,EAAE,QAAiB;gBACvB,UAAU,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE;gBACvC,QAAQ
,EAAE,UAAU,CAAC,QAAQ,IAAI,EAAE;aACnC;SACD,CAAC;IAAA,CACF,CAAC,CAAC;AAAA,CACH;AAED,SAAS,aAAa,CAAC,MAAqC,EAAc;IACzE,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,UAAU;YACd,OAAO,MAAM,CAAC;QACf,KAAK,YAAY;YAChB,OAAO,QAAQ,CAAC;QACjB,KAAK,UAAU;YACd,OAAO,SAAS,CAAC;QAClB,KAAK,SAAS;YACb,OAAO,OAAO,CAAC;QAChB,KAAK,YAAY,EAAE,kCAAkC;YACpD,OAAO,MAAM,CAAC;QACf,KAAK,eAAe;YACnB,OAAO,MAAM,CAAC,CAAC,8DAA8D;QAC9E,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD","sourcesContent":["import Anthropic from \"@anthropic-ai/sdk\";\nimport type {\n\tContentBlockParam,\n\tMessageCreateParamsStreaming,\n\tMessageParam,\n} from \"@anthropic-ai/sdk/resources/messages.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transform-messages.js\";\n\n// Stealth mode: Mimic Claude Code's tool naming exactly\nconst claudeCodeVersion = \"2.1.2\";\n\n// Map pi! tool names to Claude Code's exact tool names\nconst claudeCodeToolNames: Record<string, string> = {\n\tread: \"Read\",\n\twrite: \"Write\",\n\tedit: \"Edit\",\n\tbash: \"Bash\",\n\tgrep: \"Grep\",\n\tfind: \"Glob\",\n\tls: \"Ls\",\n};\n\nconst toClaudeCodeName = (name: string) => claudeCodeToolNames[name] || name;\nconst fromClaudeCodeName = (name: string, tools?: Tool[]) => {\n\tif (tools && tools.length > 0) {\n\t\tconst lowerName = name.toLowerCase();\n\t\tconst matchedTool = tools.find((tool) => tool.name.toLowerCase() === lowerName);\n\t\tif (matchedTool) return matchedTool.name;\n\t}\n\treturn name;\n};\n\n/**\n * Convert content blocks to Anthropic API format\n */\nfunction convertContentBlocks(content: (TextContent | ImageContent)[]):\n\t| string\n\t| Array<\n\t\t\t| { type: \"text\"; text: string }\n\t\t\t| {\n\t\t\t\t\ttype: \"image\";\n\t\t\t\t\tsource: {\n\t\t\t\t\t\ttype: \"base64\";\n\t\t\t\t\t\tmedia_type: \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\";\n\t\t\t\t\t\tdata: string;\n\t\t\t\t\t};\n\t\t\t }\n\t > {\n\t// If only text blocks, return as concatenated string for simplicity\n\tconst hasImages = content.some((c) => c.type === \"image\");\n\tif (!hasImages) {\n\t\treturn sanitizeSurrogates(content.map((c) => (c as TextContent).text).join(\"\\n\"));\n\t}\n\n\t// If we have images, convert to content block array\n\tconst blocks = content.map((block) => {\n\t\tif (block.type === \"text\") {\n\t\t\treturn {\n\t\t\t\ttype: \"text\" as const,\n\t\t\t\ttext: sanitizeSurrogates(block.text),\n\t\t\t};\n\t\t}\n\t\treturn {\n\t\t\ttype: \"image\" as const,\n\t\t\tsource: {\n\t\t\t\ttype: \"base64\" as const,\n\t\t\t\tmedia_type: block.mimeType as \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\",\n\t\t\t\tdata: block.data,\n\t\t\t},\n\t\t};\n\t});\n\n\t// If only images (no text), add placeholder text block\n\tconst hasText = blocks.some((b) => b.type === \"text\");\n\tif (!hasText) {\n\t\tblocks.unshift({\n\t\t\ttype: \"text\" as const,\n\t\t\ttext: \"(see attached image)\",\n\t\t});\n\t}\n\n\treturn blocks;\n}\n\nexport interface 
AnthropicOptions extends StreamOptions {\n\tthinkingEnabled?: boolean;\n\tthinkingBudgetTokens?: number;\n\tinterleavedThinking?: boolean;\n\ttoolChoice?: \"auto\" | \"any\" | \"none\" | { type: \"tool\"; name: string };\n}\n\nexport const streamAnthropic: StreamFunction<\"anthropic-messages\"> = (\n\tmodel: Model<\"anthropic-messages\">,\n\tcontext: Context,\n\toptions?: AnthropicOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"anthropic-messages\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey ?? getEnvApiKey(model.provider) ?? \"\";\n\t\t\tconst { client, isOAuthToken } = createClient(model, apiKey, options?.interleavedThinking ?? true);\n\t\t\tconst params = buildParams(model, context, isOAuthToken, options);\n\t\t\tconst anthropicStream = client.messages.stream({ ...params, stream: true }, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\ttype Block = (ThinkingContent | TextContent | (ToolCall & { partialJson: string })) & { index: number };\n\t\t\tconst blocks = output.content as Block[];\n\n\t\t\tfor await (const event of anthropicStream) {\n\t\t\t\tif (event.type === \"message_start\") {\n\t\t\t\t\t// Capture initial token usage from message_start event\n\t\t\t\t\t// This ensures we have input token counts even if the stream is aborted early\n\t\t\t\t\toutput.usage.input = event.message.usage.input_tokens || 0;\n\t\t\t\t\toutput.usage.output = event.message.usage.output_tokens || 0;\n\t\t\t\t\toutput.usage.cacheRead = event.message.usage.cache_read_input_tokens || 0;\n\t\t\t\t\toutput.usage.cacheWrite = event.message.usage.cache_creation_input_tokens || 0;\n\t\t\t\t\t// Anthropic doesn't provide total_tokens, compute from components\n\t\t\t\t\toutput.usage.totalTokens =\n\t\t\t\t\t\toutput.usage.input + output.usage.output + output.usage.cacheRead + output.usage.cacheWrite;\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t} else if (event.type === \"content_block_start\") {\n\t\t\t\t\tif (event.content_block.type === \"text\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t} else if (event.content_block.type === \"thinking\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\tthinkingSignature: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t} else if (event.content_block.type === \"tool_use\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: event.content_block.id,\n\t\t\t\t\t\t\tname: isOAuthToken\n\t\t\t\t\t\t\t\t? 
fromClaudeCodeName(event.content_block.name, context.tools)\n\t\t\t\t\t\t\t\t: event.content_block.name,\n\t\t\t\t\t\t\targuments: event.content_block.input as Record<string, any>,\n\t\t\t\t\t\t\tpartialJson: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"content_block_delta\") {\n\t\t\t\t\tif (event.delta.type === \"text_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"text\") {\n\t\t\t\t\t\t\tblock.text += event.delta.text;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.text,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"thinking_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"thinking\") {\n\t\t\t\t\t\t\tblock.thinking += event.delta.thinking;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.thinking,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"input_json_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"toolCall\") {\n\t\t\t\t\t\t\tblock.partialJson += event.delta.partial_json;\n\t\t\t\t\t\t\tblock.arguments = parseStreamingJson(block.partialJson);\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.partial_json,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"signature_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"thinking\") {\n\t\t\t\t\t\t\tblock.thinkingSignature = block.thinkingSignature || \"\";\n\t\t\t\t\t\t\tblock.thinkingSignature += event.delta.signature;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"content_block_stop\") {\n\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\tif (block) {\n\t\t\t\t\t\tdelete (block as any).index;\n\t\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\t\tblock.arguments = parseStreamingJson(block.partialJson);\n\t\t\t\t\t\t\tdelete (block as any).partialJson;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\t\tcontentIndex: 
index,\n\t\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"message_delta\") {\n\t\t\t\t\tif (event.delta.stop_reason) {\n\t\t\t\t\t\toutput.stopReason = mapStopReason(event.delta.stop_reason);\n\t\t\t\t\t}\n\t\t\t\t\toutput.usage.input = event.usage.input_tokens || 0;\n\t\t\t\t\toutput.usage.output = event.usage.output_tokens || 0;\n\t\t\t\t\toutput.usage.cacheRead = event.usage.cache_read_input_tokens || 0;\n\t\t\t\t\toutput.usage.cacheWrite = event.usage.cache_creation_input_tokens || 0;\n\t\t\t\t\t// Anthropic doesn't provide total_tokens, compute from components\n\t\t\t\t\toutput.usage.totalTokens =\n\t\t\t\t\t\toutput.usage.input + output.usage.output + output.usage.cacheRead + output.usage.cacheWrite;\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction isOAuthToken(apiKey: string): boolean {\n\treturn apiKey.includes(\"sk-ant-oat\");\n}\n\nfunction createClient(\n\tmodel: Model<\"anthropic-messages\">,\n\tapiKey: string,\n\tinterleavedThinking: boolean,\n): { client: Anthropic; isOAuthToken: boolean } {\n\tconst betaFeatures = [\"fine-grained-tool-streaming-2025-05-14\"];\n\tif (interleavedThinking) {\n\t\tbetaFeatures.push(\"interleaved-thinking-2025-05-14\");\n\t}\n\n\tconst oauthToken = isOAuthToken(apiKey);\n\tif (oauthToken) {\n\t\t// Stealth mode: Mimic Claude Code's headers exactly\n\t\tconst defaultHeaders = {\n\t\t\taccept: \"application/json\",\n\t\t\t\"anthropic-dangerous-direct-browser-access\": \"true\",\n\t\t\t\"anthropic-beta\": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(\",\")}`,\n\t\t\t\"user-agent\": `claude-cli/${claudeCodeVersion} (external, cli)`,\n\t\t\t\"x-app\": \"cli\",\n\t\t\t...(model.headers || {}),\n\t\t};\n\n\t\tconst client = new Anthropic({\n\t\t\tapiKey: null,\n\t\t\tauthToken: apiKey,\n\t\t\tbaseURL: model.baseUrl,\n\t\t\tdefaultHeaders,\n\t\t\tdangerouslyAllowBrowser: true,\n\t\t});\n\n\t\treturn { client, isOAuthToken: true };\n\t}\n\n\tconst defaultHeaders = {\n\t\taccept: \"application/json\",\n\t\t\"anthropic-dangerous-direct-browser-access\": \"true\",\n\t\t\"anthropic-beta\": betaFeatures.join(\",\"),\n\t\t...(model.headers || {}),\n\t};\n\n\tconst client = new Anthropic({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders,\n\t});\n\n\treturn { client, isOAuthToken: false };\n}\n\nfunction buildParams(\n\tmodel: Model<\"anthropic-messages\">,\n\tcontext: Context,\n\tisOAuthToken: boolean,\n\toptions?: AnthropicOptions,\n): MessageCreateParamsStreaming {\n\tconst params: MessageCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages: convertMessages(context.messages, model, 
isOAuthToken),\n\t\tmax_tokens: options?.maxTokens || (model.maxTokens / 3) | 0,\n\t\tstream: true,\n\t};\n\n\t// For OAuth tokens, we MUST include Claude Code identity\n\tif (isOAuthToken) {\n\t\tparams.system = [\n\t\t\t{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: \"You are Claude Code, Anthropic's official CLI for Claude.\",\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t},\n\t\t];\n\t\tif (context.systemPrompt) {\n\t\t\tparams.system.push({\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(context.systemPrompt),\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t});\n\t\t}\n\t} else if (context.systemPrompt) {\n\t\t// Add cache control to system prompt for non-OAuth tokens\n\t\tparams.system = [\n\t\t\t{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(context.systemPrompt),\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t},\n\t\t];\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools, isOAuthToken);\n\t}\n\n\tif (options?.thinkingEnabled && model.reasoning) {\n\t\tparams.thinking = {\n\t\t\ttype: \"enabled\",\n\t\t\tbudget_tokens: options.thinkingBudgetTokens || 1024,\n\t\t};\n\t}\n\n\tif (options?.toolChoice) {\n\t\tif (typeof options.toolChoice === \"string\") {\n\t\t\tparams.tool_choice = { type: options.toolChoice };\n\t\t} else {\n\t\t\tparams.tool_choice = options.toolChoice;\n\t\t}\n\t}\n\n\treturn params;\n}\n\n// Sanitize tool call IDs to match Anthropic's required pattern: ^[a-zA-Z0-9_-]+$\nfunction sanitizeToolCallId(id: string): string {\n\t// Replace any character that isn't alphanumeric, underscore, or hyphen with underscore\n\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n}\n\nfunction convertMessages(\n\tmessages: Message[],\n\tmodel: Model<\"anthropic-messages\">,\n\tisOAuthToken: boolean,\n): MessageParam[] {\n\tconst params: MessageParam[] = [];\n\n\t// Transform messages for cross-provider compatibility\n\tconst transformedMessages = transformMessages(messages, model);\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tif (msg.content.trim().length > 0) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"user\",\n\t\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tconst blocks: ContentBlockParam[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t};\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image\",\n\t\t\t\t\t\t\tsource: {\n\t\t\t\t\t\t\t\ttype: \"base64\",\n\t\t\t\t\t\t\t\tmedia_type: item.mimeType as \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\",\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tlet filteredBlocks = !model?.input.includes(\"image\") ? 
blocks.filter((b) => b.type !== \"image\") : blocks;\n\t\t\t\tfilteredBlocks = filteredBlocks.filter((b) => {\n\t\t\t\t\tif (b.type === \"text\") {\n\t\t\t\t\t\treturn b.text.trim().length > 0;\n\t\t\t\t\t}\n\t\t\t\t\treturn true;\n\t\t\t\t});\n\t\t\t\tif (filteredBlocks.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredBlocks,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst blocks: ContentBlockParam[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tif (block.text.trim().length === 0) continue;\n\t\t\t\t\tblocks.push({\n\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.text),\n\t\t\t\t\t});\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tif (block.thinking.trim().length === 0) continue;\n\t\t\t\t\t// If thinking signature is missing/empty (e.g., from aborted stream),\n\t\t\t\t\t// convert to plain text block without <thinking> tags to avoid API rejection\n\t\t\t\t\t// and prevent Claude from mimicking the tags in responses\n\t\t\t\t\tif (!block.thinkingSignature || block.thinkingSignature.trim().length === 0) {\n\t\t\t\t\t\tblocks.push({\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\tblocks.push({\n\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\tthinking: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t\t\tsignature: block.thinkingSignature,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tblocks.push({\n\t\t\t\t\t\ttype: \"tool_use\",\n\t\t\t\t\t\tid: sanitizeToolCallId(block.id),\n\t\t\t\t\t\tname: isOAuthToken ? toClaudeCodeName(block.name) : block.name,\n\t\t\t\t\t\tinput: block.arguments,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (blocks.length === 0) continue;\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: blocks,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Collect all consecutive toolResult messages, needed for z.ai Anthropic endpoint\n\t\t\tconst toolResults: ContentBlockParam[] = [];\n\n\t\t\t// Add the current tool result\n\t\t\ttoolResults.push({\n\t\t\t\ttype: \"tool_result\",\n\t\t\t\ttool_use_id: sanitizeToolCallId(msg.toolCallId),\n\t\t\t\tcontent: convertContentBlocks(msg.content),\n\t\t\t\tis_error: msg.isError,\n\t\t\t});\n\n\t\t\t// Look ahead for consecutive toolResult messages\n\t\t\tlet j = i + 1;\n\t\t\twhile (j < transformedMessages.length && transformedMessages[j].role === \"toolResult\") {\n\t\t\t\tconst nextMsg = transformedMessages[j] as ToolResultMessage; // We know it's a toolResult\n\t\t\t\ttoolResults.push({\n\t\t\t\t\ttype: \"tool_result\",\n\t\t\t\t\ttool_use_id: sanitizeToolCallId(nextMsg.toolCallId),\n\t\t\t\t\tcontent: convertContentBlocks(nextMsg.content),\n\t\t\t\t\tis_error: nextMsg.isError,\n\t\t\t\t});\n\t\t\t\tj++;\n\t\t\t}\n\n\t\t\t// Skip the messages we've already processed\n\t\t\ti = j - 1;\n\n\t\t\t// Add a single user message with all tool results\n\t\t\tparams.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tcontent: toolResults,\n\t\t\t});\n\t\t}\n\t}\n\n\t// Add cache_control to the last user message to cache conversation history\n\tif (params.length > 0) {\n\t\tconst lastMessage = params[params.length - 1];\n\t\tif (lastMessage.role === \"user\") {\n\t\t\t// Add cache control to the last content block\n\t\t\tif (Array.isArray(lastMessage.content)) {\n\t\t\t\tconst lastBlock = 
lastMessage.content[lastMessage.content.length - 1];\n\t\t\t\tif (\n\t\t\t\t\tlastBlock &&\n\t\t\t\t\t(lastBlock.type === \"text\" || lastBlock.type === \"image\" || lastBlock.type === \"tool_result\")\n\t\t\t\t) {\n\t\t\t\t\t(lastBlock as any).cache_control = { type: \"ephemeral\" };\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[], isOAuthToken: boolean): Anthropic.Messages.Tool[] {\n\tif (!tools) return [];\n\n\treturn tools.map((tool) => {\n\t\tconst jsonSchema = tool.parameters as any; // TypeBox already generates JSON Schema\n\n\t\treturn {\n\t\t\tname: isOAuthToken ? toClaudeCodeName(tool.name) : tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tinput_schema: {\n\t\t\t\ttype: \"object\" as const,\n\t\t\t\tproperties: jsonSchema.properties || {},\n\t\t\t\trequired: jsonSchema.required || [],\n\t\t\t},\n\t\t};\n\t});\n}\n\nfunction mapStopReason(reason: Anthropic.Messages.StopReason): StopReason {\n\tswitch (reason) {\n\t\tcase \"end_turn\":\n\t\t\treturn \"stop\";\n\t\tcase \"max_tokens\":\n\t\t\treturn \"length\";\n\t\tcase \"tool_use\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"refusal\":\n\t\t\treturn \"error\";\n\t\tcase \"pause_turn\": // Stop is good enough -> resubmit\n\t\t\treturn \"stop\";\n\t\tcase \"stop_sequence\":\n\t\t\treturn \"stop\"; // We don't supply stop sequences, so this should never happen\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
1
+ {"version":3,"file":"anthropic.js","sourceRoot":"","sources":["../../src/providers/anthropic.ts"],"names":[],"mappings":"AAAA,OAAO,SAAS,MAAM,mBAAmB,CAAC;AAM1C,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAC7C,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AAiB5C,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAElE,OAAO,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE5D,wDAAwD;AACxD,MAAM,iBAAiB,GAAG,OAAO,CAAC;AAElC,gDAAgD;AAChD,mEAAmE;AACnE,mDAAmD;AACnD,MAAM,eAAe,GAAG;IACvB,MAAM;IACN,OAAO;IACP,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,iBAAiB;IACjB,eAAe;IACf,cAAc;IACd,WAAW;IACX,cAAc;IACd,OAAO;IACP,MAAM;IACN,YAAY;IACZ,WAAW;IACX,UAAU;IACV,WAAW;CACX,CAAC;AAEF,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AAE/E,4EAA4E;AAC5E,MAAM,gBAAgB,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,IAAI,IAAI,CAAC;AACxF,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,KAAc,EAAE,EAAE,CAAC;IAC5D,IAAI,KAAK,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC/B,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;QACrC,MAAM,WAAW,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,CAAC;QAChF,IAAI,WAAW;YAAE,OAAO,WAAW,CAAC,IAAI,CAAC;IAC1C,CAAC;IACD,OAAO,IAAI,CAAC;AAAA,CACZ,CAAC;AAEF;;GAEG;AACH,SAAS,oBAAoB,CAAC,OAAuC,EAYhE;IACJ,oEAAoE;IACpE,MAAM,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;IAC1D,IAAI,CAAC,SAAS,EAAE,CAAC;QAChB,OAAO,kBAAkB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAAiB,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;IACnF,CAAC;IAED,oDAAoD;IACpD,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;QACrC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YAC3B,OAAO;gBACN,IAAI,EAAE,MAAe;gBACrB,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,IAAI,CAAC;aACpC,CAAC;QACH,CAAC;QACD,OAAO;YACN,IAAI,EAAE,OAAgB;YACtB,MAAM,EAAE;gBACP,IAAI,EAAE,QAAiB;gBACvB,UAAU,EAAE,KAAK,CAAC,QAAmE;gBACrF,IAAI,EAAE,KAAK,CAAC,IAAI;aAChB;SACD,CAAC;IAAA,CACF,CAAC,CAAC;IAEH,uDAAuD;IACvD,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC;IACtD,IAAI,CAAC,OAAO,EAAE,CAAC;QACd,MAAM,CAAC,OAAO,CAAC;YACd,IAAI,EAAE,MAAe;YACrB,IAAI,EAAE,sBAAsB;SAC5B,CAAC,CAAC;IACJ,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AASD,MAAM,CAAC,MAAM,eAAe,GAAyC,CACpE,KAAkC,EAClC,OAAgB,EAChB,OAA0B,EACI,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,oBAA2B;YAChC,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACrE,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,mBAAmB,IAAI,IAAI,CAAC,CAAC;YACnG,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,YAAY,EAAE,OAAO,CAAC,CAAC;YAClE,MAAM,eAAe,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,GAAG,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YACzG,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAGhD,MAAM,MAAM,GAAG,MAAM,CAAC,OAAkB,CAAC;YAEzC,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,eAAe,EAAE,CAAC;gBAC3C,IAAI,KAAK,CA
AC,IAAI,KAAK,eAAe,EAAE,CAAC;oBACpC,uDAAuD;oBACvD,8EAA8E;oBAC9E,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,YAAY,IAAI,CAAC,CAAC;oBAC3D,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC;oBAC7D,MAAM,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,uBAAuB,IAAI,CAAC,CAAC;oBAC1E,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,2BAA2B,IAAI,CAAC,CAAC;oBAC/E,kEAAkE;oBAClE,MAAM,CAAC,KAAK,CAAC,WAAW;wBACvB,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC;oBAC7F,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,qBAAqB,EAAE,CAAC;oBACjD,IAAI,KAAK,CAAC,aAAa,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBACzC,MAAM,KAAK,GAAU;4BACpB,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,EAAE;4BACR,KAAK,EAAE,KAAK,CAAC,KAAK;yBAClB,CAAC;wBACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;wBAC3B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;oBAC/F,CAAC;yBAAM,IAAI,KAAK,CAAC,aAAa,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACpD,MAAM,KAAK,GAAU;4BACpB,IAAI,EAAE,UAAU;4BAChB,QAAQ,EAAE,EAAE;4BACZ,iBAAiB,EAAE,EAAE;4BACrB,KAAK,EAAE,KAAK,CAAC,KAAK;yBAClB,CAAC;wBACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;wBAC3B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;oBACnG,CAAC;yBAAM,IAAI,KAAK,CAAC,aAAa,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACpD,MAAM,KAAK,GAAU;4BACpB,IAAI,EAAE,UAAU;4BAChB,EAAE,EAAE,KAAK,CAAC,aAAa,CAAC,EAAE;4BAC1B,IAAI,EAAE,YAAY;gCACjB,CAAC,CAAC,kBAAkB,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,EAAE,OAAO,CAAC,KAAK,CAAC;gCAC7D,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI;4BAC3B,SAAS,EAAE,KAAK,CAAC,aAAa,CAAC,KAA4B;4BAC3D,WAAW,EAAE,EAAE;4BACf,KAAK,EAAE,KAAK,CAAC,KAAK;yBAClB,CAAC;wBACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;wBAC3B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;oBACnG,CAAC;gBACF,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,qBAAqB,EAAE,CAAC;oBACjD,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;wBACvC,MAAM,KAAK,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,CAAC,CAAC;wBAC/D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAC5B,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BACpC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC;4BAC/B,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,YAAY;gCAClB,YAAY,EAAE,KAAK;gCACnB,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC,IAAI;gCACvB,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;yBAAM,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;wBAClD,MAAM,KAAK,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,CAAC,CAAC;wBAC/D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAC5B,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACxC,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC;4BACvC,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,KAAK;gCACnB,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC,QAAQ;gCAC3B,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;yBAAM,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,kBAAkB,EAAE,CAAC;wBACpD,MAAM,KAAK,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,CAAC,CAAC;wBAC/D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAC5B,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACxC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC;4BAC9C,KAAK,CAAC,SAAS,GAAG,kBAAkB,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;4BACxD,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAA
E,KAAK;gCACnB,KAAK,EAAE,KAAK,CAAC,KAAK,CAAC,YAAY;gCAC/B,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;yBAAM,IAAI,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,iBAAiB,EAAE,CAAC;wBACnD,MAAM,KAAK,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,CAAC,CAAC;wBAC/D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;wBAC5B,IAAI,KAAK,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACxC,KAAK,CAAC,iBAAiB,GAAG,KAAK,CAAC,iBAAiB,IAAI,EAAE,CAAC;4BACxD,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC;wBAClD,CAAC;oBACF,CAAC;gBACF,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,oBAAoB,EAAE,CAAC;oBAChD,MAAM,KAAK,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,KAAK,CAAC,CAAC;oBAC/D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;oBAC5B,IAAI,KAAK,EAAE,CAAC;wBACX,OAAQ,KAAa,CAAC,KAAK,CAAC;wBAC5B,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BAC3B,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,UAAU;gCAChB,YAAY,EAAE,KAAK;gCACnB,OAAO,EAAE,KAAK,CAAC,IAAI;gCACnB,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;6BAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACtC,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,cAAc;gCACpB,YAAY,EAAE,KAAK;gCACnB,OAAO,EAAE,KAAK,CAAC,QAAQ;gCACvB,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;6BAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACtC,KAAK,CAAC,SAAS,GAAG,kBAAkB,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;4BACxD,OAAQ,KAAa,CAAC,WAAW,CAAC;4BAClC,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,cAAc;gCACpB,YAAY,EAAE,KAAK;gCACnB,QAAQ,EAAE,KAAK;gCACf,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;gBACF,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,eAAe,EAAE,CAAC;oBAC3C,IAAI,KAAK,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;wBAC7B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;oBAC5D,CAAC;oBACD,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,YAAY,IAAI,CAAC,CAAC;oBACnD,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC;oBACrD,MAAM,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,KAAK,CAAC,uBAAuB,IAAI,CAAC,CAAC;oBAClE,MAAM,CAAC,KAAK,CAAC,UAAU,GAAG,KAAK,CAAC,KAAK,CAAC,2BAA2B,IAAI,CAAC,CAAC;oBACvE,kEAAkE;oBAClE,MAAM,CAAC,KAAK,CAAC,WAAW;wBACvB,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC;oBAC7F,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;YACF,CAAC;YAED,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;YAC9C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO;gBAAE,OAAQ,KAAa,CAAC,KAAK,CAAC;YAChE,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd,CAAC;AAEF,SAAS,YAAY,CAAC,MAAc,EAAW;IAC9C,OAAO,MAAM,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAAA,CACrC;AAED,SAAS,YAAY,CACpB,KAAkC,EAClC,MAAc,EACd,mBAA4B,EACmB;IAC/C,MAAM,YAAY,GAAG,CAAC,wCAAwC,CAAC,CAAC;IAChE,IAAI,mBAAmB,EAAE,CAAC;QACzB,YAAY,CAAC,IAAI,CAAC,iCAAiC,CAAC,CAAC;IACtD,CAAC;IAED,MAAM,UAAU,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC;IACxC,IAAI,UAAU,EAAE,CAAC;QAChB,oDAAoD;QACpD,MAAM,cAAc,GAAG;YACtB,MAAM,EAAE,kBAAkB;YAC1B,2CAA2C,EAA
E,MAAM;YACnD,gBAAgB,EAAE,yCAAyC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;YACnF,YAAY,EAAE,cAAc,iBAAiB,kBAAkB;YAC/D,OAAO,EAAE,KAAK;YACd,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;SACxB,CAAC;QAEF,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;YAC5B,MAAM,EAAE,IAAI;YACZ,SAAS,EAAE,MAAM;YACjB,OAAO,EAAE,KAAK,CAAC,OAAO;YACtB,cAAc;YACd,uBAAuB,EAAE,IAAI;SAC7B,CAAC,CAAC;QAEH,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC;IACvC,CAAC;IAED,MAAM,cAAc,GAAG;QACtB,MAAM,EAAE,kBAAkB;QAC1B,2CAA2C,EAAE,MAAM;QACnD,gBAAgB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QACxC,GAAG,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,CAAC;KACxB,CAAC;IAEF,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC;QAC5B,MAAM;QACN,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,uBAAuB,EAAE,IAAI;QAC7B,cAAc;KACd,CAAC,CAAC;IAEH,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC;AAAA,CACvC;AAED,SAAS,WAAW,CACnB,KAAkC,EAClC,OAAgB,EAChB,YAAqB,EACrB,OAA0B,EACK;IAC/B,MAAM,MAAM,GAAiC;QAC5C,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ,EAAE,eAAe,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,EAAE,YAAY,CAAC;QAChE,UAAU,EAAE,OAAO,EAAE,SAAS,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG,CAAC,CAAC,GAAG,CAAC;QAC3D,MAAM,EAAE,IAAI;KACZ,CAAC;IAEF,yDAAyD;IACzD,IAAI,YAAY,EAAE,CAAC;QAClB,MAAM,CAAC,MAAM,GAAG;YACf;gBACC,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,2DAA2D;gBACjE,aAAa,EAAE;oBACd,IAAI,EAAE,WAAW;iBACjB;aACD;SACD,CAAC;QACF,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;YAC1B,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC;gBAClB,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC;gBAC9C,aAAa,EAAE;oBACd,IAAI,EAAE,WAAW;iBACjB;aACD,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;SAAM,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QACjC,0DAA0D;QAC1D,MAAM,CAAC,MAAM,GAAG;YACf;gBACC,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC;gBAC9C,aAAa,EAAE;oBACd,IAAI,EAAE,WAAW;iBACjB;aACD;SACD,CAAC;IACH,CAAC;IAED,IAAI,OAAO,EAAE,WAAW,KAAK,SAAS,EAAE,CAAC;QACxC,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAC1C,CAAC;IAED,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;QACnB,MAAM,CAAC,KAAK,GAAG,YAAY,CAAC,OAAO,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;IAC1D,CAAC;IAED,IAAI,OAAO,EAAE,eAAe,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QACjD,MAAM,CAAC,QAAQ,GAAG;YACjB,IAAI,EAAE,SAAS;YACf,aAAa,EAAE,OAAO,CAAC,oBAAoB,IAAI,IAAI;SACnD,CAAC;IACH,CAAC;IAED,IAAI,OAAO,EAAE,UAAU,EAAE,CAAC;QACzB,IAAI,OAAO,OAAO,CAAC,UAAU,KAAK,QAAQ,EAAE,CAAC;YAC5C,MAAM,CAAC,WAAW,GAAG,EAAE,IAAI,EAAE,OAAO,CAAC,UAAU,EAAE,CAAC;QACnD,CAAC;aAAM,CAAC;YACP,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC;QACzC,CAAC;IACF,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,iFAAiF;AACjF,SAAS,kBAAkB,CAAC,EAAU,EAAU;IAC/C,uFAAuF;IACvF,OAAO,EAAE,CAAC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC;AAAA,CAC1C;AAED,SAAS,eAAe,CACvB,QAAmB,EACnB,KAAkC,EAClC,YAAqB,EACJ;IACjB,MAAM,MAAM,GAAmB,EAAE,CAAC;IAElC,sDAAsD;IACtD,MAAM,mBAAmB,GAAG,iBAAiB,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAE/D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,mBAAmB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACrD,MAAM,GAAG,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAC;QAEnC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACzB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBACnC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,MAAM;wBACZ,OAAO,EAAE,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC;qBACxC,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;iBAAM,CAAC;gBACP,MAAM,MAAM,GAAwB,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;oBAC7D,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC1B,OAAO;4BACN,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;yBACnC,CAAC;oBACH,CAAC;yBAAM,CAAC;wBACP,OAAO;4BACN,IAAI,EAAE,OAAO;4BACb,MAAM,EAAE;gCACP,IAAI,EAAE,QAAQ;gCACd,UAAU,EAAE,IAAI,CAAC,QAAmE;gCACpF,IAAI,EAAE,IAAI,CAAC,IAAI;6BACf;yBACD,CAAC;oBACH,CAAC;gBAAA,CACD,CAAC,CAAC;gBACH,IAAI,cAAc,GAAG,CAAC,KAAK,EAAE,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC
,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;gBACzG,cAAc,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;oBAC7C,IAAI,CAAC,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBACvB,OAAO,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBACjC,CAAC;oBACD,OAAO,IAAI,CAAC;gBAAA,CACZ,CAAC,CAAC;gBACH,IAAI,cAAc,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBAC1C,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,cAAc;iBACvB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,MAAM,MAAM,GAAwB,EAAE,CAAC;YAEvC,KAAK,MAAM,KAAK,IAAI,GAAG,CAAC,OAAO,EAAE,CAAC;gBACjC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC;wBAAE,SAAS;oBAC7C,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,MAAM;wBACZ,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,IAAI,CAAC;qBACpC,CAAC,CAAC;gBACJ,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,IAAI,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC;wBAAE,SAAS;oBACjD,sEAAsE;oBACtE,6EAA6E;oBAC7E,0DAA0D;oBAC1D,IAAI,CAAC,KAAK,CAAC,iBAAiB,IAAI,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;wBAC7E,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;yBACxC,CAAC,CAAC;oBACJ,CAAC;yBAAM,CAAC;wBACP,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,UAAU;4BAChB,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,QAAQ,CAAC;4BAC5C,SAAS,EAAE,KAAK,CAAC,iBAAiB;yBAClC,CAAC,CAAC;oBACJ,CAAC;gBACF,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,UAAU;wBAChB,EAAE,EAAE,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC;wBAChC,IAAI,EAAE,YAAY,CAAC,CAAC,CAAC,gBAAgB,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI;wBAC9D,KAAK,EAAE,KAAK,CAAC,SAAS;qBACtB,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YACD,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YAClC,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,MAAM;aACf,CAAC,CAAC;QACJ,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,kFAAkF;YAClF,MAAM,WAAW,GAAwB,EAAE,CAAC;YAE5C,8BAA8B;YAC9B,WAAW,CAAC,IAAI,CAAC;gBAChB,IAAI,EAAE,aAAa;gBACnB,WAAW,EAAE,kBAAkB,CAAC,GAAG,CAAC,UAAU,CAAC;gBAC/C,OAAO,EAAE,oBAAoB,CAAC,GAAG,CAAC,OAAO,CAAC;gBAC1C,QAAQ,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC,CAAC;YAEH,iDAAiD;YACjD,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACd,OAAO,CAAC,GAAG,mBAAmB,CAAC,MAAM,IAAI,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;gBACvF,MAAM,OAAO,GAAG,mBAAmB,CAAC,CAAC,CAAsB,CAAC,CAAC,4BAA4B;gBACzF,WAAW,CAAC,IAAI,CAAC;oBAChB,IAAI,EAAE,aAAa;oBACnB,WAAW,EAAE,kBAAkB,CAAC,OAAO,CAAC,UAAU,CAAC;oBACnD,OAAO,EAAE,oBAAoB,CAAC,OAAO,CAAC,OAAO,CAAC;oBAC9C,QAAQ,EAAE,OAAO,CAAC,OAAO;iBACzB,CAAC,CAAC;gBACH,CAAC,EAAE,CAAC;YACL,CAAC;YAED,4CAA4C;YAC5C,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEV,kDAAkD;YAClD,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,WAAW;aACpB,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,2EAA2E;IAC3E,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACvB,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC9C,IAAI,WAAW,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACjC,8CAA8C;YAC9C,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,EAAE,CAAC;gBACxC,MAAM,SAAS,GAAG,WAAW,CAAC,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;gBACtE,IACC,SAAS;oBACT,CAAC,SAAS,CAAC,IAAI,KAAK,MAAM,IAAI,SAAS,CAAC,IAAI,KAAK,OAAO,IAAI,SAAS,CAAC,IAAI,KAAK,aAAa,CAAC,EAC5F,CAAC;oBACD,SAAiB,CAAC,aAAa,GAAG,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC;gBAC1D,CAAC;YACF,CAAC;QACF,CAAC;IACF,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,YAAY,CAAC,KAAa,EAAE,YAAqB,EAA6B;IACtF,IAAI,CAAC,KAAK;QAAE,OAAO,EAAE,CAAC;IAEtB,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QAC1B,MAAM,UAAU,GAAG,IAAI,CAAC,UAAiB,CAAC,CAAC,wCAAwC;QA
EnF,OAAO;YACN,IAAI,EAAE,YAAY,CAAC,CAAC,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI;YAC5D,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,YAAY,EAAE;gBACb,IAAI,EAAE,QAAiB;gBACvB,UAAU,EAAE,UAAU,CAAC,UAAU,IAAI,EAAE;gBACvC,QAAQ,EAAE,UAAU,CAAC,QAAQ,IAAI,EAAE;aACnC;SACD,CAAC;IAAA,CACF,CAAC,CAAC;AAAA,CACH;AAED,SAAS,aAAa,CAAC,MAAqC,EAAc;IACzE,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,UAAU;YACd,OAAO,MAAM,CAAC;QACf,KAAK,YAAY;YAChB,OAAO,QAAQ,CAAC;QACjB,KAAK,UAAU;YACd,OAAO,SAAS,CAAC;QAClB,KAAK,SAAS;YACb,OAAO,OAAO,CAAC;QAChB,KAAK,YAAY,EAAE,kCAAkC;YACpD,OAAO,MAAM,CAAC;QACf,KAAK,eAAe;YACnB,OAAO,MAAM,CAAC,CAAC,8DAA8D;QAC9E,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD","sourcesContent":["import Anthropic from \"@anthropic-ai/sdk\";\nimport type {\n\tContentBlockParam,\n\tMessageCreateParamsStreaming,\n\tMessageParam,\n} from \"@anthropic-ai/sdk/resources/messages.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tMessage,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\n\nimport { transformMessages } from \"./transform-messages.js\";\n\n// Stealth mode: Mimic Claude Code's tool naming exactly\nconst claudeCodeVersion = \"2.1.2\";\n\n// Claude Code 2.x tool names (canonical casing)\n// Source: https://cchistory.mariozechner.at/data/prompts-2.1.11.md\n// To update: https://github.com/badlogic/cchistory\nconst claudeCodeTools = [\n\t\"Read\",\n\t\"Write\",\n\t\"Edit\",\n\t\"Bash\",\n\t\"Grep\",\n\t\"Glob\",\n\t\"AskUserQuestion\",\n\t\"EnterPlanMode\",\n\t\"ExitPlanMode\",\n\t\"KillShell\",\n\t\"NotebookEdit\",\n\t\"Skill\",\n\t\"Task\",\n\t\"TaskOutput\",\n\t\"TodoWrite\",\n\t\"WebFetch\",\n\t\"WebSearch\",\n];\n\nconst ccToolLookup = new Map(claudeCodeTools.map((t) => [t.toLowerCase(), t]));\n\n// Convert tool name to CC canonical casing if it matches (case-insensitive)\nconst toClaudeCodeName = (name: string) => ccToolLookup.get(name.toLowerCase()) ?? 
name;\nconst fromClaudeCodeName = (name: string, tools?: Tool[]) => {\n\tif (tools && tools.length > 0) {\n\t\tconst lowerName = name.toLowerCase();\n\t\tconst matchedTool = tools.find((tool) => tool.name.toLowerCase() === lowerName);\n\t\tif (matchedTool) return matchedTool.name;\n\t}\n\treturn name;\n};\n\n/**\n * Convert content blocks to Anthropic API format\n */\nfunction convertContentBlocks(content: (TextContent | ImageContent)[]):\n\t| string\n\t| Array<\n\t\t\t| { type: \"text\"; text: string }\n\t\t\t| {\n\t\t\t\t\ttype: \"image\";\n\t\t\t\t\tsource: {\n\t\t\t\t\t\ttype: \"base64\";\n\t\t\t\t\t\tmedia_type: \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\";\n\t\t\t\t\t\tdata: string;\n\t\t\t\t\t};\n\t\t\t }\n\t > {\n\t// If only text blocks, return as concatenated string for simplicity\n\tconst hasImages = content.some((c) => c.type === \"image\");\n\tif (!hasImages) {\n\t\treturn sanitizeSurrogates(content.map((c) => (c as TextContent).text).join(\"\\n\"));\n\t}\n\n\t// If we have images, convert to content block array\n\tconst blocks = content.map((block) => {\n\t\tif (block.type === \"text\") {\n\t\t\treturn {\n\t\t\t\ttype: \"text\" as const,\n\t\t\t\ttext: sanitizeSurrogates(block.text),\n\t\t\t};\n\t\t}\n\t\treturn {\n\t\t\ttype: \"image\" as const,\n\t\t\tsource: {\n\t\t\t\ttype: \"base64\" as const,\n\t\t\t\tmedia_type: block.mimeType as \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\",\n\t\t\t\tdata: block.data,\n\t\t\t},\n\t\t};\n\t});\n\n\t// If only images (no text), add placeholder text block\n\tconst hasText = blocks.some((b) => b.type === \"text\");\n\tif (!hasText) {\n\t\tblocks.unshift({\n\t\t\ttype: \"text\" as const,\n\t\t\ttext: \"(see attached image)\",\n\t\t});\n\t}\n\n\treturn blocks;\n}\n\nexport interface AnthropicOptions extends StreamOptions {\n\tthinkingEnabled?: boolean;\n\tthinkingBudgetTokens?: number;\n\tinterleavedThinking?: boolean;\n\ttoolChoice?: \"auto\" | \"any\" | \"none\" | { type: \"tool\"; name: string };\n}\n\nexport const streamAnthropic: StreamFunction<\"anthropic-messages\"> = (\n\tmodel: Model<\"anthropic-messages\">,\n\tcontext: Context,\n\toptions?: AnthropicOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"anthropic-messages\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey ?? getEnvApiKey(model.provider) ?? \"\";\n\t\t\tconst { client, isOAuthToken } = createClient(model, apiKey, options?.interleavedThinking ?? 
true);\n\t\t\tconst params = buildParams(model, context, isOAuthToken, options);\n\t\t\tconst anthropicStream = client.messages.stream({ ...params, stream: true }, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\ttype Block = (ThinkingContent | TextContent | (ToolCall & { partialJson: string })) & { index: number };\n\t\t\tconst blocks = output.content as Block[];\n\n\t\t\tfor await (const event of anthropicStream) {\n\t\t\t\tif (event.type === \"message_start\") {\n\t\t\t\t\t// Capture initial token usage from message_start event\n\t\t\t\t\t// This ensures we have input token counts even if the stream is aborted early\n\t\t\t\t\toutput.usage.input = event.message.usage.input_tokens || 0;\n\t\t\t\t\toutput.usage.output = event.message.usage.output_tokens || 0;\n\t\t\t\t\toutput.usage.cacheRead = event.message.usage.cache_read_input_tokens || 0;\n\t\t\t\t\toutput.usage.cacheWrite = event.message.usage.cache_creation_input_tokens || 0;\n\t\t\t\t\t// Anthropic doesn't provide total_tokens, compute from components\n\t\t\t\t\toutput.usage.totalTokens =\n\t\t\t\t\t\toutput.usage.input + output.usage.output + output.usage.cacheRead + output.usage.cacheWrite;\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t} else if (event.type === \"content_block_start\") {\n\t\t\t\t\tif (event.content_block.type === \"text\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t} else if (event.content_block.type === \"thinking\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\tthinkingSignature: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t} else if (event.content_block.type === \"tool_use\") {\n\t\t\t\t\t\tconst block: Block = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: event.content_block.id,\n\t\t\t\t\t\t\tname: isOAuthToken\n\t\t\t\t\t\t\t\t? 
fromClaudeCodeName(event.content_block.name, context.tools)\n\t\t\t\t\t\t\t\t: event.content_block.name,\n\t\t\t\t\t\t\targuments: event.content_block.input as Record<string, any>,\n\t\t\t\t\t\t\tpartialJson: \"\",\n\t\t\t\t\t\t\tindex: event.index,\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(block);\n\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: output.content.length - 1, partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"content_block_delta\") {\n\t\t\t\t\tif (event.delta.type === \"text_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"text\") {\n\t\t\t\t\t\t\tblock.text += event.delta.text;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.text,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"thinking_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"thinking\") {\n\t\t\t\t\t\t\tblock.thinking += event.delta.thinking;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.thinking,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"input_json_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"toolCall\") {\n\t\t\t\t\t\t\tblock.partialJson += event.delta.partial_json;\n\t\t\t\t\t\t\tblock.arguments = parseStreamingJson(block.partialJson);\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tdelta: event.delta.partial_json,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (event.delta.type === \"signature_delta\") {\n\t\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\t\tif (block && block.type === \"thinking\") {\n\t\t\t\t\t\t\tblock.thinkingSignature = block.thinkingSignature || \"\";\n\t\t\t\t\t\t\tblock.thinkingSignature += event.delta.signature;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"content_block_stop\") {\n\t\t\t\t\tconst index = blocks.findIndex((b) => b.index === event.index);\n\t\t\t\t\tconst block = blocks[index];\n\t\t\t\t\tif (block) {\n\t\t\t\t\t\tdelete (block as any).index;\n\t\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\t\tcontentIndex: index,\n\t\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\t\tblock.arguments = parseStreamingJson(block.partialJson);\n\t\t\t\t\t\t\tdelete (block as any).partialJson;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\t\tcontentIndex: 
index,\n\t\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (event.type === \"message_delta\") {\n\t\t\t\t\tif (event.delta.stop_reason) {\n\t\t\t\t\t\toutput.stopReason = mapStopReason(event.delta.stop_reason);\n\t\t\t\t\t}\n\t\t\t\t\toutput.usage.input = event.usage.input_tokens || 0;\n\t\t\t\t\toutput.usage.output = event.usage.output_tokens || 0;\n\t\t\t\t\toutput.usage.cacheRead = event.usage.cache_read_input_tokens || 0;\n\t\t\t\t\toutput.usage.cacheWrite = event.usage.cache_creation_input_tokens || 0;\n\t\t\t\t\t// Anthropic doesn't provide total_tokens, compute from components\n\t\t\t\t\toutput.usage.totalTokens =\n\t\t\t\t\t\toutput.usage.input + output.usage.output + output.usage.cacheRead + output.usage.cacheWrite;\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction isOAuthToken(apiKey: string): boolean {\n\treturn apiKey.includes(\"sk-ant-oat\");\n}\n\nfunction createClient(\n\tmodel: Model<\"anthropic-messages\">,\n\tapiKey: string,\n\tinterleavedThinking: boolean,\n): { client: Anthropic; isOAuthToken: boolean } {\n\tconst betaFeatures = [\"fine-grained-tool-streaming-2025-05-14\"];\n\tif (interleavedThinking) {\n\t\tbetaFeatures.push(\"interleaved-thinking-2025-05-14\");\n\t}\n\n\tconst oauthToken = isOAuthToken(apiKey);\n\tif (oauthToken) {\n\t\t// Stealth mode: Mimic Claude Code's headers exactly\n\t\tconst defaultHeaders = {\n\t\t\taccept: \"application/json\",\n\t\t\t\"anthropic-dangerous-direct-browser-access\": \"true\",\n\t\t\t\"anthropic-beta\": `claude-code-20250219,oauth-2025-04-20,${betaFeatures.join(\",\")}`,\n\t\t\t\"user-agent\": `claude-cli/${claudeCodeVersion} (external, cli)`,\n\t\t\t\"x-app\": \"cli\",\n\t\t\t...(model.headers || {}),\n\t\t};\n\n\t\tconst client = new Anthropic({\n\t\t\tapiKey: null,\n\t\t\tauthToken: apiKey,\n\t\t\tbaseURL: model.baseUrl,\n\t\t\tdefaultHeaders,\n\t\t\tdangerouslyAllowBrowser: true,\n\t\t});\n\n\t\treturn { client, isOAuthToken: true };\n\t}\n\n\tconst defaultHeaders = {\n\t\taccept: \"application/json\",\n\t\t\"anthropic-dangerous-direct-browser-access\": \"true\",\n\t\t\"anthropic-beta\": betaFeatures.join(\",\"),\n\t\t...(model.headers || {}),\n\t};\n\n\tconst client = new Anthropic({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders,\n\t});\n\n\treturn { client, isOAuthToken: false };\n}\n\nfunction buildParams(\n\tmodel: Model<\"anthropic-messages\">,\n\tcontext: Context,\n\tisOAuthToken: boolean,\n\toptions?: AnthropicOptions,\n): MessageCreateParamsStreaming {\n\tconst params: MessageCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages: convertMessages(context.messages, model, 
isOAuthToken),\n\t\tmax_tokens: options?.maxTokens || (model.maxTokens / 3) | 0,\n\t\tstream: true,\n\t};\n\n\t// For OAuth tokens, we MUST include Claude Code identity\n\tif (isOAuthToken) {\n\t\tparams.system = [\n\t\t\t{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: \"You are Claude Code, Anthropic's official CLI for Claude.\",\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t},\n\t\t];\n\t\tif (context.systemPrompt) {\n\t\t\tparams.system.push({\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(context.systemPrompt),\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t});\n\t\t}\n\t} else if (context.systemPrompt) {\n\t\t// Add cache control to system prompt for non-OAuth tokens\n\t\tparams.system = [\n\t\t\t{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: sanitizeSurrogates(context.systemPrompt),\n\t\t\t\tcache_control: {\n\t\t\t\t\ttype: \"ephemeral\",\n\t\t\t\t},\n\t\t\t},\n\t\t];\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools, isOAuthToken);\n\t}\n\n\tif (options?.thinkingEnabled && model.reasoning) {\n\t\tparams.thinking = {\n\t\t\ttype: \"enabled\",\n\t\t\tbudget_tokens: options.thinkingBudgetTokens || 1024,\n\t\t};\n\t}\n\n\tif (options?.toolChoice) {\n\t\tif (typeof options.toolChoice === \"string\") {\n\t\t\tparams.tool_choice = { type: options.toolChoice };\n\t\t} else {\n\t\t\tparams.tool_choice = options.toolChoice;\n\t\t}\n\t}\n\n\treturn params;\n}\n\n// Sanitize tool call IDs to match Anthropic's required pattern: ^[a-zA-Z0-9_-]+$\nfunction sanitizeToolCallId(id: string): string {\n\t// Replace any character that isn't alphanumeric, underscore, or hyphen with underscore\n\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n}\n\nfunction convertMessages(\n\tmessages: Message[],\n\tmodel: Model<\"anthropic-messages\">,\n\tisOAuthToken: boolean,\n): MessageParam[] {\n\tconst params: MessageParam[] = [];\n\n\t// Transform messages for cross-provider compatibility\n\tconst transformedMessages = transformMessages(messages, model);\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tif (msg.content.trim().length > 0) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"user\",\n\t\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tconst blocks: ContentBlockParam[] = msg.content.map((item) => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t};\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image\",\n\t\t\t\t\t\t\tsource: {\n\t\t\t\t\t\t\t\ttype: \"base64\",\n\t\t\t\t\t\t\t\tmedia_type: item.mimeType as \"image/jpeg\" | \"image/png\" | \"image/gif\" | \"image/webp\",\n\t\t\t\t\t\t\t\tdata: item.data,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tlet filteredBlocks = !model?.input.includes(\"image\") ? 
blocks.filter((b) => b.type !== \"image\") : blocks;\n\t\t\t\tfilteredBlocks = filteredBlocks.filter((b) => {\n\t\t\t\t\tif (b.type === \"text\") {\n\t\t\t\t\t\treturn b.text.trim().length > 0;\n\t\t\t\t\t}\n\t\t\t\t\treturn true;\n\t\t\t\t});\n\t\t\t\tif (filteredBlocks.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredBlocks,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst blocks: ContentBlockParam[] = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tif (block.text.trim().length === 0) continue;\n\t\t\t\t\tblocks.push({\n\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\ttext: sanitizeSurrogates(block.text),\n\t\t\t\t\t});\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tif (block.thinking.trim().length === 0) continue;\n\t\t\t\t\t// If thinking signature is missing/empty (e.g., from aborted stream),\n\t\t\t\t\t// convert to plain text block without <thinking> tags to avoid API rejection\n\t\t\t\t\t// and prevent Claude from mimicking the tags in responses\n\t\t\t\t\tif (!block.thinkingSignature || block.thinkingSignature.trim().length === 0) {\n\t\t\t\t\t\tblocks.push({\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\tblocks.push({\n\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\tthinking: sanitizeSurrogates(block.thinking),\n\t\t\t\t\t\t\tsignature: block.thinkingSignature,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tblocks.push({\n\t\t\t\t\t\ttype: \"tool_use\",\n\t\t\t\t\t\tid: sanitizeToolCallId(block.id),\n\t\t\t\t\t\tname: isOAuthToken ? toClaudeCodeName(block.name) : block.name,\n\t\t\t\t\t\tinput: block.arguments,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (blocks.length === 0) continue;\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: blocks,\n\t\t\t});\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Collect all consecutive toolResult messages, needed for z.ai Anthropic endpoint\n\t\t\tconst toolResults: ContentBlockParam[] = [];\n\n\t\t\t// Add the current tool result\n\t\t\ttoolResults.push({\n\t\t\t\ttype: \"tool_result\",\n\t\t\t\ttool_use_id: sanitizeToolCallId(msg.toolCallId),\n\t\t\t\tcontent: convertContentBlocks(msg.content),\n\t\t\t\tis_error: msg.isError,\n\t\t\t});\n\n\t\t\t// Look ahead for consecutive toolResult messages\n\t\t\tlet j = i + 1;\n\t\t\twhile (j < transformedMessages.length && transformedMessages[j].role === \"toolResult\") {\n\t\t\t\tconst nextMsg = transformedMessages[j] as ToolResultMessage; // We know it's a toolResult\n\t\t\t\ttoolResults.push({\n\t\t\t\t\ttype: \"tool_result\",\n\t\t\t\t\ttool_use_id: sanitizeToolCallId(nextMsg.toolCallId),\n\t\t\t\t\tcontent: convertContentBlocks(nextMsg.content),\n\t\t\t\t\tis_error: nextMsg.isError,\n\t\t\t\t});\n\t\t\t\tj++;\n\t\t\t}\n\n\t\t\t// Skip the messages we've already processed\n\t\t\ti = j - 1;\n\n\t\t\t// Add a single user message with all tool results\n\t\t\tparams.push({\n\t\t\t\trole: \"user\",\n\t\t\t\tcontent: toolResults,\n\t\t\t});\n\t\t}\n\t}\n\n\t// Add cache_control to the last user message to cache conversation history\n\tif (params.length > 0) {\n\t\tconst lastMessage = params[params.length - 1];\n\t\tif (lastMessage.role === \"user\") {\n\t\t\t// Add cache control to the last content block\n\t\t\tif (Array.isArray(lastMessage.content)) {\n\t\t\t\tconst lastBlock = 
lastMessage.content[lastMessage.content.length - 1];\n\t\t\t\tif (\n\t\t\t\t\tlastBlock &&\n\t\t\t\t\t(lastBlock.type === \"text\" || lastBlock.type === \"image\" || lastBlock.type === \"tool_result\")\n\t\t\t\t) {\n\t\t\t\t\t(lastBlock as any).cache_control = { type: \"ephemeral\" };\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[], isOAuthToken: boolean): Anthropic.Messages.Tool[] {\n\tif (!tools) return [];\n\n\treturn tools.map((tool) => {\n\t\tconst jsonSchema = tool.parameters as any; // TypeBox already generates JSON Schema\n\n\t\treturn {\n\t\t\tname: isOAuthToken ? toClaudeCodeName(tool.name) : tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tinput_schema: {\n\t\t\t\ttype: \"object\" as const,\n\t\t\t\tproperties: jsonSchema.properties || {},\n\t\t\t\trequired: jsonSchema.required || [],\n\t\t\t},\n\t\t};\n\t});\n}\n\nfunction mapStopReason(reason: Anthropic.Messages.StopReason): StopReason {\n\tswitch (reason) {\n\t\tcase \"end_turn\":\n\t\t\treturn \"stop\";\n\t\tcase \"max_tokens\":\n\t\t\treturn \"length\";\n\t\tcase \"tool_use\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"refusal\":\n\t\t\treturn \"error\";\n\t\tcase \"pause_turn\": // Stop is good enough -> resubmit\n\t\t\treturn \"stop\";\n\t\tcase \"stop_sequence\":\n\t\t\treturn \"stop\"; // We don't supply stop sequences, so this should never happen\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
@@ -1 +1 @@
1
- {"version":3,"file":"openai-codex-responses.d.ts","sourceRoot":"","sources":["../../src/providers/openai-codex-responses.ts"],"names":[],"mappings":"AASA,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAIb,MAAM,aAAa,CAAC;AAmBrB,MAAM,WAAW,2BAA4B,SAAQ,aAAa;IACjE,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IAC3E,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,GAAG,IAAI,CAAC;IACzE,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;CAC1C;AAgDD,eAAO,MAAM,0BAA0B,EAAE,cAAc,CAAC,wBAAwB,CAmH/E,CAAC","sourcesContent":["import os from \"node:os\";\nimport type {\n\tResponseFunctionToolCall,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n} from \"openai/resources/responses/responses.js\";\nimport { PI_STATIC_INSTRUCTIONS } from \"../constants.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n// ============================================================================\n// Configuration\n// ============================================================================\n\nconst CODEX_URL = \"https://chatgpt.com/backend-api/codex/responses\";\nconst JWT_CLAIM_PATH = \"https://api.openai.com/auth\" as const;\nconst MAX_RETRIES = 3;\nconst BASE_DELAY_MS = 1000;\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface OpenAICodexResponsesOptions extends StreamOptions {\n\treasoningEffort?: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\treasoningSummary?: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\" | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n}\n\ninterface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: unknown[];\n\ttools?: unknown;\n\ttool_choice?: \"auto\";\n\tparallel_tool_calls?: boolean;\n\ttemperature?: number;\n\treasoning?: { effort?: string; summary?: string };\n\ttext?: { verbosity?: string };\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\t[key: string]: unknown;\n}\n\n// ============================================================================\n// Retry Helpers\n// ============================================================================\n\nfunction isRetryableError(status: number, errorText: string): boolean {\n\tif (status === 429 || status === 500 || status === 502 || status === 503 || status === 504) {\n\t\treturn true;\n\t}\n\treturn /rate.?limit|overloaded|service.?unavailable|upstream.?connect|connection.?refused/i.test(errorText);\n}\n\nfunction sleep(ms: number, signal?: AbortSignal): Promise<void> {\n\treturn new Promise((resolve, reject) => {\n\t\tif (signal?.aborted) {\n\t\t\treject(new Error(\"Request was aborted\"));\n\t\t\treturn;\n\t\t}\n\t\tconst timeout = setTimeout(resolve, ms);\n\t\tsignal?.addEventListener(\"abort\", () => {\n\t\t\tclearTimeout(timeout);\n\t\t\treject(new Error(\"Request was aborted\"));\n\t\t});\n\t});\n}\n\n// 
============================================================================\n// Main Stream Function\n// ============================================================================\n\nexport const streamOpenAICodexResponses: StreamFunction<\"openai-codex-responses\"> = (\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"openai-codex-responses\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\tconst accountId = extractAccountId(apiKey);\n\t\t\tconst body = buildRequestBody(model, context, options);\n\t\t\tconst headers = buildHeaders(model.headers, accountId, apiKey, options?.sessionId);\n\t\t\tconst bodyJson = JSON.stringify(body);\n\n\t\t\t// Fetch with retry logic for rate limits and transient errors\n\t\t\tlet response: Response | undefined;\n\t\t\tlet lastError: Error | undefined;\n\n\t\t\tfor (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n\t\t\t\tif (options?.signal?.aborted) {\n\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t}\n\n\t\t\t\ttry {\n\t\t\t\t\tresponse = await fetch(CODEX_URL, {\n\t\t\t\t\t\tmethod: \"POST\",\n\t\t\t\t\t\theaders,\n\t\t\t\t\t\tbody: bodyJson,\n\t\t\t\t\t\tsignal: options?.signal,\n\t\t\t\t\t});\n\n\t\t\t\t\tif (response.ok) {\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tconst errorText = await response.text();\n\t\t\t\t\tif (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {\n\t\t\t\t\t\tconst delayMs = BASE_DELAY_MS * 2 ** attempt;\n\t\t\t\t\t\tawait sleep(delayMs, options?.signal);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\n\t\t\t\t\t// Parse error for friendly message on final attempt or non-retryable error\n\t\t\t\t\tconst fakeResponse = new Response(errorText, {\n\t\t\t\t\t\tstatus: response.status,\n\t\t\t\t\t\tstatusText: response.statusText,\n\t\t\t\t\t});\n\t\t\t\t\tconst info = await parseErrorResponse(fakeResponse);\n\t\t\t\t\tthrow new Error(info.friendlyMessage || info.message);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (error instanceof Error) {\n\t\t\t\t\t\tif (error.name === \"AbortError\" || error.message === \"Request was aborted\") {\n\t\t\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tlastError = error instanceof Error ? error : new Error(String(error));\n\t\t\t\t\t// Network errors are retryable\n\t\t\t\t\tif (attempt < MAX_RETRIES && !lastError.message.includes(\"usage limit\")) {\n\t\t\t\t\t\tconst delayMs = BASE_DELAY_MS * 2 ** attempt;\n\t\t\t\t\t\tawait sleep(delayMs, options?.signal);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\t\t\t\t\tthrow lastError;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (!response?.ok) {\n\t\t\t\tthrow lastError ?? 
new Error(\"Failed after retries\");\n\t\t\t}\n\n\t\t\tif (!response.body) {\n\t\t\t\tthrow new Error(\"No response body\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tawait processStream(response, output, stream, model);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason as \"stop\" | \"length\" | \"toolUse\", message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : String(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\n// ============================================================================\n// Request Building\n// ============================================================================\n\nfunction buildRequestBody(\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): RequestBody {\n\tconst systemPrompt = buildSystemPrompt(context.systemPrompt);\n\tconst messages = convertMessages(model, context);\n\n\t// Prepend developer messages\n\tconst developerMessages = systemPrompt.developerMessages.map((text) => ({\n\t\ttype: \"message\",\n\t\trole: \"developer\",\n\t\tcontent: [{ type: \"input_text\", text }],\n\t}));\n\n\tconst body: RequestBody = {\n\t\tmodel: model.id,\n\t\tstore: false,\n\t\tstream: true,\n\t\tinstructions: systemPrompt.instructions,\n\t\tinput: [...developerMessages, ...messages],\n\t\ttext: { verbosity: options?.textVerbosity || \"medium\" },\n\t\tinclude: [\"reasoning.encrypted_content\"],\n\t\tprompt_cache_key: options?.sessionId,\n\t\ttool_choice: \"auto\",\n\t\tparallel_tool_calls: true,\n\t};\n\n\tif (options?.temperature !== undefined) {\n\t\tbody.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tbody.tools = context.tools.map((tool) => ({\n\t\t\ttype: \"function\",\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters,\n\t\t\tstrict: null,\n\t\t}));\n\t}\n\n\tif (options?.reasoningEffort !== undefined) {\n\t\tbody.reasoning = {\n\t\t\teffort: clampReasoningEffort(model.id, options.reasoningEffort),\n\t\t\tsummary: options.reasoningSummary ?? \"auto\",\n\t\t};\n\t}\n\n\treturn body;\n}\n\nfunction buildSystemPrompt(userSystemPrompt?: string): { instructions: string; developerMessages: string[] } {\n\t// PI_STATIC_INSTRUCTIONS is whitelisted and must be in the instructions field.\n\t// User's system prompt goes in developer messages, with the static prefix stripped.\n\tconst staticPrefix = PI_STATIC_INSTRUCTIONS.trim();\n\tconst developerMessages: string[] = [];\n\n\tif (userSystemPrompt?.trim()) {\n\t\tlet dynamicPart = userSystemPrompt.trim();\n\t\tif (dynamicPart.startsWith(staticPrefix)) {\n\t\t\tdynamicPart = dynamicPart.slice(staticPrefix.length).trim();\n\t\t}\n\t\tif (dynamicPart) developerMessages.push(dynamicPart);\n\t}\n\n\treturn { instructions: staticPrefix, developerMessages };\n}\n\nfunction clampReasoningEffort(modelId: string, effort: string): string {\n\tconst id = modelId.includes(\"/\") ? modelId.split(\"/\").pop()! 
: modelId;\n\tif (id.startsWith(\"gpt-5.2\") && effort === \"minimal\") return \"low\";\n\tif (id === \"gpt-5.1\" && effort === \"xhigh\") return \"high\";\n\tif (id === \"gpt-5.1-codex-mini\") return effort === \"high\" || effort === \"xhigh\" ? \"high\" : \"medium\";\n\treturn effort;\n}\n\n// ============================================================================\n// Message Conversion\n// ============================================================================\n\nfunction convertMessages(model: Model<\"openai-codex-responses\">, context: Context): unknown[] {\n\tconst messages: unknown[] = [];\n\tconst transformed = transformMessages(context.messages, model);\n\n\tfor (const msg of transformed) {\n\t\tif (msg.role === \"user\") {\n\t\t\tmessages.push(convertUserMessage(msg, model));\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tmessages.push(...convertAssistantMessage(msg));\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tmessages.push(...convertToolResult(msg, model));\n\t\t}\n\t}\n\n\treturn messages.filter(Boolean);\n}\n\nfunction convertUserMessage(\n\tmsg: { content: string | Array<{ type: string; text?: string; mimeType?: string; data?: string }> },\n\tmodel: Model<\"openai-codex-responses\">,\n): unknown {\n\tif (typeof msg.content === \"string\") {\n\t\treturn {\n\t\t\trole: \"user\",\n\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t};\n\t}\n\n\tconst content = msg.content.map((item) => {\n\t\tif (item.type === \"text\") {\n\t\t\treturn { type: \"input_text\", text: sanitizeSurrogates(item.text || \"\") };\n\t\t}\n\t\treturn {\n\t\t\ttype: \"input_image\",\n\t\t\tdetail: \"auto\",\n\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t};\n\t});\n\n\tconst filtered = model.input.includes(\"image\") ? content : content.filter((c) => c.type !== \"input_image\");\n\treturn filtered.length > 0 ? 
{ role: \"user\", content: filtered } : null;\n}\n\nfunction convertAssistantMessage(msg: AssistantMessage): unknown[] {\n\tconst output: unknown[] = [];\n\n\tfor (const block of msg.content) {\n\t\tif (block.type === \"thinking\" && msg.stopReason !== \"error\" && block.thinkingSignature) {\n\t\t\toutput.push(JSON.parse(block.thinkingSignature));\n\t\t} else if (block.type === \"text\") {\n\t\t\toutput.push({\n\t\t\t\ttype: \"message\",\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(block.text), annotations: [] }],\n\t\t\t\tstatus: \"completed\",\n\t\t\t});\n\t\t} else if (block.type === \"toolCall\" && msg.stopReason !== \"error\") {\n\t\t\tconst [callId, id] = block.id.split(\"|\");\n\t\t\toutput.push({\n\t\t\t\ttype: \"function_call\",\n\t\t\t\tid,\n\t\t\t\tcall_id: callId,\n\t\t\t\tname: block.name,\n\t\t\t\targuments: JSON.stringify(block.arguments),\n\t\t\t});\n\t\t}\n\t}\n\n\treturn output;\n}\n\nfunction convertToolResult(\n\tmsg: { toolCallId: string; content: Array<{ type: string; text?: string; mimeType?: string; data?: string }> },\n\tmodel: Model<\"openai-codex-responses\">,\n): unknown[] {\n\tconst output: unknown[] = [];\n\tconst textResult = msg.content\n\t\t.filter((c) => c.type === \"text\")\n\t\t.map((c) => c.text || \"\")\n\t\t.join(\"\\n\");\n\tconst hasImages = msg.content.some((c) => c.type === \"image\");\n\n\toutput.push({\n\t\ttype: \"function_call_output\",\n\t\tcall_id: msg.toolCallId.split(\"|\")[0],\n\t\toutput: sanitizeSurrogates(textResult || \"(see attached image)\"),\n\t});\n\n\tif (hasImages && model.input.includes(\"image\")) {\n\t\tconst imageParts = msg.content\n\t\t\t.filter((c) => c.type === \"image\")\n\t\t\t.map((c) => ({\n\t\t\t\ttype: \"input_image\",\n\t\t\t\tdetail: \"auto\",\n\t\t\t\timage_url: `data:${c.mimeType};base64,${c.data}`,\n\t\t\t}));\n\n\t\toutput.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: [{ type: \"input_text\", text: \"Attached image(s) from tool result:\" }, ...imageParts],\n\t\t});\n\t}\n\n\treturn output;\n}\n\n// ============================================================================\n// Response Processing\n// ============================================================================\n\nasync function processStream(\n\tresponse: Response,\n\toutput: AssistantMessage,\n\tstream: AssistantMessageEventStream,\n\tmodel: Model<\"openai-codex-responses\">,\n): Promise<void> {\n\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\tconst blockIndex = () => output.content.length - 1;\n\n\tfor await (const event of parseSSE(response)) {\n\t\tconst type = event.type as string;\n\n\t\tswitch (type) {\n\t\t\tcase \"response.output_item.added\": {\n\t\t\t\tconst item = event.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t} else if (item.type === \"message\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output 
});\n\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t\t};\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_part.added\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\") {\n\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\tcurrentItem.summary.push((event as { part: ResponseReasoningItem[\"summary\"][number] }).part);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_text.delta\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\tconst lastPart = currentItem.summary?.[currentItem.summary.length - 1];\n\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_part.done\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tconst lastPart = currentItem.summary?.[currentItem.summary.length - 1];\n\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: blockIndex(), delta: \"\\n\\n\", partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.content_part.added\": {\n\t\t\t\tif (currentItem?.type === \"message\") {\n\t\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t\tconst part = (event as { part?: ResponseOutputMessage[\"content\"][number] }).part;\n\t\t\t\t\tif (part && (part.type === \"output_text\" || part.type === \"refusal\")) {\n\t\t\t\t\t\tcurrentItem.content.push(part);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.output_text.delta\": {\n\t\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\tif (lastPart?.type === \"output_text\") {\n\t\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.refusal.delta\": {\n\t\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\tif (lastPart?.type === \"refusal\") {\n\t\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\tlastPart.refusal += delta;\n\t\t\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.function_call_arguments.delta\": {\n\t\t\t\tif 
(currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\tcurrentBlock.partialJson += delta;\n\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.output_item.done\": {\n\t\t\t\tconst item = event.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? c.text : c.refusal)).join(\"\");\n\t\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\targuments: JSON.parse(item.arguments),\n\t\t\t\t\t};\n\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.completed\":\n\t\t\tcase \"response.done\": {\n\t\t\t\tconst resp = (\n\t\t\t\t\tevent as {\n\t\t\t\t\t\tresponse?: {\n\t\t\t\t\t\t\tusage?: {\n\t\t\t\t\t\t\t\tinput_tokens?: number;\n\t\t\t\t\t\t\t\toutput_tokens?: number;\n\t\t\t\t\t\t\t\ttotal_tokens?: number;\n\t\t\t\t\t\t\t\tinput_tokens_details?: { cached_tokens?: number };\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tstatus?: string;\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t).response;\n\t\t\t\tif (resp?.usage) {\n\t\t\t\t\tconst cached = resp.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: (resp.usage.input_tokens || 0) - cached,\n\t\t\t\t\t\toutput: resp.usage.output_tokens || 0,\n\t\t\t\t\t\tcacheRead: cached,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: resp.usage.total_tokens || 0,\n\t\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t\toutput.stopReason = mapStopReason(resp?.status);\n\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"error\": {\n\t\t\t\tconst code = (event as { code?: string }).code || \"\";\n\t\t\t\tconst message = (event as { message?: string }).message || \"\";\n\t\t\t\tthrow new Error(`Codex error: ${message || code || JSON.stringify(event)}`);\n\t\t\t}\n\n\t\t\tcase \"response.failed\": {\n\t\t\t\tconst msg = (event as { response?: { error?: { message?: string } } 
}).response?.error?.message;\n\t\t\t\tthrow new Error(msg || \"Codex response failed\");\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunction mapStopReason(status?: string): StopReason {\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n\n// ============================================================================\n// SSE Parsing\n// ============================================================================\n\nasync function* parseSSE(response: Response): AsyncGenerator<Record<string, unknown>> {\n\tif (!response.body) return;\n\n\tconst reader = response.body.getReader();\n\tconst decoder = new TextDecoder();\n\tlet buffer = \"\";\n\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tbuffer += decoder.decode(value, { stream: true });\n\n\t\tlet idx = buffer.indexOf(\"\\n\\n\");\n\t\twhile (idx !== -1) {\n\t\t\tconst chunk = buffer.slice(0, idx);\n\t\t\tbuffer = buffer.slice(idx + 2);\n\n\t\t\tconst dataLines = chunk\n\t\t\t\t.split(\"\\n\")\n\t\t\t\t.filter((l) => l.startsWith(\"data:\"))\n\t\t\t\t.map((l) => l.slice(5).trim());\n\t\t\tif (dataLines.length > 0) {\n\t\t\t\tconst data = dataLines.join(\"\\n\").trim();\n\t\t\t\tif (data && data !== \"[DONE]\") {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tyield JSON.parse(data);\n\t\t\t\t\t} catch {}\n\t\t\t\t}\n\t\t\t}\n\t\t\tidx = buffer.indexOf(\"\\n\\n\");\n\t\t}\n\t}\n}\n\n// ============================================================================\n// Error Handling\n// ============================================================================\n\nasync function parseErrorResponse(response: Response): Promise<{ message: string; friendlyMessage?: string }> {\n\tconst raw = await response.text();\n\tlet message = raw || response.statusText || \"Request failed\";\n\tlet friendlyMessage: string | undefined;\n\n\ttry {\n\t\tconst parsed = JSON.parse(raw) as {\n\t\t\terror?: { code?: string; type?: string; message?: string; plan_type?: string; resets_at?: number };\n\t\t};\n\t\tconst err = parsed?.error;\n\t\tif (err) {\n\t\t\tconst code = err.code || err.type || \"\";\n\t\t\tif (/usage_limit_reached|usage_not_included|rate_limit_exceeded/i.test(code) || response.status === 429) {\n\t\t\t\tconst plan = err.plan_type ? ` (${err.plan_type.toLowerCase()} plan)` : \"\";\n\t\t\t\tconst mins = err.resets_at\n\t\t\t\t\t? Math.max(0, Math.round((err.resets_at * 1000 - Date.now()) / 60000))\n\t\t\t\t\t: undefined;\n\t\t\t\tconst when = mins !== undefined ? 
` Try again in ~${mins} min.` : \"\";\n\t\t\t\tfriendlyMessage = `You have hit your ChatGPT usage limit${plan}.${when}`.trim();\n\t\t\t}\n\t\t\tmessage = err.message || friendlyMessage || message;\n\t\t}\n\t} catch {}\n\n\treturn { message, friendlyMessage };\n}\n\n// ============================================================================\n// Auth & Headers\n// ============================================================================\n\nfunction extractAccountId(token: string): string {\n\ttry {\n\t\tconst parts = token.split(\".\");\n\t\tif (parts.length !== 3) throw new Error(\"Invalid token\");\n\t\tconst payload = JSON.parse(Buffer.from(parts[1], \"base64\").toString(\"utf-8\"));\n\t\tconst accountId = payload?.[JWT_CLAIM_PATH]?.chatgpt_account_id;\n\t\tif (!accountId) throw new Error(\"No account ID in token\");\n\t\treturn accountId;\n\t} catch {\n\t\tthrow new Error(\"Failed to extract accountId from token\");\n\t}\n}\n\nfunction buildHeaders(\n\tinitHeaders: Record<string, string> | undefined,\n\taccountId: string,\n\ttoken: string,\n\tsessionId?: string,\n): Headers {\n\tconst headers = new Headers(initHeaders);\n\theaders.set(\"Authorization\", `Bearer ${token}`);\n\theaders.set(\"chatgpt-account-id\", accountId);\n\theaders.set(\"OpenAI-Beta\", \"responses=experimental\");\n\theaders.set(\"originator\", \"pi\");\n\theaders.set(\"User-Agent\", `pi (${os.platform()} ${os.release()}; ${os.arch()})`);\n\theaders.set(\"accept\", \"text/event-stream\");\n\theaders.set(\"content-type\", \"application/json\");\n\n\tif (sessionId) {\n\t\theaders.set(\"session_id\", sessionId);\n\t}\n\n\treturn headers;\n}\n"]}
1
+ {"version":3,"file":"openai-codex-responses.d.ts","sourceRoot":"","sources":["../../src/providers/openai-codex-responses.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAIb,MAAM,aAAa,CAAC;AAmBrB,MAAM,WAAW,2BAA4B,SAAQ,aAAa;IACjE,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IAC3E,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,GAAG,IAAI,CAAC;IACzE,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;CAC1C;AAgDD,eAAO,MAAM,0BAA0B,EAAE,cAAc,CAAC,wBAAwB,CAmH/E,CAAC","sourcesContent":["import os from \"node:os\";\nimport type {\n\tResponseFunctionToolCall,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n// ============================================================================\n// Configuration\n// ============================================================================\n\nconst CODEX_URL = \"https://chatgpt.com/backend-api/codex/responses\";\nconst JWT_CLAIM_PATH = \"https://api.openai.com/auth\" as const;\nconst MAX_RETRIES = 3;\nconst BASE_DELAY_MS = 1000;\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface OpenAICodexResponsesOptions extends StreamOptions {\n\treasoningEffort?: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\treasoningSummary?: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\" | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n}\n\ninterface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: unknown[];\n\ttools?: unknown;\n\ttool_choice?: \"auto\";\n\tparallel_tool_calls?: boolean;\n\ttemperature?: number;\n\treasoning?: { effort?: string; summary?: string };\n\ttext?: { verbosity?: string };\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\t[key: string]: unknown;\n}\n\n// ============================================================================\n// Retry Helpers\n// ============================================================================\n\nfunction isRetryableError(status: number, errorText: string): boolean {\n\tif (status === 429 || status === 500 || status === 502 || status === 503 || status === 504) {\n\t\treturn true;\n\t}\n\treturn /rate.?limit|overloaded|service.?unavailable|upstream.?connect|connection.?refused/i.test(errorText);\n}\n\nfunction sleep(ms: number, signal?: AbortSignal): Promise<void> {\n\treturn new Promise((resolve, reject) => {\n\t\tif (signal?.aborted) {\n\t\t\treject(new Error(\"Request was aborted\"));\n\t\t\treturn;\n\t\t}\n\t\tconst timeout = setTimeout(resolve, ms);\n\t\tsignal?.addEventListener(\"abort\", () => {\n\t\t\tclearTimeout(timeout);\n\t\t\treject(new Error(\"Request was aborted\"));\n\t\t});\n\t});\n}\n\n// ============================================================================\n// Main Stream 
Function\n// ============================================================================\n\nexport const streamOpenAICodexResponses: StreamFunction<\"openai-codex-responses\"> = (\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"openai-codex-responses\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\tconst accountId = extractAccountId(apiKey);\n\t\t\tconst body = buildRequestBody(model, context, options);\n\t\t\tconst headers = buildHeaders(model.headers, accountId, apiKey, options?.sessionId);\n\t\t\tconst bodyJson = JSON.stringify(body);\n\n\t\t\t// Fetch with retry logic for rate limits and transient errors\n\t\t\tlet response: Response | undefined;\n\t\t\tlet lastError: Error | undefined;\n\n\t\t\tfor (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n\t\t\t\tif (options?.signal?.aborted) {\n\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t}\n\n\t\t\t\ttry {\n\t\t\t\t\tresponse = await fetch(CODEX_URL, {\n\t\t\t\t\t\tmethod: \"POST\",\n\t\t\t\t\t\theaders,\n\t\t\t\t\t\tbody: bodyJson,\n\t\t\t\t\t\tsignal: options?.signal,\n\t\t\t\t\t});\n\n\t\t\t\t\tif (response.ok) {\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tconst errorText = await response.text();\n\t\t\t\t\tif (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {\n\t\t\t\t\t\tconst delayMs = BASE_DELAY_MS * 2 ** attempt;\n\t\t\t\t\t\tawait sleep(delayMs, options?.signal);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\n\t\t\t\t\t// Parse error for friendly message on final attempt or non-retryable error\n\t\t\t\t\tconst fakeResponse = new Response(errorText, {\n\t\t\t\t\t\tstatus: response.status,\n\t\t\t\t\t\tstatusText: response.statusText,\n\t\t\t\t\t});\n\t\t\t\t\tconst info = await parseErrorResponse(fakeResponse);\n\t\t\t\t\tthrow new Error(info.friendlyMessage || info.message);\n\t\t\t\t} catch (error) {\n\t\t\t\t\tif (error instanceof Error) {\n\t\t\t\t\t\tif (error.name === \"AbortError\" || error.message === \"Request was aborted\") {\n\t\t\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tlastError = error instanceof Error ? error : new Error(String(error));\n\t\t\t\t\t// Network errors are retryable\n\t\t\t\t\tif (attempt < MAX_RETRIES && !lastError.message.includes(\"usage limit\")) {\n\t\t\t\t\t\tconst delayMs = BASE_DELAY_MS * 2 ** attempt;\n\t\t\t\t\t\tawait sleep(delayMs, options?.signal);\n\t\t\t\t\t\tcontinue;\n\t\t\t\t\t}\n\t\t\t\t\tthrow lastError;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (!response?.ok) {\n\t\t\t\tthrow lastError ?? 
new Error(\"Failed after retries\");\n\t\t\t}\n\n\t\t\tif (!response.body) {\n\t\t\t\tthrow new Error(\"No response body\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\t\t\tawait processStream(response, output, stream, model);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason as \"stop\" | \"length\" | \"toolUse\", message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : String(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\n// ============================================================================\n// Request Building\n// ============================================================================\n\nfunction buildRequestBody(\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): RequestBody {\n\tconst messages = convertMessages(model, context);\n\n\tconst body: RequestBody = {\n\t\tmodel: model.id,\n\t\tstore: false,\n\t\tstream: true,\n\t\tinstructions: context.systemPrompt,\n\t\tinput: messages,\n\t\ttext: { verbosity: options?.textVerbosity || \"medium\" },\n\t\tinclude: [\"reasoning.encrypted_content\"],\n\t\tprompt_cache_key: options?.sessionId,\n\t\ttool_choice: \"auto\",\n\t\tparallel_tool_calls: true,\n\t};\n\n\tif (options?.temperature !== undefined) {\n\t\tbody.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tbody.tools = context.tools.map((tool) => ({\n\t\t\ttype: \"function\",\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters,\n\t\t\tstrict: null,\n\t\t}));\n\t}\n\n\tif (options?.reasoningEffort !== undefined) {\n\t\tbody.reasoning = {\n\t\t\teffort: clampReasoningEffort(model.id, options.reasoningEffort),\n\t\t\tsummary: options.reasoningSummary ?? \"auto\",\n\t\t};\n\t}\n\n\treturn body;\n}\n\nfunction clampReasoningEffort(modelId: string, effort: string): string {\n\tconst id = modelId.includes(\"/\") ? modelId.split(\"/\").pop()! : modelId;\n\tif (id.startsWith(\"gpt-5.2\") && effort === \"minimal\") return \"low\";\n\tif (id === \"gpt-5.1\" && effort === \"xhigh\") return \"high\";\n\tif (id === \"gpt-5.1-codex-mini\") return effort === \"high\" || effort === \"xhigh\" ? 
\"high\" : \"medium\";\n\treturn effort;\n}\n\n// ============================================================================\n// Message Conversion\n// ============================================================================\n\nfunction convertMessages(model: Model<\"openai-codex-responses\">, context: Context): unknown[] {\n\tconst messages: unknown[] = [];\n\tconst transformed = transformMessages(context.messages, model);\n\n\tfor (const msg of transformed) {\n\t\tif (msg.role === \"user\") {\n\t\t\tmessages.push(convertUserMessage(msg, model));\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tmessages.push(...convertAssistantMessage(msg));\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tmessages.push(...convertToolResult(msg, model));\n\t\t}\n\t}\n\n\treturn messages.filter(Boolean);\n}\n\nfunction convertUserMessage(\n\tmsg: { content: string | Array<{ type: string; text?: string; mimeType?: string; data?: string }> },\n\tmodel: Model<\"openai-codex-responses\">,\n): unknown {\n\tif (typeof msg.content === \"string\") {\n\t\treturn {\n\t\t\trole: \"user\",\n\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t};\n\t}\n\n\tconst content = msg.content.map((item) => {\n\t\tif (item.type === \"text\") {\n\t\t\treturn { type: \"input_text\", text: sanitizeSurrogates(item.text || \"\") };\n\t\t}\n\t\treturn {\n\t\t\ttype: \"input_image\",\n\t\t\tdetail: \"auto\",\n\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t};\n\t});\n\n\tconst filtered = model.input.includes(\"image\") ? content : content.filter((c) => c.type !== \"input_image\");\n\treturn filtered.length > 0 ? { role: \"user\", content: filtered } : null;\n}\n\nfunction convertAssistantMessage(msg: AssistantMessage): unknown[] {\n\tconst output: unknown[] = [];\n\n\tfor (const block of msg.content) {\n\t\tif (block.type === \"thinking\" && msg.stopReason !== \"error\" && block.thinkingSignature) {\n\t\t\toutput.push(JSON.parse(block.thinkingSignature));\n\t\t} else if (block.type === \"text\") {\n\t\t\toutput.push({\n\t\t\t\ttype: \"message\",\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(block.text), annotations: [] }],\n\t\t\t\tstatus: \"completed\",\n\t\t\t});\n\t\t} else if (block.type === \"toolCall\" && msg.stopReason !== \"error\") {\n\t\t\tconst [callId, id] = block.id.split(\"|\");\n\t\t\toutput.push({\n\t\t\t\ttype: \"function_call\",\n\t\t\t\tid,\n\t\t\t\tcall_id: callId,\n\t\t\t\tname: block.name,\n\t\t\t\targuments: JSON.stringify(block.arguments),\n\t\t\t});\n\t\t}\n\t}\n\n\treturn output;\n}\n\nfunction convertToolResult(\n\tmsg: { toolCallId: string; content: Array<{ type: string; text?: string; mimeType?: string; data?: string }> },\n\tmodel: Model<\"openai-codex-responses\">,\n): unknown[] {\n\tconst output: unknown[] = [];\n\tconst textResult = msg.content\n\t\t.filter((c) => c.type === \"text\")\n\t\t.map((c) => c.text || \"\")\n\t\t.join(\"\\n\");\n\tconst hasImages = msg.content.some((c) => c.type === \"image\");\n\n\toutput.push({\n\t\ttype: \"function_call_output\",\n\t\tcall_id: msg.toolCallId.split(\"|\")[0],\n\t\toutput: sanitizeSurrogates(textResult || \"(see attached image)\"),\n\t});\n\n\tif (hasImages && model.input.includes(\"image\")) {\n\t\tconst imageParts = msg.content\n\t\t\t.filter((c) => c.type === \"image\")\n\t\t\t.map((c) => ({\n\t\t\t\ttype: \"input_image\",\n\t\t\t\tdetail: \"auto\",\n\t\t\t\timage_url: 
`data:${c.mimeType};base64,${c.data}`,\n\t\t\t}));\n\n\t\toutput.push({\n\t\t\trole: \"user\",\n\t\t\tcontent: [{ type: \"input_text\", text: \"Attached image(s) from tool result:\" }, ...imageParts],\n\t\t});\n\t}\n\n\treturn output;\n}\n\n// ============================================================================\n// Response Processing\n// ============================================================================\n\nasync function processStream(\n\tresponse: Response,\n\toutput: AssistantMessage,\n\tstream: AssistantMessageEventStream,\n\tmodel: Model<\"openai-codex-responses\">,\n): Promise<void> {\n\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\tconst blockIndex = () => output.content.length - 1;\n\n\tfor await (const event of parseSSE(response)) {\n\t\tconst type = event.type as string;\n\n\t\tswitch (type) {\n\t\t\tcase \"response.output_item.added\": {\n\t\t\t\tconst item = event.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t} else if (item.type === \"message\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t\t};\n\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_part.added\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\") {\n\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\tcurrentItem.summary.push((event as { part: ResponseReasoningItem[\"summary\"][number] }).part);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_text.delta\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\tconst lastPart = currentItem.summary?.[currentItem.summary.length - 1];\n\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.reasoning_summary_part.done\": {\n\t\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tconst lastPart = currentItem.summary?.[currentItem.summary.length - 1];\n\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\t\tstream.push({ type: \"thinking_delta\", contentIndex: blockIndex(), delta: \"\\n\\n\", 
partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.content_part.added\": {\n\t\t\t\tif (currentItem?.type === \"message\") {\n\t\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t\tconst part = (event as { part?: ResponseOutputMessage[\"content\"][number] }).part;\n\t\t\t\t\tif (part && (part.type === \"output_text\" || part.type === \"refusal\")) {\n\t\t\t\t\t\tcurrentItem.content.push(part);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.output_text.delta\": {\n\t\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\tif (lastPart?.type === \"output_text\") {\n\t\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.refusal.delta\": {\n\t\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\tif (lastPart?.type === \"refusal\") {\n\t\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\tlastPart.refusal += delta;\n\t\t\t\t\t\tstream.push({ type: \"text_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.function_call_arguments.delta\": {\n\t\t\t\tif (currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\t\tconst delta = (event as { delta?: string }).delta || \"\";\n\t\t\t\t\tcurrentBlock.partialJson += delta;\n\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\t\tstream.push({ type: \"toolcall_delta\", contentIndex: blockIndex(), delta, partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.output_item.done\": {\n\t\t\t\tconst item = event.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? 
c.text : c.refusal)).join(\"\");\n\t\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\targuments: JSON.parse(item.arguments),\n\t\t\t\t\t};\n\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"response.completed\":\n\t\t\tcase \"response.done\": {\n\t\t\t\tconst resp = (\n\t\t\t\t\tevent as {\n\t\t\t\t\t\tresponse?: {\n\t\t\t\t\t\t\tusage?: {\n\t\t\t\t\t\t\t\tinput_tokens?: number;\n\t\t\t\t\t\t\t\toutput_tokens?: number;\n\t\t\t\t\t\t\t\ttotal_tokens?: number;\n\t\t\t\t\t\t\t\tinput_tokens_details?: { cached_tokens?: number };\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\tstatus?: string;\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t).response;\n\t\t\t\tif (resp?.usage) {\n\t\t\t\t\tconst cached = resp.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\tinput: (resp.usage.input_tokens || 0) - cached,\n\t\t\t\t\t\toutput: resp.usage.output_tokens || 0,\n\t\t\t\t\t\tcacheRead: cached,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\ttotalTokens: resp.usage.total_tokens || 0,\n\t\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\t\t\t\toutput.stopReason = mapStopReason(resp?.status);\n\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tcase \"error\": {\n\t\t\t\tconst code = (event as { code?: string }).code || \"\";\n\t\t\t\tconst message = (event as { message?: string }).message || \"\";\n\t\t\t\tthrow new Error(`Codex error: ${message || code || JSON.stringify(event)}`);\n\t\t\t}\n\n\t\t\tcase \"response.failed\": {\n\t\t\t\tconst msg = (event as { response?: { error?: { message?: string } } }).response?.error?.message;\n\t\t\t\tthrow new Error(msg || \"Codex response failed\");\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunction mapStopReason(status?: string): StopReason {\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n\n// ============================================================================\n// SSE Parsing\n// ============================================================================\n\nasync function* parseSSE(response: Response): AsyncGenerator<Record<string, unknown>> {\n\tif (!response.body) return;\n\n\tconst reader = response.body.getReader();\n\tconst decoder = new TextDecoder();\n\tlet buffer = \"\";\n\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tbuffer += decoder.decode(value, { stream: true });\n\n\t\tlet idx = buffer.indexOf(\"\\n\\n\");\n\t\twhile (idx !== -1) {\n\t\t\tconst chunk = buffer.slice(0, idx);\n\t\t\tbuffer = buffer.slice(idx + 2);\n\n\t\t\tconst dataLines = chunk\n\t\t\t\t.split(\"\\n\")\n\t\t\t\t.filter((l) => l.startsWith(\"data:\"))\n\t\t\t\t.map((l) => 
l.slice(5).trim());\n\t\t\tif (dataLines.length > 0) {\n\t\t\t\tconst data = dataLines.join(\"\\n\").trim();\n\t\t\t\tif (data && data !== \"[DONE]\") {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tyield JSON.parse(data);\n\t\t\t\t\t} catch {}\n\t\t\t\t}\n\t\t\t}\n\t\t\tidx = buffer.indexOf(\"\\n\\n\");\n\t\t}\n\t}\n}\n\n// ============================================================================\n// Error Handling\n// ============================================================================\n\nasync function parseErrorResponse(response: Response): Promise<{ message: string; friendlyMessage?: string }> {\n\tconst raw = await response.text();\n\tlet message = raw || response.statusText || \"Request failed\";\n\tlet friendlyMessage: string | undefined;\n\n\ttry {\n\t\tconst parsed = JSON.parse(raw) as {\n\t\t\terror?: { code?: string; type?: string; message?: string; plan_type?: string; resets_at?: number };\n\t\t};\n\t\tconst err = parsed?.error;\n\t\tif (err) {\n\t\t\tconst code = err.code || err.type || \"\";\n\t\t\tif (/usage_limit_reached|usage_not_included|rate_limit_exceeded/i.test(code) || response.status === 429) {\n\t\t\t\tconst plan = err.plan_type ? ` (${err.plan_type.toLowerCase()} plan)` : \"\";\n\t\t\t\tconst mins = err.resets_at\n\t\t\t\t\t? Math.max(0, Math.round((err.resets_at * 1000 - Date.now()) / 60000))\n\t\t\t\t\t: undefined;\n\t\t\t\tconst when = mins !== undefined ? ` Try again in ~${mins} min.` : \"\";\n\t\t\t\tfriendlyMessage = `You have hit your ChatGPT usage limit${plan}.${when}`.trim();\n\t\t\t}\n\t\t\tmessage = err.message || friendlyMessage || message;\n\t\t}\n\t} catch {}\n\n\treturn { message, friendlyMessage };\n}\n\n// ============================================================================\n// Auth & Headers\n// ============================================================================\n\nfunction extractAccountId(token: string): string {\n\ttry {\n\t\tconst parts = token.split(\".\");\n\t\tif (parts.length !== 3) throw new Error(\"Invalid token\");\n\t\tconst payload = JSON.parse(Buffer.from(parts[1], \"base64\").toString(\"utf-8\"));\n\t\tconst accountId = payload?.[JWT_CLAIM_PATH]?.chatgpt_account_id;\n\t\tif (!accountId) throw new Error(\"No account ID in token\");\n\t\treturn accountId;\n\t} catch {\n\t\tthrow new Error(\"Failed to extract accountId from token\");\n\t}\n}\n\nfunction buildHeaders(\n\tinitHeaders: Record<string, string> | undefined,\n\taccountId: string,\n\ttoken: string,\n\tsessionId?: string,\n): Headers {\n\tconst headers = new Headers(initHeaders);\n\theaders.set(\"Authorization\", `Bearer ${token}`);\n\theaders.set(\"chatgpt-account-id\", accountId);\n\theaders.set(\"OpenAI-Beta\", \"responses=experimental\");\n\theaders.set(\"originator\", \"pi\");\n\theaders.set(\"User-Agent\", `pi (${os.platform()} ${os.release()}; ${os.arch()})`);\n\theaders.set(\"accept\", \"text/event-stream\");\n\theaders.set(\"content-type\", \"application/json\");\n\n\tif (sessionId) {\n\t\theaders.set(\"session_id\", sessionId);\n\t}\n\n\treturn headers;\n}\n"]}
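The bundled provider source above streams Codex SSE events into an AssistantMessage: reasoning, message, and function_call items become thinking/text/toolCall blocks, and the final response.completed / response.done event supplies usage counts and the stop status. The sketch below illustrates only that final bookkeeping step, using the field names visible in the source; the event and usage shapes are assumptions, not the provider's full types. Cached input tokens are split out of input_tokens, and a completed response that produced tool calls is reported as "toolUse" rather than a plain "stop".

// Sketch only: shapes assumed from the embedded provider source above.
type CodexUsage = {
	input_tokens?: number;
	output_tokens?: number;
	total_tokens?: number;
	input_tokens_details?: { cached_tokens?: number };
};

// Cached tokens are reported inside input_tokens, so the mapping splits them
// out: "input" keeps the uncached remainder, "cacheRead" carries the cached part.
function mapUsage(u: CodexUsage) {
	const cached = u.input_tokens_details?.cached_tokens ?? 0;
	return {
		input: (u.input_tokens ?? 0) - cached,
		output: u.output_tokens ?? 0,
		cacheRead: cached,
		cacheWrite: 0,
		totalTokens: u.total_tokens ?? 0,
	};
}

// "completed" -> stop, "incomplete" -> length, "failed"/"cancelled" -> error;
// a "stop" that produced tool calls is promoted to "toolUse".
function finalStopReason(status: string | undefined, hasToolCalls: boolean) {
	const base =
		status === "incomplete" ? "length" : status === "failed" || status === "cancelled" ? "error" : "stop";
	return base === "stop" && hasToolCalls ? "toolUse" : base;
}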
@@ -1,5 +1,4 @@
  import os from "node:os";
- import { PI_STATIC_INSTRUCTIONS } from "../constants.js";
  import { calculateCost } from "../models.js";
  import { getEnvApiKey } from "../stream.js";
  import { AssistantMessageEventStream } from "../utils/event-stream.js";
@@ -141,20 +140,13 @@ export const streamOpenAICodexResponses = (model, context, options) => {
  // Request Building
  // ============================================================================
  function buildRequestBody(model, context, options) {
- 	const systemPrompt = buildSystemPrompt(context.systemPrompt);
  	const messages = convertMessages(model, context);
- 	// Prepend developer messages
- 	const developerMessages = systemPrompt.developerMessages.map((text) => ({
- 		type: "message",
- 		role: "developer",
- 		content: [{ type: "input_text", text }],
- 	}));
  	const body = {
  		model: model.id,
  		store: false,
  		stream: true,
- 		instructions: systemPrompt.instructions,
- 		input: [...developerMessages, ...messages],
+ 		instructions: context.systemPrompt,
+ 		input: messages,
  		text: { verbosity: options?.textVerbosity || "medium" },
  		include: ["reasoning.encrypted_content"],
  		prompt_cache_key: options?.sessionId,
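The hunk above is the core of the 0.48.0 → 0.49.0 change: the request body now takes the caller's context.systemPrompt verbatim as instructions and sends the converted messages as input, instead of routing only a static prefix through instructions and the remainder through synthetic developer messages (the removed helper follows in the next hunk). A rough sketch of the new shape, where "messages" stands in for the result of convertMessages(model, context) and anything not visible in the diff is an assumption:

// Sketch of the 0.49.0 request body, following the hunk above; "messages"
// replaces the convertMessages(model, context) call, which is not shown here.
function buildCodexRequestBody(
	model: { id: string },
	context: { systemPrompt?: string },
	messages: unknown[],
	options?: { textVerbosity?: string; sessionId?: string },
) {
	return {
		model: model.id,
		store: false,
		stream: true,
		instructions: context.systemPrompt, // 0.48.0: only the whitelisted static prefix
		input: messages, // 0.48.0: [...developerMessages, ...messages]
		text: { verbosity: options?.textVerbosity || "medium" },
		include: ["reasoning.encrypted_content"],
		prompt_cache_key: options?.sessionId,
	};
}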
@@ -181,21 +173,6 @@ function buildRequestBody(model, context, options) {
  	}
  	return body;
  }
- function buildSystemPrompt(userSystemPrompt) {
- 	// PI_STATIC_INSTRUCTIONS is whitelisted and must be in the instructions field.
- 	// User's system prompt goes in developer messages, with the static prefix stripped.
- 	const staticPrefix = PI_STATIC_INSTRUCTIONS.trim();
- 	const developerMessages = [];
- 	if (userSystemPrompt?.trim()) {
- 		let dynamicPart = userSystemPrompt.trim();
- 		if (dynamicPart.startsWith(staticPrefix)) {
- 			dynamicPart = dynamicPart.slice(staticPrefix.length).trim();
- 		}
- 		if (dynamicPart)
- 			developerMessages.push(dynamicPart);
- 	}
- 	return { instructions: staticPrefix, developerMessages };
- }
  function clampReasoningEffort(modelId, effort) {
  	const id = modelId.includes("/") ? modelId.split("/").pop() : modelId;
  	if (id.startsWith("gpt-5.2") && effort === "minimal")