@mariozechner/pi-ai 0.50.1 → 0.50.2
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in that registry.
- package/README.md +13 -0
- package/dist/env-api-keys.d.ts.map +1 -1
- package/dist/env-api-keys.js +1 -0
- package/dist/env-api-keys.js.map +1 -1
- package/dist/models.generated.d.ts +439 -17
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +414 -23
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/anthropic.d.ts.map +1 -1
- package/dist/providers/anthropic.js +38 -9
- package/dist/providers/anthropic.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +12 -0
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/providers/openai-responses-shared.d.ts.map +1 -1
- package/dist/providers/openai-responses-shared.js +5 -2
- package/dist/providers/openai-responses-shared.js.map +1 -1
- package/dist/providers/openai-responses.d.ts.map +1 -1
- package/dist/providers/openai-responses.js +14 -0
- package/dist/providers/openai-responses.js.map +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts +2 -2
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +7 -7
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +1 -1
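Most of the churn below is in generated source maps. The largest hunk replaces `package/dist/providers/openai-completions.d.ts.map` wholesale; because a source map embeds the original TypeScript in its `sourcesContent` array, the full provider source is visible on both sides of the diff. As a reading aid, here is a minimal sketch of how that embedded source can be dumped for comparison — a hypothetical helper script, not part of the package, assuming Node.js and the dist layout listed above:

```ts
// dump-sources.ts — hypothetical helper, not shipped with @mariozechner/pi-ai.
// A source map v3 file is JSON with a "sources" array and an optional
// parallel "sourcesContent" array; printing the latter recovers the
// original TypeScript shown in the hunks below.
import { readFileSync } from "node:fs";

const mapPath = process.argv[2] ?? "package/dist/providers/openai-completions.d.ts.map";
const map = JSON.parse(readFileSync(mapPath, "utf8")) as {
	version: number;
	file: string;
	sources: string[];
	sourcesContent?: string[];
};

for (const [i, source] of map.sources.entries()) {
	console.log(`--- ${source} ---`);
	console.log(map.sourcesContent?.[i] ?? "(no embedded source)");
}
```

Running this against the old and new `.d.ts.map` and diffing the output is what makes the actual TypeScript change between 0.50.1 and 0.50.2 legible.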
package/dist/providers/openai-completions.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"openai-completions.d.ts","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAMX,0BAA0B,EAE1B,MAAM,sCAAsC,CAAC;AAG9C,OAAO,KAAK,EAEX,OAAO,EAEP,KAAK,EACL,uBAAuB,EACvB,mBAAmB,EAEnB,cAAc,EACd,aAAa,EAMb,MAAM,aAAa,CAAC;AA4CrB,MAAM,WAAW,wBAAyB,SAAQ,aAAa;IAC9D,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,UAAU,GAAG;QAAE,IAAI,EAAE,UAAU,CAAC;QAAC,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE,CAAC;IAC7F,eAAe,CAAC,EAAE,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;CAClE;AAED,eAAO,MAAM,uBAAuB,EAAE,cAAc,CAAC,oBAAoB,EAAE,wBAAwB,CAqPlG,CAAC;AAEF,eAAO,MAAM,6BAA6B,EAAE,cAAc,CAAC,oBAAoB,EAAE,mBAAmB,CAiBnG,CAAC;AAoJF,wBAAgB,eAAe,CAC9B,KAAK,EAAE,KAAK,CAAC,oBAAoB,CAAC,EAClC,OAAO,EAAE,OAAO,EAChB,MAAM,EAAE,QAAQ,CAAC,uBAAuB,CAAC,GACvC,0BAA0B,EAAE,CA2N9B","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletionAssistantMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionMessageParam,\n\tChatCompletionToolMessageParam,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, supportsXhigh } from \"../models.js\";\nimport type {\n\tAssistantMessage,\n\tContext,\n\tMessage,\n\tModel,\n\tOpenAICompletionsCompat,\n\tSimpleStreamOptions,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { buildBaseOptions, clampReasoning } from \"./simple-options.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n/**\n * Normalize tool call ID for Mistral.\n * Mistral requires tool IDs to be exactly 9 alphanumeric characters (a-z, A-Z, 0-9).\n */\nfunction normalizeMistralToolId(id: string): string {\n\t// Remove non-alphanumeric characters\n\tlet normalized = id.replace(/[^a-zA-Z0-9]/g, \"\");\n\t// Mistral requires exactly 9 characters\n\tif (normalized.length < 9) {\n\t\t// Pad with deterministic characters based on original ID to ensure matching\n\t\tconst padding = \"ABCDEFGHI\";\n\t\tnormalized = normalized + padding.slice(0, 9 - normalized.length);\n\t} else if (normalized.length > 9) {\n\t\tnormalized = normalized.slice(0, 9);\n\t}\n\treturn normalized;\n}\n\n/**\n * Check if conversation messages contain tool calls or tool results.\n * This is needed because Anthropic (via proxy) requires the tools param\n * to be present when messages include tool_calls or tool role messages.\n */\nfunction hasToolHistory(messages: Message[]): boolean {\n\tfor (const msg of messages) {\n\t\tif (msg.role === \"toolResult\") {\n\t\t\treturn true;\n\t\t}\n\t\tif (msg.role === \"assistant\") {\n\t\t\tif (msg.content.some((block) => block.type === \"toolCall\")) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n}\n\nexport interface OpenAICompletionsOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"required\" | { type: \"function\"; function: { name: string } };\n\treasoningEffort?: \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n}\n\nexport const streamOpenAICompletions: StreamFunction<\"openai-completions\", OpenAICompletionsOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: 
Context,\n\toptions?: OpenAICompletionsOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: model.api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tconst client = createClient(model, context, apiKey, options?.headers);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\toptions?.onPayload?.(params);\n\t\t\tconst openaiStream = await client.chat.completions.create(params, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentBlock: TextContent | ThinkingContent | (ToolCall & { partialArgs?: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tconst finishCurrentBlock = (block?: typeof currentBlock) => {\n\t\t\t\tif (block) {\n\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\tblock.arguments = JSON.parse(block.partialArgs || \"{}\");\n\t\t\t\t\t\tdelete block.partialArgs;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tfor await (const chunk of openaiStream) {\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tconst cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0;\n\t\t\t\t\tconst reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0;\n\t\t\t\t\tconst input = (chunk.usage.prompt_tokens || 0) - cachedTokens;\n\t\t\t\t\tconst outputTokens = (chunk.usage.completion_tokens || 0) + reasoningTokens;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t// OpenAI includes cached tokens in prompt_tokens, so subtract to get non-cached input\n\t\t\t\t\t\tinput,\n\t\t\t\t\t\toutput: outputTokens,\n\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t// Compute totalTokens ourselves since we add reasoning_tokens to output\n\t\t\t\t\t\t// and some providers (e.g., Groq) don't include them in total_tokens\n\t\t\t\t\t\ttotalTokens: input + outputTokens + cachedTokens,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\n\t\t\t\tconst choice = chunk.choices[0];\n\t\t\t\tif (!choice) continue;\n\n\t\t\t\tif (choice.finish_reason) {\n\t\t\t\t\toutput.stopReason = 
mapStopReason(choice.finish_reason);\n\t\t\t\t}\n\n\t\t\t\tif (choice.delta) {\n\t\t\t\t\tif (\n\t\t\t\t\t\tchoice.delta.content !== null &&\n\t\t\t\t\t\tchoice.delta.content !== undefined &&\n\t\t\t\t\t\tchoice.delta.content.length > 0\n\t\t\t\t\t) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\tcurrentBlock.text += choice.delta.content;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: choice.delta.content,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// Some endpoints return reasoning in reasoning_content (llama.cpp),\n\t\t\t\t\t// or reasoning (other openai compatible endpoints)\n\t\t\t\t\t// Use the first non-empty reasoning field to avoid duplication\n\t\t\t\t\t// (e.g., chutes.ai returns both reasoning_content and reasoning with same content)\n\t\t\t\t\tconst reasoningFields = [\"reasoning_content\", \"reasoning\", \"reasoning_text\"];\n\t\t\t\t\tlet foundReasoningField: string | null = null;\n\t\t\t\t\tfor (const field of reasoningFields) {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== null &&\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== undefined &&\n\t\t\t\t\t\t\t(choice.delta as any)[field].length > 0\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tif (!foundReasoningField) {\n\t\t\t\t\t\t\t\tfoundReasoningField = field;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (foundReasoningField) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"thinking\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\t\tthinkingSignature: foundReasoningField,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\tconst delta = (choice.delta as any)[foundReasoningField];\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (choice?.delta?.tool_calls) {\n\t\t\t\t\t\tfor (const toolCall of choice.delta.tool_calls) {\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\tcurrentBlock.type !== \"toolCall\" ||\n\t\t\t\t\t\t\t\t(toolCall.id && currentBlock.id !== toolCall.id)\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\t\tid: toolCall.id || \"\",\n\t\t\t\t\t\t\t\t\tname: toolCall.function?.name || \"\",\n\t\t\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\t\t\tpartialArgs: \"\",\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tif 
(currentBlock.type === \"toolCall\") {\n\t\t\t\t\t\t\t\tif (toolCall.id) currentBlock.id = toolCall.id;\n\t\t\t\t\t\t\t\tif (toolCall.function?.name) currentBlock.name = toolCall.function.name;\n\t\t\t\t\t\t\t\tlet delta = \"\";\n\t\t\t\t\t\t\t\tif (toolCall.function?.arguments) {\n\t\t\t\t\t\t\t\t\tdelta = toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.partialArgs += toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialArgs);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tconst reasoningDetails = (choice.delta as any).reasoning_details;\n\t\t\t\t\tif (reasoningDetails && Array.isArray(reasoningDetails)) {\n\t\t\t\t\t\tfor (const detail of reasoningDetails) {\n\t\t\t\t\t\t\tif (detail.type === \"reasoning.encrypted\" && detail.id && detail.data) {\n\t\t\t\t\t\t\t\tconst matchingToolCall = output.content.find(\n\t\t\t\t\t\t\t\t\t(b) => b.type === \"toolCall\" && b.id === detail.id,\n\t\t\t\t\t\t\t\t) as ToolCall | undefined;\n\t\t\t\t\t\t\t\tif (matchingToolCall) {\n\t\t\t\t\t\t\t\t\tmatchingToolCall.thoughtSignature = JSON.stringify(detail);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfinishCurrentBlock(currentBlock);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\t// Some providers via OpenRouter give additional information in this field.\n\t\t\tconst rawMetadata = (error as any)?.error?.metadata?.raw;\n\t\t\tif (rawMetadata) output.errorMessage += `\\n${rawMetadata}`;\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nexport const streamSimpleOpenAICompletions: StreamFunction<\"openai-completions\", SimpleStreamOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tconst reasoningEffort = supportsXhigh(model) ? options?.reasoning : clampReasoning(options?.reasoning);\n\n\treturn streamOpenAICompletions(model, context, {\n\t\t...base,\n\t\treasoningEffort,\n\t} satisfies OpenAICompletionsOptions);\n};\n\nfunction createClient(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tapiKey?: string,\n\toptionsHeaders?: Record<string, string>,\n) {\n\tif (!apiKey) {\n\t\tif (!process.env.OPENAI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"OpenAI API key is required. 
Set OPENAI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.OPENAI_API_KEY;\n\t}\n\n\tconst headers = { ...model.headers };\n\tif (model.provider === \"github-copilot\") {\n\t\t// Copilot expects X-Initiator to indicate whether the request is user-initiated\n\t\t// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is\n\t\t// no prior message, default to user-initiated.\n\t\tconst messages = context.messages || [];\n\t\tconst lastMessage = messages[messages.length - 1];\n\t\tconst isAgentCall = lastMessage ? lastMessage.role !== \"user\" : false;\n\t\theaders[\"X-Initiator\"] = isAgentCall ? \"agent\" : \"user\";\n\t\theaders[\"Openai-Intent\"] = \"conversation-edits\";\n\n\t\t// Copilot requires this header when sending images\n\t\tconst hasImages = messages.some((msg) => {\n\t\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\treturn false;\n\t\t});\n\t\tif (hasImages) {\n\t\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t\t}\n\t}\n\n\t// Merge options headers last so they can override defaults\n\tif (optionsHeaders) {\n\t\tObject.assign(headers, optionsHeaders);\n\t}\n\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: headers,\n\t});\n}\n\nfunction buildParams(model: Model<\"openai-completions\">, context: Context, options?: OpenAICompletionsOptions) {\n\tconst compat = getCompat(model);\n\tconst messages = convertMessages(model, context, compat);\n\tmaybeAddOpenRouterAnthropicCacheControl(model, messages);\n\n\tconst params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages,\n\t\tstream: true,\n\t};\n\n\tif (compat.supportsUsageInStreaming !== false) {\n\t\t(params as any).stream_options = { include_usage: true };\n\t}\n\n\tif (compat.supportsStore) {\n\t\tparams.store = false;\n\t}\n\n\tif (options?.maxTokens) {\n\t\tif (compat.maxTokensField === \"max_tokens\") {\n\t\t\t(params as any).max_tokens = options.maxTokens;\n\t\t} else {\n\t\t\tparams.max_completion_tokens = options.maxTokens;\n\t\t}\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools);\n\t} else if (hasToolHistory(context.messages)) {\n\t\t// Anthropic (via LiteLLM/proxy) requires tools param when conversation has tool_calls/tool_results\n\t\tparams.tools = [];\n\t}\n\n\tif (options?.toolChoice) {\n\t\tparams.tool_choice = options.toolChoice;\n\t}\n\n\tif (compat.thinkingFormat === \"zai\" && model.reasoning) {\n\t\t// Z.ai uses binary thinking: { type: \"enabled\" | \"disabled\" }\n\t\t// Must explicitly disable since z.ai defaults to thinking enabled\n\t\t(params as any).thinking = { type: options?.reasoningEffort ? 
\"enabled\" : \"disabled\" };\n\t} else if (options?.reasoningEffort && model.reasoning && compat.supportsReasoningEffort) {\n\t\t// OpenAI-style reasoning_effort\n\t\tparams.reasoning_effort = options.reasoningEffort;\n\t}\n\n\t// OpenRouter provider routing preferences\n\tif (model.baseUrl.includes(\"openrouter.ai\") && model.compat?.openRouterRouting) {\n\t\t(params as any).provider = model.compat.openRouterRouting;\n\t}\n\n\treturn params;\n}\n\nfunction maybeAddOpenRouterAnthropicCacheControl(\n\tmodel: Model<\"openai-completions\">,\n\tmessages: ChatCompletionMessageParam[],\n): void {\n\tif (model.provider !== \"openrouter\" || !model.id.startsWith(\"anthropic/\")) return;\n\n\t// Anthropic-style caching requires cache_control on a text part. Add a breakpoint\n\t// on the last user/assistant message (walking backwards until we find text content).\n\tfor (let i = messages.length - 1; i >= 0; i--) {\n\t\tconst msg = messages[i];\n\t\tif (msg.role !== \"user\" && msg.role !== \"assistant\") continue;\n\n\t\tconst content = msg.content;\n\t\tif (typeof content === \"string\") {\n\t\t\tmsg.content = [\n\t\t\t\tObject.assign({ type: \"text\" as const, text: content }, { cache_control: { type: \"ephemeral\" } }),\n\t\t\t];\n\t\t\treturn;\n\t\t}\n\n\t\tif (!Array.isArray(content)) continue;\n\n\t\t// Find last text part and add cache_control\n\t\tfor (let j = content.length - 1; j >= 0; j--) {\n\t\t\tconst part = content[j];\n\t\t\tif (part?.type === \"text\") {\n\t\t\t\tObject.assign(part, { cache_control: { type: \"ephemeral\" } });\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n}\n\nexport function convertMessages(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tcompat: Required<OpenAICompletionsCompat>,\n): ChatCompletionMessageParam[] {\n\tconst params: ChatCompletionMessageParam[] = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (compat.requiresMistralToolIds) return normalizeMistralToolId(id);\n\t\tif (model.provider === \"openai\") return id.length > 40 ? id.slice(0, 40) : id;\n\t\t// Copilot Claude models route to Claude backend which requires Anthropic ID format\n\t\tif (model.provider === \"github-copilot\" && model.id.toLowerCase().includes(\"claude\")) {\n\t\t\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 64);\n\t\t}\n\t\treturn id;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, (id) => normalizeToolCallId(id));\n\n\tif (context.systemPrompt) {\n\t\tconst useDeveloperRole = model.reasoning && compat.supportsDeveloperRole;\n\t\tconst role = useDeveloperRole ? \"developer\" : \"system\";\n\t\tparams.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });\n\t}\n\n\tlet lastRole: string | null = null;\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\t\t// Some providers (e.g. 
Mistral/Devstral) don't allow user messages directly after tool results\n\t\t// Insert a synthetic assistant message to bridge the gap\n\t\tif (compat.requiresAssistantAfterToolResult && lastRole === \"toolResult\" && msg.role === \"user\") {\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t});\n\t\t}\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ChatCompletionContentPart[] = msg.content.map((item): ChatCompletionContentPart => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartImage;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? content.filter((c) => c.type !== \"image_url\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\t// Some providers (e.g. Mistral) don't accept null content, use empty string instead\n\t\t\tconst assistantMsg: ChatCompletionAssistantMessageParam = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: compat.requiresAssistantAfterToolResult ? \"\" : null,\n\t\t\t};\n\n\t\t\tconst textBlocks = msg.content.filter((b) => b.type === \"text\") as TextContent[];\n\t\t\t// Filter out empty text blocks to avoid API validation errors\n\t\t\tconst nonEmptyTextBlocks = textBlocks.filter((b) => b.text && b.text.trim().length > 0);\n\t\t\tif (nonEmptyTextBlocks.length > 0) {\n\t\t\t\t// GitHub Copilot requires assistant content as a string, not an array.\n\t\t\t\t// Sending as array causes Claude models to re-answer all previous prompts.\n\t\t\t\tif (model.provider === \"github-copilot\") {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => sanitizeSurrogates(b.text)).join(\"\");\n\t\t\t\t} else {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => {\n\t\t\t\t\t\treturn { type: \"text\", text: sanitizeSurrogates(b.text) };\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle thinking blocks\n\t\t\tconst thinkingBlocks = msg.content.filter((b) => b.type === \"thinking\") as ThinkingContent[];\n\t\t\t// Filter out empty thinking blocks to avoid API validation errors\n\t\t\tconst nonEmptyThinkingBlocks = thinkingBlocks.filter((b) => b.thinking && b.thinking.trim().length > 0);\n\t\t\tif (nonEmptyThinkingBlocks.length > 0) {\n\t\t\t\tif (compat.requiresThinkingAsText) {\n\t\t\t\t\t// Convert thinking blocks to plain text (no tags to avoid model mimicking them)\n\t\t\t\t\tconst thinkingText = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\\n\");\n\t\t\t\t\tconst textContent = assistantMsg.content as Array<{ type: \"text\"; text: string }> | null;\n\t\t\t\t\tif (textContent) {\n\t\t\t\t\t\ttextContent.unshift({ type: \"text\", text: thinkingText });\n\t\t\t\t\t} else {\n\t\t\t\t\t\tassistantMsg.content = [{ type: \"text\", text: thinkingText }];\n\t\t\t\t\t}\n\t\t\t\t} 
else {\n\t\t\t\t\t// Use the signature from the first thinking block if available (for llama.cpp server + gpt-oss)\n\t\t\t\t\tconst signature = nonEmptyThinkingBlocks[0].thinkingSignature;\n\t\t\t\t\tif (signature && signature.length > 0) {\n\t\t\t\t\t\t(assistantMsg as any)[signature] = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tconst toolCalls = msg.content.filter((b) => b.type === \"toolCall\") as ToolCall[];\n\t\t\tif (toolCalls.length > 0) {\n\t\t\t\tassistantMsg.tool_calls = toolCalls.map((tc) => ({\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\ttype: \"function\" as const,\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: tc.name,\n\t\t\t\t\t\targuments: JSON.stringify(tc.arguments),\n\t\t\t\t\t},\n\t\t\t\t}));\n\t\t\t\tconst reasoningDetails = toolCalls\n\t\t\t\t\t.filter((tc) => tc.thoughtSignature)\n\t\t\t\t\t.map((tc) => {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\treturn JSON.parse(tc.thoughtSignature!);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\treturn null;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.filter(Boolean);\n\t\t\t\tif (reasoningDetails.length > 0) {\n\t\t\t\t\t(assistantMsg as any).reasoning_details = reasoningDetails;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Skip assistant messages that have no content and no tool calls.\n\t\t\t// Mistral explicitly requires \"either content or tool_calls, but not none\".\n\t\t\t// Other providers also don't accept empty assistant messages.\n\t\t\t// This handles aborted assistant responses that got no content.\n\t\t\tconst content = assistantMsg.content;\n\t\t\tconst hasContent =\n\t\t\t\tcontent !== null &&\n\t\t\t\tcontent !== undefined &&\n\t\t\t\t(typeof content === \"string\" ? content.length > 0 : content.length > 0);\n\t\t\tif (!hasContent && !assistantMsg.tool_calls) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tparams.push(assistantMsg);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst imageBlocks: Array<{ type: \"image_url\"; image_url: { url: string } }> = [];\n\t\t\tlet j = i;\n\n\t\t\tfor (; j < transformedMessages.length && transformedMessages[j].role === \"toolResult\"; j++) {\n\t\t\t\tconst toolMsg = transformedMessages[j] as ToolResultMessage;\n\n\t\t\t\t// Extract text and image content\n\t\t\t\tconst textResult = toolMsg.content\n\t\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t\t.join(\"\\n\");\n\t\t\t\tconst hasImages = toolMsg.content.some((c) => c.type === \"image\");\n\n\t\t\t\t// Always send tool result with text (or placeholder if only images)\n\t\t\t\tconst hasText = textResult.length > 0;\n\t\t\t\t// Some providers (e.g. Mistral) require the 'name' field in tool results\n\t\t\t\tconst toolResultMsg: ChatCompletionToolMessageParam = {\n\t\t\t\t\trole: \"tool\",\n\t\t\t\t\tcontent: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t\t\ttool_call_id: toolMsg.toolCallId,\n\t\t\t\t};\n\t\t\t\tif (compat.requiresToolResultName && toolMsg.toolName) {\n\t\t\t\t\t(toolResultMsg as any).name = toolMsg.toolName;\n\t\t\t\t}\n\t\t\t\tparams.push(toolResultMsg);\n\n\t\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\t\tfor (const block of toolMsg.content) {\n\t\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\t\timageBlocks.push({\n\t\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\t\turl: `data:${(block as any).mimeType};base64,${(block as any).data}`,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ti = j - 1;\n\n\t\t\tif (imageBlocks.length > 0) {\n\t\t\t\tif (compat.requiresAssistantAfterToolResult) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t...imageBlocks,\n\t\t\t\t\t],\n\t\t\t\t});\n\t\t\t\tlastRole = \"user\";\n\t\t\t} else {\n\t\t\t\tlastRole = \"toolResult\";\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\n\t\tlastRole = msg.role;\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[]): OpenAI.Chat.Completions.ChatCompletionTool[] {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\tstrict: false, // Disable strict mode to allow optional parameters without null unions\n\t\t},\n\t}));\n}\n\nfunction mapStopReason(reason: ChatCompletionChunk.Choice[\"finish_reason\"]): StopReason {\n\tif (reason === null) return \"stop\";\n\tswitch (reason) {\n\t\tcase \"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\t\treturn \"length\";\n\t\tcase \"function_call\":\n\t\tcase \"tool_calls\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"content_filter\":\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\n/**\n * Detect compatibility settings from provider and baseUrl for known providers.\n * Provider takes precedence over URL-based detection since it's explicitly configured.\n * Returns a fully resolved OpenAICompletionsCompat object with all fields set.\n */\nfunction detectCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst provider = model.provider;\n\tconst baseUrl = model.baseUrl;\n\n\tconst isZai = provider === \"zai\" || baseUrl.includes(\"api.z.ai\");\n\n\tconst isNonStandard =\n\t\tprovider === \"cerebras\" ||\n\t\tbaseUrl.includes(\"cerebras.ai\") ||\n\t\tprovider === \"xai\" ||\n\t\tbaseUrl.includes(\"api.x.ai\") ||\n\t\tprovider === \"mistral\" ||\n\t\tbaseUrl.includes(\"mistral.ai\") ||\n\t\tbaseUrl.includes(\"chutes.ai\") ||\n\t\tisZai ||\n\t\tprovider === \"opencode\" ||\n\t\tbaseUrl.includes(\"opencode.ai\");\n\n\tconst useMaxTokens = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\") || baseUrl.includes(\"chutes.ai\");\n\n\tconst isGrok = provider === \"xai\" || baseUrl.includes(\"api.x.ai\");\n\n\tconst isMistral = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\");\n\n\treturn {\n\t\tsupportsStore: 
!isNonStandard,\n\t\tsupportsDeveloperRole: !isNonStandard,\n\t\tsupportsReasoningEffort: !isGrok && !isZai,\n\t\tsupportsUsageInStreaming: true,\n\t\tmaxTokensField: useMaxTokens ? \"max_tokens\" : \"max_completion_tokens\",\n\t\trequiresToolResultName: isMistral,\n\t\trequiresAssistantAfterToolResult: false, // Mistral no longer requires this as of Dec 2024\n\t\trequiresThinkingAsText: isMistral,\n\t\trequiresMistralToolIds: isMistral,\n\t\tthinkingFormat: isZai ? \"zai\" : \"openai\",\n\t\topenRouterRouting: {},\n\t};\n}\n\n/**\n * Get resolved compatibility settings for a model.\n * Uses explicit model.compat if provided, otherwise auto-detects from provider/URL.\n */\nfunction getCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst detected = detectCompat(model);\n\tif (!model.compat) return detected;\n\n\treturn {\n\t\tsupportsStore: model.compat.supportsStore ?? detected.supportsStore,\n\t\tsupportsDeveloperRole: model.compat.supportsDeveloperRole ?? detected.supportsDeveloperRole,\n\t\tsupportsReasoningEffort: model.compat.supportsReasoningEffort ?? detected.supportsReasoningEffort,\n\t\tsupportsUsageInStreaming: model.compat.supportsUsageInStreaming ?? detected.supportsUsageInStreaming,\n\t\tmaxTokensField: model.compat.maxTokensField ?? detected.maxTokensField,\n\t\trequiresToolResultName: model.compat.requiresToolResultName ?? detected.requiresToolResultName,\n\t\trequiresAssistantAfterToolResult:\n\t\t\tmodel.compat.requiresAssistantAfterToolResult ?? detected.requiresAssistantAfterToolResult,\n\t\trequiresThinkingAsText: model.compat.requiresThinkingAsText ?? detected.requiresThinkingAsText,\n\t\trequiresMistralToolIds: model.compat.requiresMistralToolIds ?? detected.requiresMistralToolIds,\n\t\tthinkingFormat: model.compat.thinkingFormat ?? detected.thinkingFormat,\n\t\topenRouterRouting: model.compat.openRouterRouting ?? {},\n\t};\n}\n"]}
+
{"version":3,"file":"openai-completions.d.ts","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAMX,0BAA0B,EAE1B,MAAM,sCAAsC,CAAC;AAG9C,OAAO,KAAK,EAEX,OAAO,EAEP,KAAK,EACL,uBAAuB,EACvB,mBAAmB,EAEnB,cAAc,EACd,aAAa,EAMb,MAAM,aAAa,CAAC;AA4CrB,MAAM,WAAW,wBAAyB,SAAQ,aAAa;IAC9D,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,UAAU,GAAG;QAAE,IAAI,EAAE,UAAU,CAAC;QAAC,QAAQ,EAAE;YAAE,IAAI,EAAE,MAAM,CAAA;SAAE,CAAA;KAAE,CAAC;IAC7F,eAAe,CAAC,EAAE,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;CAClE;AAED,eAAO,MAAM,uBAAuB,EAAE,cAAc,CAAC,oBAAoB,EAAE,wBAAwB,CAqPlG,CAAC;AAEF,eAAO,MAAM,6BAA6B,EAAE,cAAc,CAAC,oBAAoB,EAAE,mBAAmB,CAmBnG,CAAC;AAoJF,wBAAgB,eAAe,CAC9B,KAAK,EAAE,KAAK,CAAC,oBAAoB,CAAC,EAClC,OAAO,EAAE,OAAO,EAChB,MAAM,EAAE,QAAQ,CAAC,uBAAuB,CAAC,GACvC,0BAA0B,EAAE,CAsO9B","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletionAssistantMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionMessageParam,\n\tChatCompletionToolMessageParam,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, supportsXhigh } from \"../models.js\";\nimport type {\n\tAssistantMessage,\n\tContext,\n\tMessage,\n\tModel,\n\tOpenAICompletionsCompat,\n\tSimpleStreamOptions,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { buildBaseOptions, clampReasoning } from \"./simple-options.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n/**\n * Normalize tool call ID for Mistral.\n * Mistral requires tool IDs to be exactly 9 alphanumeric characters (a-z, A-Z, 0-9).\n */\nfunction normalizeMistralToolId(id: string): string {\n\t// Remove non-alphanumeric characters\n\tlet normalized = id.replace(/[^a-zA-Z0-9]/g, \"\");\n\t// Mistral requires exactly 9 characters\n\tif (normalized.length < 9) {\n\t\t// Pad with deterministic characters based on original ID to ensure matching\n\t\tconst padding = \"ABCDEFGHI\";\n\t\tnormalized = normalized + padding.slice(0, 9 - normalized.length);\n\t} else if (normalized.length > 9) {\n\t\tnormalized = normalized.slice(0, 9);\n\t}\n\treturn normalized;\n}\n\n/**\n * Check if conversation messages contain tool calls or tool results.\n * This is needed because Anthropic (via proxy) requires the tools param\n * to be present when messages include tool_calls or tool role messages.\n */\nfunction hasToolHistory(messages: Message[]): boolean {\n\tfor (const msg of messages) {\n\t\tif (msg.role === \"toolResult\") {\n\t\t\treturn true;\n\t\t}\n\t\tif (msg.role === \"assistant\") {\n\t\t\tif (msg.content.some((block) => block.type === \"toolCall\")) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n}\n\nexport interface OpenAICompletionsOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"required\" | { type: \"function\"; function: { name: string } };\n\treasoningEffort?: \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n}\n\nexport const streamOpenAICompletions: StreamFunction<\"openai-completions\", OpenAICompletionsOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: 
Context,\n\toptions?: OpenAICompletionsOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: model.api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tconst client = createClient(model, context, apiKey, options?.headers);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\toptions?.onPayload?.(params);\n\t\t\tconst openaiStream = await client.chat.completions.create(params, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentBlock: TextContent | ThinkingContent | (ToolCall & { partialArgs?: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tconst finishCurrentBlock = (block?: typeof currentBlock) => {\n\t\t\t\tif (block) {\n\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\tblock.arguments = JSON.parse(block.partialArgs || \"{}\");\n\t\t\t\t\t\tdelete block.partialArgs;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tfor await (const chunk of openaiStream) {\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tconst cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0;\n\t\t\t\t\tconst reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0;\n\t\t\t\t\tconst input = (chunk.usage.prompt_tokens || 0) - cachedTokens;\n\t\t\t\t\tconst outputTokens = (chunk.usage.completion_tokens || 0) + reasoningTokens;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t// OpenAI includes cached tokens in prompt_tokens, so subtract to get non-cached input\n\t\t\t\t\t\tinput,\n\t\t\t\t\t\toutput: outputTokens,\n\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t// Compute totalTokens ourselves since we add reasoning_tokens to output\n\t\t\t\t\t\t// and some providers (e.g., Groq) don't include them in total_tokens\n\t\t\t\t\t\ttotalTokens: input + outputTokens + cachedTokens,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\n\t\t\t\tconst choice = chunk.choices[0];\n\t\t\t\tif (!choice) continue;\n\n\t\t\t\tif (choice.finish_reason) {\n\t\t\t\t\toutput.stopReason = 
mapStopReason(choice.finish_reason);\n\t\t\t\t}\n\n\t\t\t\tif (choice.delta) {\n\t\t\t\t\tif (\n\t\t\t\t\t\tchoice.delta.content !== null &&\n\t\t\t\t\t\tchoice.delta.content !== undefined &&\n\t\t\t\t\t\tchoice.delta.content.length > 0\n\t\t\t\t\t) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\tcurrentBlock.text += choice.delta.content;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: choice.delta.content,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// Some endpoints return reasoning in reasoning_content (llama.cpp),\n\t\t\t\t\t// or reasoning (other openai compatible endpoints)\n\t\t\t\t\t// Use the first non-empty reasoning field to avoid duplication\n\t\t\t\t\t// (e.g., chutes.ai returns both reasoning_content and reasoning with same content)\n\t\t\t\t\tconst reasoningFields = [\"reasoning_content\", \"reasoning\", \"reasoning_text\"];\n\t\t\t\t\tlet foundReasoningField: string | null = null;\n\t\t\t\t\tfor (const field of reasoningFields) {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== null &&\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== undefined &&\n\t\t\t\t\t\t\t(choice.delta as any)[field].length > 0\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tif (!foundReasoningField) {\n\t\t\t\t\t\t\t\tfoundReasoningField = field;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (foundReasoningField) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"thinking\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\t\tthinkingSignature: foundReasoningField,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\tconst delta = (choice.delta as any)[foundReasoningField];\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (choice?.delta?.tool_calls) {\n\t\t\t\t\t\tfor (const toolCall of choice.delta.tool_calls) {\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\tcurrentBlock.type !== \"toolCall\" ||\n\t\t\t\t\t\t\t\t(toolCall.id && currentBlock.id !== toolCall.id)\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\t\tid: toolCall.id || \"\",\n\t\t\t\t\t\t\t\t\tname: toolCall.function?.name || \"\",\n\t\t\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\t\t\tpartialArgs: \"\",\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tif 
(currentBlock.type === \"toolCall\") {\n\t\t\t\t\t\t\t\tif (toolCall.id) currentBlock.id = toolCall.id;\n\t\t\t\t\t\t\t\tif (toolCall.function?.name) currentBlock.name = toolCall.function.name;\n\t\t\t\t\t\t\t\tlet delta = \"\";\n\t\t\t\t\t\t\t\tif (toolCall.function?.arguments) {\n\t\t\t\t\t\t\t\t\tdelta = toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.partialArgs += toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialArgs);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tconst reasoningDetails = (choice.delta as any).reasoning_details;\n\t\t\t\t\tif (reasoningDetails && Array.isArray(reasoningDetails)) {\n\t\t\t\t\t\tfor (const detail of reasoningDetails) {\n\t\t\t\t\t\t\tif (detail.type === \"reasoning.encrypted\" && detail.id && detail.data) {\n\t\t\t\t\t\t\t\tconst matchingToolCall = output.content.find(\n\t\t\t\t\t\t\t\t\t(b) => b.type === \"toolCall\" && b.id === detail.id,\n\t\t\t\t\t\t\t\t) as ToolCall | undefined;\n\t\t\t\t\t\t\t\tif (matchingToolCall) {\n\t\t\t\t\t\t\t\t\tmatchingToolCall.thoughtSignature = JSON.stringify(detail);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfinishCurrentBlock(currentBlock);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\t// Some providers via OpenRouter give additional information in this field.\n\t\t\tconst rawMetadata = (error as any)?.error?.metadata?.raw;\n\t\t\tif (rawMetadata) output.errorMessage += `\\n${rawMetadata}`;\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nexport const streamSimpleOpenAICompletions: StreamFunction<\"openai-completions\", SimpleStreamOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tconst reasoningEffort = supportsXhigh(model) ? 
options?.reasoning : clampReasoning(options?.reasoning);\n\tconst toolChoice = (options as OpenAICompletionsOptions | undefined)?.toolChoice;\n\n\treturn streamOpenAICompletions(model, context, {\n\t\t...base,\n\t\treasoningEffort,\n\t\ttoolChoice,\n\t} satisfies OpenAICompletionsOptions);\n};\n\nfunction createClient(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tapiKey?: string,\n\toptionsHeaders?: Record<string, string>,\n) {\n\tif (!apiKey) {\n\t\tif (!process.env.OPENAI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"OpenAI API key is required. Set OPENAI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.OPENAI_API_KEY;\n\t}\n\n\tconst headers = { ...model.headers };\n\tif (model.provider === \"github-copilot\") {\n\t\t// Copilot expects X-Initiator to indicate whether the request is user-initiated\n\t\t// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is\n\t\t// no prior message, default to user-initiated.\n\t\tconst messages = context.messages || [];\n\t\tconst lastMessage = messages[messages.length - 1];\n\t\tconst isAgentCall = lastMessage ? lastMessage.role !== \"user\" : false;\n\t\theaders[\"X-Initiator\"] = isAgentCall ? \"agent\" : \"user\";\n\t\theaders[\"Openai-Intent\"] = \"conversation-edits\";\n\n\t\t// Copilot requires this header when sending images\n\t\tconst hasImages = messages.some((msg) => {\n\t\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\treturn false;\n\t\t});\n\t\tif (hasImages) {\n\t\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t\t}\n\t}\n\n\t// Merge options headers last so they can override defaults\n\tif (optionsHeaders) {\n\t\tObject.assign(headers, optionsHeaders);\n\t}\n\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: headers,\n\t});\n}\n\nfunction buildParams(model: Model<\"openai-completions\">, context: Context, options?: OpenAICompletionsOptions) {\n\tconst compat = getCompat(model);\n\tconst messages = convertMessages(model, context, compat);\n\tmaybeAddOpenRouterAnthropicCacheControl(model, messages);\n\n\tconst params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages,\n\t\tstream: true,\n\t};\n\n\tif (compat.supportsUsageInStreaming !== false) {\n\t\t(params as any).stream_options = { include_usage: true };\n\t}\n\n\tif (compat.supportsStore) {\n\t\tparams.store = false;\n\t}\n\n\tif (options?.maxTokens) {\n\t\tif (compat.maxTokensField === \"max_tokens\") {\n\t\t\t(params as any).max_tokens = options.maxTokens;\n\t\t} else {\n\t\t\tparams.max_completion_tokens = options.maxTokens;\n\t\t}\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools);\n\t} else if (hasToolHistory(context.messages)) {\n\t\t// Anthropic (via LiteLLM/proxy) requires tools param when conversation has tool_calls/tool_results\n\t\tparams.tools = [];\n\t}\n\n\tif (options?.toolChoice) {\n\t\tparams.tool_choice = options.toolChoice;\n\t}\n\n\tif (compat.thinkingFormat === \"zai\" && model.reasoning) {\n\t\t// Z.ai uses binary thinking: { type: \"enabled\" | \"disabled\" }\n\t\t// Must explicitly 
disable since z.ai defaults to thinking enabled\n\t\t(params as any).thinking = { type: options?.reasoningEffort ? \"enabled\" : \"disabled\" };\n\t} else if (options?.reasoningEffort && model.reasoning && compat.supportsReasoningEffort) {\n\t\t// OpenAI-style reasoning_effort\n\t\tparams.reasoning_effort = options.reasoningEffort;\n\t}\n\n\t// OpenRouter provider routing preferences\n\tif (model.baseUrl.includes(\"openrouter.ai\") && model.compat?.openRouterRouting) {\n\t\t(params as any).provider = model.compat.openRouterRouting;\n\t}\n\n\treturn params;\n}\n\nfunction maybeAddOpenRouterAnthropicCacheControl(\n\tmodel: Model<\"openai-completions\">,\n\tmessages: ChatCompletionMessageParam[],\n): void {\n\tif (model.provider !== \"openrouter\" || !model.id.startsWith(\"anthropic/\")) return;\n\n\t// Anthropic-style caching requires cache_control on a text part. Add a breakpoint\n\t// on the last user/assistant message (walking backwards until we find text content).\n\tfor (let i = messages.length - 1; i >= 0; i--) {\n\t\tconst msg = messages[i];\n\t\tif (msg.role !== \"user\" && msg.role !== \"assistant\") continue;\n\n\t\tconst content = msg.content;\n\t\tif (typeof content === \"string\") {\n\t\t\tmsg.content = [\n\t\t\t\tObject.assign({ type: \"text\" as const, text: content }, { cache_control: { type: \"ephemeral\" } }),\n\t\t\t];\n\t\t\treturn;\n\t\t}\n\n\t\tif (!Array.isArray(content)) continue;\n\n\t\t// Find last text part and add cache_control\n\t\tfor (let j = content.length - 1; j >= 0; j--) {\n\t\t\tconst part = content[j];\n\t\t\tif (part?.type === \"text\") {\n\t\t\t\tObject.assign(part, { cache_control: { type: \"ephemeral\" } });\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n}\n\nexport function convertMessages(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tcompat: Required<OpenAICompletionsCompat>,\n): ChatCompletionMessageParam[] {\n\tconst params: ChatCompletionMessageParam[] = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (compat.requiresMistralToolIds) return normalizeMistralToolId(id);\n\n\t\t// Handle pipe-separated IDs from OpenAI Responses API\n\t\t// Format: {call_id}|{id} where {id} can be 400+ chars with special chars (+, /, =)\n\t\t// These come from providers like github-copilot, openai-codex, opencode\n\t\t// Extract just the call_id part and normalize it\n\t\tif (id.includes(\"|\")) {\n\t\t\tconst [callId] = id.split(\"|\");\n\t\t\t// Sanitize to allowed chars and truncate to 40 chars (OpenAI limit)\n\t\t\treturn callId.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 40);\n\t\t}\n\n\t\tif (model.provider === \"openai\") return id.length > 40 ? id.slice(0, 40) : id;\n\t\t// Copilot Claude models route to Claude backend which requires Anthropic ID format\n\t\tif (model.provider === \"github-copilot\" && model.id.toLowerCase().includes(\"claude\")) {\n\t\t\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 64);\n\t\t}\n\t\treturn id;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, (id) => normalizeToolCallId(id));\n\n\tif (context.systemPrompt) {\n\t\tconst useDeveloperRole = model.reasoning && compat.supportsDeveloperRole;\n\t\tconst role = useDeveloperRole ? \"developer\" : \"system\";\n\t\tparams.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });\n\t}\n\n\tlet lastRole: string | null = null;\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\t\t// Some providers (e.g. 
Mistral/Devstral) don't allow user messages directly after tool results\n\t\t// Insert a synthetic assistant message to bridge the gap\n\t\tif (compat.requiresAssistantAfterToolResult && lastRole === \"toolResult\" && msg.role === \"user\") {\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t});\n\t\t}\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ChatCompletionContentPart[] = msg.content.map((item): ChatCompletionContentPart => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartImage;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? content.filter((c) => c.type !== \"image_url\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\t// Some providers (e.g. Mistral) don't accept null content, use empty string instead\n\t\t\tconst assistantMsg: ChatCompletionAssistantMessageParam = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: compat.requiresAssistantAfterToolResult ? \"\" : null,\n\t\t\t};\n\n\t\t\tconst textBlocks = msg.content.filter((b) => b.type === \"text\") as TextContent[];\n\t\t\t// Filter out empty text blocks to avoid API validation errors\n\t\t\tconst nonEmptyTextBlocks = textBlocks.filter((b) => b.text && b.text.trim().length > 0);\n\t\t\tif (nonEmptyTextBlocks.length > 0) {\n\t\t\t\t// GitHub Copilot requires assistant content as a string, not an array.\n\t\t\t\t// Sending as array causes Claude models to re-answer all previous prompts.\n\t\t\t\tif (model.provider === \"github-copilot\") {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => sanitizeSurrogates(b.text)).join(\"\");\n\t\t\t\t} else {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => {\n\t\t\t\t\t\treturn { type: \"text\", text: sanitizeSurrogates(b.text) };\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle thinking blocks\n\t\t\tconst thinkingBlocks = msg.content.filter((b) => b.type === \"thinking\") as ThinkingContent[];\n\t\t\t// Filter out empty thinking blocks to avoid API validation errors\n\t\t\tconst nonEmptyThinkingBlocks = thinkingBlocks.filter((b) => b.thinking && b.thinking.trim().length > 0);\n\t\t\tif (nonEmptyThinkingBlocks.length > 0) {\n\t\t\t\tif (compat.requiresThinkingAsText) {\n\t\t\t\t\t// Convert thinking blocks to plain text (no tags to avoid model mimicking them)\n\t\t\t\t\tconst thinkingText = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\\n\");\n\t\t\t\t\tconst textContent = assistantMsg.content as Array<{ type: \"text\"; text: string }> | null;\n\t\t\t\t\tif (textContent) {\n\t\t\t\t\t\ttextContent.unshift({ type: \"text\", text: thinkingText });\n\t\t\t\t\t} else {\n\t\t\t\t\t\tassistantMsg.content = [{ type: \"text\", text: thinkingText }];\n\t\t\t\t\t}\n\t\t\t\t} 
else {\n\t\t\t\t\t// Use the signature from the first thinking block if available (for llama.cpp server + gpt-oss)\n\t\t\t\t\tconst signature = nonEmptyThinkingBlocks[0].thinkingSignature;\n\t\t\t\t\tif (signature && signature.length > 0) {\n\t\t\t\t\t\t(assistantMsg as any)[signature] = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tconst toolCalls = msg.content.filter((b) => b.type === \"toolCall\") as ToolCall[];\n\t\t\tif (toolCalls.length > 0) {\n\t\t\t\tassistantMsg.tool_calls = toolCalls.map((tc) => ({\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\ttype: \"function\" as const,\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: tc.name,\n\t\t\t\t\t\targuments: JSON.stringify(tc.arguments),\n\t\t\t\t\t},\n\t\t\t\t}));\n\t\t\t\tconst reasoningDetails = toolCalls\n\t\t\t\t\t.filter((tc) => tc.thoughtSignature)\n\t\t\t\t\t.map((tc) => {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\treturn JSON.parse(tc.thoughtSignature!);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\treturn null;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.filter(Boolean);\n\t\t\t\tif (reasoningDetails.length > 0) {\n\t\t\t\t\t(assistantMsg as any).reasoning_details = reasoningDetails;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Skip assistant messages that have no content and no tool calls.\n\t\t\t// Mistral explicitly requires \"either content or tool_calls, but not none\".\n\t\t\t// Other providers also don't accept empty assistant messages.\n\t\t\t// This handles aborted assistant responses that got no content.\n\t\t\tconst content = assistantMsg.content;\n\t\t\tconst hasContent =\n\t\t\t\tcontent !== null &&\n\t\t\t\tcontent !== undefined &&\n\t\t\t\t(typeof content === \"string\" ? content.length > 0 : content.length > 0);\n\t\t\tif (!hasContent && !assistantMsg.tool_calls) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tparams.push(assistantMsg);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst imageBlocks: Array<{ type: \"image_url\"; image_url: { url: string } }> = [];\n\t\t\tlet j = i;\n\n\t\t\tfor (; j < transformedMessages.length && transformedMessages[j].role === \"toolResult\"; j++) {\n\t\t\t\tconst toolMsg = transformedMessages[j] as ToolResultMessage;\n\n\t\t\t\t// Extract text and image content\n\t\t\t\tconst textResult = toolMsg.content\n\t\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t\t.join(\"\\n\");\n\t\t\t\tconst hasImages = toolMsg.content.some((c) => c.type === \"image\");\n\n\t\t\t\t// Always send tool result with text (or placeholder if only images)\n\t\t\t\tconst hasText = textResult.length > 0;\n\t\t\t\t// Some providers (e.g. Mistral) require the 'name' field in tool results\n\t\t\t\tconst toolResultMsg: ChatCompletionToolMessageParam = {\n\t\t\t\t\trole: \"tool\",\n\t\t\t\t\tcontent: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t\t\ttool_call_id: toolMsg.toolCallId,\n\t\t\t\t};\n\t\t\t\tif (compat.requiresToolResultName && toolMsg.toolName) {\n\t\t\t\t\t(toolResultMsg as any).name = toolMsg.toolName;\n\t\t\t\t}\n\t\t\t\tparams.push(toolResultMsg);\n\n\t\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\t\tfor (const block of toolMsg.content) {\n\t\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\t\timageBlocks.push({\n\t\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\t\turl: `data:${(block as any).mimeType};base64,${(block as any).data}`,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ti = j - 1;\n\n\t\t\tif (imageBlocks.length > 0) {\n\t\t\t\tif (compat.requiresAssistantAfterToolResult) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t...imageBlocks,\n\t\t\t\t\t],\n\t\t\t\t});\n\t\t\t\tlastRole = \"user\";\n\t\t\t} else {\n\t\t\t\tlastRole = \"toolResult\";\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\n\t\tlastRole = msg.role;\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[]): OpenAI.Chat.Completions.ChatCompletionTool[] {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\tstrict: false, // Disable strict mode to allow optional parameters without null unions\n\t\t},\n\t}));\n}\n\nfunction mapStopReason(reason: ChatCompletionChunk.Choice[\"finish_reason\"]): StopReason {\n\tif (reason === null) return \"stop\";\n\tswitch (reason) {\n\t\tcase \"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\t\treturn \"length\";\n\t\tcase \"function_call\":\n\t\tcase \"tool_calls\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"content_filter\":\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\n/**\n * Detect compatibility settings from provider and baseUrl for known providers.\n * Provider takes precedence over URL-based detection since it's explicitly configured.\n * Returns a fully resolved OpenAICompletionsCompat object with all fields set.\n */\nfunction detectCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst provider = model.provider;\n\tconst baseUrl = model.baseUrl;\n\n\tconst isZai = provider === \"zai\" || baseUrl.includes(\"api.z.ai\");\n\n\tconst isNonStandard =\n\t\tprovider === \"cerebras\" ||\n\t\tbaseUrl.includes(\"cerebras.ai\") ||\n\t\tprovider === \"xai\" ||\n\t\tbaseUrl.includes(\"api.x.ai\") ||\n\t\tprovider === \"mistral\" ||\n\t\tbaseUrl.includes(\"mistral.ai\") ||\n\t\tbaseUrl.includes(\"chutes.ai\") ||\n\t\tbaseUrl.includes(\"deepseek.com\") ||\n\t\tisZai ||\n\t\tprovider === \"opencode\" ||\n\t\tbaseUrl.includes(\"opencode.ai\");\n\n\tconst useMaxTokens = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\") || baseUrl.includes(\"chutes.ai\");\n\n\tconst isGrok = provider === \"xai\" || baseUrl.includes(\"api.x.ai\");\n\n\tconst isMistral = provider === \"mistral\" || 
baseUrl.includes(\"mistral.ai\");\n\n\treturn {\n\t\tsupportsStore: !isNonStandard,\n\t\tsupportsDeveloperRole: !isNonStandard,\n\t\tsupportsReasoningEffort: !isGrok && !isZai,\n\t\tsupportsUsageInStreaming: true,\n\t\tmaxTokensField: useMaxTokens ? \"max_tokens\" : \"max_completion_tokens\",\n\t\trequiresToolResultName: isMistral,\n\t\trequiresAssistantAfterToolResult: false, // Mistral no longer requires this as of Dec 2024\n\t\trequiresThinkingAsText: isMistral,\n\t\trequiresMistralToolIds: isMistral,\n\t\tthinkingFormat: isZai ? \"zai\" : \"openai\",\n\t\topenRouterRouting: {},\n\t};\n}\n\n/**\n * Get resolved compatibility settings for a model.\n * Uses explicit model.compat if provided, otherwise auto-detects from provider/URL.\n */\nfunction getCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst detected = detectCompat(model);\n\tif (!model.compat) return detected;\n\n\treturn {\n\t\tsupportsStore: model.compat.supportsStore ?? detected.supportsStore,\n\t\tsupportsDeveloperRole: model.compat.supportsDeveloperRole ?? detected.supportsDeveloperRole,\n\t\tsupportsReasoningEffort: model.compat.supportsReasoningEffort ?? detected.supportsReasoningEffort,\n\t\tsupportsUsageInStreaming: model.compat.supportsUsageInStreaming ?? detected.supportsUsageInStreaming,\n\t\tmaxTokensField: model.compat.maxTokensField ?? detected.maxTokensField,\n\t\trequiresToolResultName: model.compat.requiresToolResultName ?? detected.requiresToolResultName,\n\t\trequiresAssistantAfterToolResult:\n\t\t\tmodel.compat.requiresAssistantAfterToolResult ?? detected.requiresAssistantAfterToolResult,\n\t\trequiresThinkingAsText: model.compat.requiresThinkingAsText ?? detected.requiresThinkingAsText,\n\t\trequiresMistralToolIds: model.compat.requiresMistralToolIds ?? detected.requiresMistralToolIds,\n\t\tthinkingFormat: model.compat.thinkingFormat ?? detected.thinkingFormat,\n\t\topenRouterRouting: model.compat.openRouterRouting ?? {},\n\t};\n}\n"]}
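A note on the getCompat merge visible at the end of the source map above: because every field falls back with `??`, a model definition only has to override the compat flags it actually disagrees with, and everything else keeps coming from provider/URL auto-detection. A minimal sketch, assuming a hypothetical self-hosted endpoint (the provider name, URL, and elided Model fields are illustrative, not part of this package):

// Hypothetical partial override: only supportsUsageInStreaming is pinned;
// maxTokensField, thinkingFormat, etc. still resolve via detectCompat().
const model = {
	provider: "my-proxy",                    // illustrative
	baseUrl: "https://llm.example.com/v1",   // illustrative
	compat: {
		// Say the proxy rejects stream_options: skip streamed usage chunks.
		supportsUsageInStreaming: false,
	},
	// ...remaining Model<"openai-completions"> fields elided
};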
@@ -272,9 +272,11 @@ export const streamSimpleOpenAICompletions = (model, context, options) => {
     }
     const base = buildBaseOptions(model, options, apiKey);
     const reasoningEffort = supportsXhigh(model) ? options?.reasoning : clampReasoning(options?.reasoning);
+    const toolChoice = options?.toolChoice;
     return streamOpenAICompletions(model, context, {
         ...base,
         reasoningEffort,
+        toolChoice,
     });
 };
 function createClient(model, context, apiKey, optionsHeaders) {
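With `toolChoice` now forwarded, callers of the simple entry point can force a specific function call without dropping down to `streamOpenAICompletions`. A rough usage sketch, assuming `SimpleStreamOptions` gained the field (consistent with the one-line types.d.ts change in this release) and that `Model`/`Tool` are importable from the package root; the message shape is abbreviated and the tool name and prompt are illustrative:

import { streamSimpleOpenAICompletions } from "@mariozechner/pi-ai";
import type { Model, Tool } from "@mariozechner/pi-ai"; // import surface assumed

declare const model: Model<"openai-completions">; // obtained elsewhere
declare const weatherTool: Tool;                  // caller-defined tool schema

const stream = streamSimpleOpenAICompletions(model, {
	systemPrompt: "Answer with the weather tool.",
	messages: [{ role: "user", content: "Weather in Berlin?" }],
	tools: [weatherTool],
}, {
	reasoning: "low",
	// New in 0.50.2: previously ignored here, now passed through to tool_choice.
	toolChoice: { type: "function", function: { name: "get_weather" } },
});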
@@ -403,6 +405,15 @@ export function convertMessages(model, context, compat) {
     const normalizeToolCallId = (id) => {
         if (compat.requiresMistralToolIds)
             return normalizeMistralToolId(id);
+        // Handle pipe-separated IDs from OpenAI Responses API
+        // Format: {call_id}|{id} where {id} can be 400+ chars with special chars (+, /, =)
+        // These come from providers like github-copilot, openai-codex, opencode
+        // Extract just the call_id part and normalize it
+        if (id.includes("|")) {
+            const [callId] = id.split("|");
+            // Sanitize to allowed chars and truncate to 40 chars (OpenAI limit)
+            return callId.replace(/[^a-zA-Z0-9_-]/g, "_").slice(0, 40);
+        }
         if (model.provider === "openai")
             return id.length > 40 ? id.slice(0, 40) : id;
         // Copilot Claude models route to Claude backend which requires Anthropic ID format
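The new branch is self-contained enough to sanity-check in isolation. Below is a standalone mirror of the added logic (a re-implementation for illustration, not an import from the package), plus the behavior it pins down:

// Standalone mirror of the added normalization branch.
function normalizePipeSeparatedId(id: string): string {
	if (id.includes("|")) {
		const [callId] = id.split("|");
		// Keep only [a-zA-Z0-9_-] and cap at 40 chars (OpenAI's tool_call_id limit).
		return callId.replace(/[^a-zA-Z0-9_-]/g, "_").slice(0, 40);
	}
	return id;
}

// A Responses-style ID keeps only its call_id prefix:
normalizePipeSeparatedId("call_Abc123|fc_0d4ae1+longBase64/With=Chars");
// => "call_Abc123"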
@@ -657,6 +668,7 @@ function detectCompat(model) {
         provider === "mistral" ||
         baseUrl.includes("mistral.ai") ||
         baseUrl.includes("chutes.ai") ||
+        baseUrl.includes("deepseek.com") ||
         isZai ||
         provider === "opencode" ||
         baseUrl.includes("opencode.ai");
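With deepseek.com added to the isNonStandard detection, auto-detected compat for a DeepSeek base URL now disables `store` and the developer role while leaving the rest at the OpenAI defaults. A sketch of the expected resolution; note detectCompat is module-internal (not exported), and the model literal below is illustrative:

// Expected auto-detection for a DeepSeek endpoint after this change.
const compat = detectCompat({
	provider: "deepseek",                    // illustrative provider id
	baseUrl: "https://api.deepseek.com/v1",  // matches the new URL check
} as any);

compat.supportsStore;         // false -> no `store: false` sent in requests
compat.supportsDeveloperRole; // false -> system prompt keeps role "system"
compat.maxTokensField;        // "max_completion_tokens" (only Mistral/Chutes use max_tokens)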
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"openai-completions.js","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAU5B,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAiB5D,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACvE,OAAO,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE5D;;;GAGG;AACH,SAAS,sBAAsB,CAAC,EAAU,EAAU;IACnD,qCAAqC;IACrC,IAAI,UAAU,GAAG,EAAE,CAAC,OAAO,CAAC,eAAe,EAAE,EAAE,CAAC,CAAC;IACjD,wCAAwC;IACxC,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3B,4EAA4E;QAC5E,MAAM,OAAO,GAAG,WAAW,CAAC;QAC5B,UAAU,GAAG,UAAU,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;IACnE,CAAC;SAAM,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAClC,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACrC,CAAC;IACD,OAAO,UAAU,CAAC;AAAA,CAClB;AAED;;;;GAIG;AACH,SAAS,cAAc,CAAC,QAAmB,EAAW;IACrD,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YAC/B,OAAO,IAAI,CAAC;QACb,CAAC;QACD,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,KAAK,UAAU,CAAC,EAAE,CAAC;gBAC5D,OAAO,IAAI,CAAC;YACb,CAAC;QACF,CAAC;IACF,CAAC;IACD,OAAO,KAAK,CAAC;AAAA,CACb;AAOD,MAAM,CAAC,MAAM,uBAAuB,GAAmE,CACtG,KAAkC,EAClC,OAAgB,EAChB,OAAkC,EACJ,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,KAAK,CAAC,GAAG;YACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACrE,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACtE,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACpD,OAAO,EAAE,SAAS,EAAE,CAAC,MAAM,CAAC,CAAC;YAC7B,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/F,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAEhD,IAAI,YAAY,GAAiF,IAAI,CAAC;YACtG,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;YAC9B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;YAC3C,MAAM,kBAAkB,GAAG,CAAC,KAA2B,EAAE,EAAE,CAAC;gBAC3D,IAAI,KAAK,EAAE,CAAC;oBACX,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC3B,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,UAAU;4BAChB,YAAY,EAAE,UAAU,EAAE;4BAC1B,OAAO,EAAE,KAAK,CAAC,IAAI;4BACnB,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACtC,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,cAAc;4BACpB,YAAY,EAAE,UAAU,EAAE;4BAC1B,OAAO,EAAE,KAAK,CAAC,QAAQ;4BACvB,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACtC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,CAAC;wBACxD,OAAO,KAAK,CAAC,WAAW,CAAC;wBACzB,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,cAAc;4BACpB,YAAY,EAAE,UAAU,EAAE;4BAC1B,QAAQ,EAAE,KAAK;4BACf,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;gBACF,CAAC;YAAA,CACD,CAAC;YAEF,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;gBACxC,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;oBACjB,MAAM,YAAY,GAAG
,KAAK,CAAC,KAAK,CAAC,qBAAqB,EAAE,aAAa,IAAI,CAAC,CAAC;oBAC3E,MAAM,eAAe,GAAG,KAAK,CAAC,KAAK,CAAC,yBAAyB,EAAE,gBAAgB,IAAI,CAAC,CAAC;oBACrF,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC,GAAG,YAAY,CAAC;oBAC9D,MAAM,YAAY,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,iBAAiB,IAAI,CAAC,CAAC,GAAG,eAAe,CAAC;oBAC5E,MAAM,CAAC,KAAK,GAAG;wBACd,sFAAsF;wBACtF,KAAK;wBACL,MAAM,EAAE,YAAY;wBACpB,SAAS,EAAE,YAAY;wBACvB,UAAU,EAAE,CAAC;wBACb,wEAAwE;wBACxE,qEAAqE;wBACrE,WAAW,EAAE,KAAK,GAAG,YAAY,GAAG,YAAY;wBAChD,IAAI,EAAE;4BACL,KAAK,EAAE,CAAC;4BACR,MAAM,EAAE,CAAC;4BACT,SAAS,EAAE,CAAC;4BACZ,UAAU,EAAE,CAAC;4BACb,KAAK,EAAE,CAAC;yBACR;qBACD,CAAC;oBACF,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;gBAED,MAAM,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;gBAChC,IAAI,CAAC,MAAM;oBAAE,SAAS;gBAEtB,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;oBAC1B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;gBACzD,CAAC;gBAED,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;oBAClB,IACC,MAAM,CAAC,KAAK,CAAC,OAAO,KAAK,IAAI;wBAC7B,MAAM,CAAC,KAAK,CAAC,OAAO,KAAK,SAAS;wBAClC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAC9B,CAAC;wBACF,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BACnD,kBAAkB,CAAC,YAAY,CAAC,CAAC;4BACjC,YAAY,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;4BAC1C,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;4BAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBAClF,CAAC;wBAED,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BAClC,YAAY,CAAC,IAAI,IAAI,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC;4BAC1C,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,YAAY;gCAClB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;gCAC3B,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;oBAED,oEAAoE;oBACpE,mDAAmD;oBACnD,+DAA+D;oBAC/D,mFAAmF;oBACnF,MAAM,eAAe,GAAG,CAAC,mBAAmB,EAAE,WAAW,EAAE,gBAAgB,CAAC,CAAC;oBAC7E,IAAI,mBAAmB,GAAkB,IAAI,CAAC;oBAC9C,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE,CAAC;wBACrC,IACE,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,KAAK,IAAI;4BACpC,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,KAAK,SAAS;4BACzC,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,EACtC,CAAC;4BACF,IAAI,CAAC,mBAAmB,EAAE,CAAC;gCAC1B,mBAAmB,GAAG,KAAK,CAAC;gCAC5B,MAAM;4BACP,CAAC;wBACF,CAAC;oBACF,CAAC;oBAED,IAAI,mBAAmB,EAAE,CAAC;wBACzB,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACvD,kBAAkB,CAAC,YAAY,CAAC,CAAC;4BACjC,YAAY,GAAG;gCACd,IAAI,EAAE,UAAU;gCAChB,QAAQ,EAAE,EAAE;gCACZ,iBAAiB,EAAE,mBAAmB;6BACtC,CAAC;4BACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;4BAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBACtF,CAAC;wBAED,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACtC,MAAM,KAAK,GAAI,MAAM,CAAC,KAAa,CAAC,mBAAmB,CAAC,CAAC;4BACzD,YAAY,CAAC,QAAQ,IAAI,KAAK,CAAC;4BAC/B,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK;gCACL,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;oBAED,IAAI,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,CAAC;wBAC/B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC;4BAChD,IACC,CAAC,YAAY;gCACb,YAAY,CAAC,IAAI,KAAK,UAAU;gCAChC,CAAC,QAAQ,CAAC,EAAE,IAAI,YAAY,CAAC,EAAE,KAAK,QAAQ,CAAC,EAAE,CAAC,EAC/C,CAAC;gCACF,kBAAkB,CAAC,YAAY,CAAC,CAAC;gCACjC,YAAY,GAAG;oCACd,IAAI,EAAE,UAAU;oCAChB,EAAE,EAAE,QAAQ,CAAC,EAAE,IAAI,EAAE;oCACrB,IAAI,EAAE,QAAQ,CAAC,QAAQ,EAAE,IAAI,IAAI,EAAE;oCACnC,SAAS,EAAE,EAAE;oCACb,WAAW,EAAE,EAAE;iCACf,CAAC;gCACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;4BACtF,CAAC;4BAED,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gCACtC,IAAI,QAAQ,CAAC,EAAE;oCAAE,YA
AY,CAAC,EAAE,GAAG,QAAQ,CAAC,EAAE,CAAC;gCAC/C,IAAI,QAAQ,CAAC,QAAQ,EAAE,IAAI;oCAAE,YAAY,CAAC,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC;gCACxE,IAAI,KAAK,GAAG,EAAE,CAAC;gCACf,IAAI,QAAQ,CAAC,QAAQ,EAAE,SAAS,EAAE,CAAC;oCAClC,KAAK,GAAG,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oCACpC,YAAY,CAAC,WAAW,IAAI,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oCACxD,YAAY,CAAC,SAAS,GAAG,kBAAkB,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC;gCACvE,CAAC;gCACD,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,gBAAgB;oCACtB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK;oCACL,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;wBACF,CAAC;oBACF,CAAC;oBAED,MAAM,gBAAgB,GAAI,MAAM,CAAC,KAAa,CAAC,iBAAiB,CAAC;oBACjE,IAAI,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,EAAE,CAAC;wBACzD,KAAK,MAAM,MAAM,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,MAAM,CAAC,IAAI,KAAK,qBAAqB,IAAI,MAAM,CAAC,EAAE,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;gCACvE,MAAM,gBAAgB,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAC3C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,MAAM,CAAC,EAAE,CAC1B,CAAC;gCAC1B,IAAI,gBAAgB,EAAE,CAAC;oCACtB,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;gCAC5D,CAAC;4BACF,CAAC;wBACF,CAAC;oBACF,CAAC;gBACF,CAAC;YACF,CAAC;YAED,kBAAkB,CAAC,YAAY,CAAC,CAAC;YAEjC,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;YAC9C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO;gBAAE,OAAQ,KAAa,CAAC,KAAK,CAAC;YAChE,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,2EAA2E;YAC3E,MAAM,WAAW,GAAI,KAAa,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC;YACzD,IAAI,WAAW;gBAAE,MAAM,CAAC,YAAY,IAAI,KAAK,WAAW,EAAE,CAAC;YAC3D,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA8D,CACvG,KAAkC,EAClC,OAAgB,EAChB,OAA6B,EACC,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC/D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,IAAI,GAAG,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,eAAe,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,cAAc,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEvG,OAAO,uBAAuB,CAAC,KAAK,EAAE,OAAO,EAAE;QAC9C,GAAG,IAAI;QACP,eAAe;KACoB,CAAC,CAAC;AAAA,CACtC,CAAC;AAEF,SAAS,YAAY,CACpB,KAAkC,EAClC,OAAgB,EAChB,MAAe,EACf,cAAuC,EACtC;IACD,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CACd,gGAAgG,CAChG,CAAC;QACH,CAAC;QACD,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACrC,CAAC;IAED,MAAM,OAAO,GAAG,EAAE,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;IACrC,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,EAAE,CAAC;QACzC,gFAAgF;QAChF,iFAAiF;QACjF,+CAA+C;QAC/C,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,IAAI,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAClD,MAAM,WAAW,GAAG,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;QACtE,OAAO,CAAC,aAAa,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC;QACxD,OAAO,CAAC,eAAe,CAAC,GAAG,oBAAoB,CAAC;QAEhD,mDAAmD;QACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAA
C;YACxC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;gBACvD,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;YACpD,CAAC;YACD,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;gBAC7D,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;YACpD,CAAC;YACD,OAAO,KAAK,CAAC;QAAA,CACb,CAAC,CAAC;QACH,IAAI,SAAS,EAAE,CAAC;YACf,OAAO,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;QAC5C,CAAC;IACF,CAAC;IAED,2DAA2D;IAC3D,IAAI,cAAc,EAAE,CAAC;QACpB,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IACxC,CAAC;IAED,OAAO,IAAI,MAAM,CAAC;QACjB,MAAM;QACN,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,uBAAuB,EAAE,IAAI;QAC7B,cAAc,EAAE,OAAO;KACvB,CAAC,CAAC;AAAA,CACH;AAED,SAAS,WAAW,CAAC,KAAkC,EAAE,OAAgB,EAAE,OAAkC,EAAE;IAC9G,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC;IAChC,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,uCAAuC,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAEzD,MAAM,MAAM,GAAgE;QAC3E,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ;QACR,MAAM,EAAE,IAAI;KACZ,CAAC;IAEF,IAAI,MAAM,CAAC,wBAAwB,KAAK,KAAK,EAAE,CAAC;QAC9C,MAAc,CAAC,cAAc,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;IAC1D,CAAC;IAED,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;QAC1B,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC;IACtB,CAAC;IAED,IAAI,OAAO,EAAE,SAAS,EAAE,CAAC;QACxB,IAAI,MAAM,CAAC,cAAc,KAAK,YAAY,EAAE,CAAC;YAC3C,MAAc,CAAC,UAAU,GAAG,OAAO,CAAC,SAAS,CAAC;QAChD,CAAC;aAAM,CAAC;YACP,MAAM,CAAC,qBAAqB,GAAG,OAAO,CAAC,SAAS,CAAC;QAClD,CAAC;IACF,CAAC;IAED,IAAI,OAAO,EAAE,WAAW,KAAK,SAAS,EAAE,CAAC;QACxC,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAC1C,CAAC;IAED,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;QACnB,MAAM,CAAC,KAAK,GAAG,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;SAAM,IAAI,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7C,mGAAmG;QACnG,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;IACnB,CAAC;IAED,IAAI,OAAO,EAAE,UAAU,EAAE,CAAC;QACzB,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC;IACzC,CAAC;IAED,IAAI,MAAM,CAAC,cAAc,KAAK,KAAK,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QACxD,8DAA8D;QAC9D,kEAAkE;QACjE,MAAc,CAAC,QAAQ,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC;IACxF,CAAC;SAAM,IAAI,OAAO,EAAE,eAAe,IAAI,KAAK,CAAC,SAAS,IAAI,MAAM,CAAC,uBAAuB,EAAE,CAAC;QAC1F,gCAAgC;QAChC,MAAM,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,CAAC;IACnD,CAAC;IAED,0CAA0C;IAC1C,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,KAAK,CAAC,MAAM,EAAE,iBAAiB,EAAE,CAAC;QAC/E,MAAc,CAAC,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC;IAC3D,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,uCAAuC,CAC/C,KAAkC,EAClC,QAAsC,EAC/B;IACP,IAAI,KAAK,CAAC,QAAQ,KAAK,YAAY,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC;QAAE,OAAO;IAElF,kFAAkF;IAClF,qFAAqF;IACrF,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC/C,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW;YAAE,SAAS;QAE9D,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;QAC5B,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;YACjC,GAAG,CAAC,OAAO,GAAG;gBACb,MAAM,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,EAAE,CAAC;aACjG,CAAC;YACF,OAAO;QACR,CAAC;QAED,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC;YAAE,SAAS;QAEtC,4CAA4C;QAC5C,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9C,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YACxB,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM,EAAE,CAAC;gBAC3B,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,EAAE,CAAC,CAAC;gBAC9D,OAAO;YACR,CAAC;QACF,CAAC;IACF,CAAC;AAAA,CACD;AAED,MAAM,UAAU,eAAe,CAC9B,KAAkC,EAClC,OAAgB,EA
ChB,MAAyC,EACV;IAC/B,MAAM,MAAM,GAAiC,EAAE,CAAC;IAEhD,MAAM,mBAAmB,GAAG,CAAC,EAAU,EAAU,EAAE,CAAC;QACnD,IAAI,MAAM,CAAC,sBAAsB;YAAE,OAAO,sBAAsB,CAAC,EAAE,CAAC,CAAC;QACrE,IAAI,KAAK,CAAC,QAAQ,KAAK,QAAQ;YAAE,OAAO,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAC9E,mFAAmF;QACnF,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,IAAI,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;YACtF,OAAO,EAAE,CAAC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACxD,CAAC;QACD,OAAO,EAAE,CAAC;IAAA,CACV,CAAC;IAEF,MAAM,mBAAmB,GAAG,iBAAiB,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,mBAAmB,CAAC,EAAE,CAAC,CAAC,CAAC;IAExG,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QAC1B,MAAM,gBAAgB,GAAG,KAAK,CAAC,SAAS,IAAI,MAAM,CAAC,qBAAqB,CAAC;QACzE,MAAM,IAAI,GAAG,gBAAgB,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;QACvD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;IAChF,CAAC;IAED,IAAI,QAAQ,GAAkB,IAAI,CAAC;IAEnC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,mBAAmB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACrD,MAAM,GAAG,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAC;QACnC,+FAA+F;QAC/F,yDAAyD;QACzD,IAAI,MAAM,CAAC,gCAAgC,IAAI,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACjG,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,oCAAoC;aAC7C,CAAC,CAAC;QACJ,CAAC;QAED,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACzB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC;iBACxC,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,MAAM,OAAO,GAAgC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE,CAAC;oBACjG,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC1B,OAAO;4BACN,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;yBACK,CAAC;oBAC3C,CAAC;yBAAM,CAAC;wBACP,OAAO;4BACN,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE;gCACV,GAAG,EAAE,QAAQ,IAAI,CAAC,QAAQ,WAAW,IAAI,CAAC,IAAI,EAAE;6BAChD;yBACwC,CAAC;oBAC5C,CAAC;gBAAA,CACD,CAAC,CAAC;gBACH,MAAM,eAAe,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC;oBACrD,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;oBAC/C,CAAC,CAAC,OAAO,CAAC;gBACX,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBAC3C,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,eAAe;iBACxB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,oFAAoF;YACpF,MAAM,YAAY,GAAwC;gBACzD,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,MAAM,CAAC,gCAAgC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;aAC5D,CAAC;YAEF,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAkB,CAAC;YACjF,8DAA8D;YAC9D,MAAM,kBAAkB,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACxF,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACnC,uEAAuE;gBACvE,2EAA2E;gBAC3E,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,EAAE,CAAC;oBACzC,YAAY,CAAC,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBAC3F,CAAC;qBAAM,CAAC;oBACP,YAAY,CAAC,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;wBACpD,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;oBAAA,CAC1D,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YAED,yBAAyB;YACzB,MAAM,cAAc,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAsB,CAAC;YAC7F,kEAAkE;YAClE,MAAM,sBAAsB,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG
,CAAC,CAAC,CAAC;YACxG,IAAI,sBAAsB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvC,IAAI,MAAM,CAAC,sBAAsB,EAAE,CAAC;oBACnC,gFAAgF;oBAChF,MAAM,YAAY,GAAG,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;oBAChF,MAAM,WAAW,GAAG,YAAY,CAAC,OAAuD,CAAC;oBACzF,IAAI,WAAW,EAAE,CAAC;wBACjB,WAAW,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;oBAC3D,CAAC;yBAAM,CAAC;wBACP,YAAY,CAAC,OAAO,GAAG,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;oBAC/D,CAAC;gBACF,CAAC;qBAAM,CAAC;oBACP,gGAAgG;oBAChG,MAAM,SAAS,GAAG,sBAAsB,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC;oBAC9D,IAAI,SAAS,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;wBACtC,YAAoB,CAAC,SAAS,CAAC,GAAG,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAC7F,CAAC;gBACF,CAAC;YACF,CAAC;YAED,MAAM,SAAS,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAe,CAAC;YACjF,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC1B,YAAY,CAAC,UAAU,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC;oBAChD,EAAE,EAAE,EAAE,CAAC,EAAE;oBACT,IAAI,EAAE,UAAmB;oBACzB,QAAQ,EAAE;wBACT,IAAI,EAAE,EAAE,CAAC,IAAI;wBACb,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC;qBACvC;iBACD,CAAC,CAAC,CAAC;gBACJ,MAAM,gBAAgB,GAAG,SAAS;qBAChC,MAAM,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC;qBACnC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC;oBACZ,IAAI,CAAC;wBACJ,OAAO,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,gBAAiB,CAAC,CAAC;oBACzC,CAAC;oBAAC,MAAM,CAAC;wBACR,OAAO,IAAI,CAAC;oBACb,CAAC;gBAAA,CACD,CAAC;qBACD,MAAM,CAAC,OAAO,CAAC,CAAC;gBAClB,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAChC,YAAoB,CAAC,iBAAiB,GAAG,gBAAgB,CAAC;gBAC5D,CAAC;YACF,CAAC;YACD,kEAAkE;YAClE,4EAA4E;YAC5E,8DAA8D;YAC9D,gEAAgE;YAChE,MAAM,OAAO,GAAG,YAAY,CAAC,OAAO,CAAC;YACrC,MAAM,UAAU,GACf,OAAO,KAAK,IAAI;gBAChB,OAAO,KAAK,SAAS;gBACrB,CAAC,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACzE,IAAI,CAAC,UAAU,IAAI,CAAC,YAAY,CAAC,UAAU,EAAE,CAAC;gBAC7C,SAAS;YACV,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC3B,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,MAAM,WAAW,GAA6D,EAAE,CAAC;YACjF,IAAI,CAAC,GAAG,CAAC,CAAC;YAEV,OAAO,CAAC,GAAG,mBAAmB,CAAC,MAAM,IAAI,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC5F,MAAM,OAAO,GAAG,mBAAmB,CAAC,CAAC,CAAsB,CAAC;gBAE5D,iCAAiC;gBACjC,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO;qBAChC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;qBAChC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAAS,CAAC,IAAI,CAAC;qBAC3B,IAAI,CAAC,IAAI,CAAC,CAAC;gBACb,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;gBAElE,oEAAoE;gBACpE,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;gBACtC,yEAAyE;gBACzE,MAAM,aAAa,GAAmC;oBACrD,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,kBAAkB,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,sBAAsB,CAAC;oBAC1E,YAAY,EAAE,OAAO,CAAC,UAAU;iBAChC,CAAC;gBACF,IAAI,MAAM,CAAC,sBAAsB,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;oBACtD,aAAqB,CAAC,IAAI,GAAG,OAAO,CAAC,QAAQ,CAAC;gBAChD,CAAC;gBACD,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;gBAE3B,IAAI,SAAS,IAAI,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;oBAChD,KAAK,MAAM,KAAK,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;wBACrC,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;4BAC5B,WAAW,CAAC,IAAI,CAAC;gCAChB,IAAI,EAAE,WAAW;gCACjB,SAAS,EAAE;oCACV,GAAG,EAAE,QAAS,KAAa,CAAC,QAAQ,WAAY,KAAa,CAAC,IAAI,EAAE;iCACpE;6BACD,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;gBACF,CAAC;YACF,CAAC;YAED,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEV,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC5B,IAAI,MAAM,CAAC,gCAAgC,EAAE,C
AAC;oBAC7C,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,WAAW;wBACjB,OAAO,EAAE,oCAAoC;qBAC7C,CAAC,CAAC;gBACJ,CAAC;gBAED,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE;wBACR;4BACC,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,qCAAqC;yBAC3C;wBACD,GAAG,WAAW;qBACd;iBACD,CAAC,CAAC;gBACH,QAAQ,GAAG,MAAM,CAAC;YACnB,CAAC;iBAAM,CAAC;gBACP,QAAQ,GAAG,YAAY,CAAC;YACzB,CAAC;YACD,SAAS;QACV,CAAC;QAED,QAAQ,GAAG,GAAG,CAAC,IAAI,CAAC;IACrB,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,YAAY,CAAC,KAAa,EAAgD;IAClF,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC3B,IAAI,EAAE,UAAU;QAChB,QAAQ,EAAE;YACT,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,UAAU,EAAE,IAAI,CAAC,UAAiB,EAAE,wCAAwC;YAC5E,MAAM,EAAE,KAAK,EAAE,uEAAuE;SACtF;KACD,CAAC,CAAC,CAAC;AAAA,CACJ;AAED,SAAS,aAAa,CAAC,MAAmD,EAAc;IACvF,IAAI,MAAM,KAAK,IAAI;QAAE,OAAO,MAAM,CAAC;IACnC,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,MAAM;YACV,OAAO,MAAM,CAAC;QACf,KAAK,QAAQ;YACZ,OAAO,QAAQ,CAAC;QACjB,KAAK,eAAe,CAAC;QACrB,KAAK,YAAY;YAChB,OAAO,SAAS,CAAC;QAClB,KAAK,gBAAgB;YACpB,OAAO,OAAO,CAAC;QAChB,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD;AAED;;;;GAIG;AACH,SAAS,YAAY,CAAC,KAAkC,EAAqC;IAC5F,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IAE9B,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;IAEjE,MAAM,aAAa,GAClB,QAAQ,KAAK,UAAU;QACvB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC;QAC/B,QAAQ,KAAK,KAAK;QAClB,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC;QAC5B,QAAQ,KAAK,SAAS;QACtB,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;QAC9B,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC;QAC7B,KAAK;QACL,QAAQ,KAAK,UAAU;QACvB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;IAEjC,MAAM,YAAY,GAAG,QAAQ,KAAK,SAAS,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IAE/G,MAAM,MAAM,GAAG,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;IAElE,MAAM,SAAS,GAAG,QAAQ,KAAK,SAAS,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE3E,OAAO;QACN,aAAa,EAAE,CAAC,aAAa;QAC7B,qBAAqB,EAAE,CAAC,aAAa;QACrC,uBAAuB,EAAE,CAAC,MAAM,IAAI,CAAC,KAAK;QAC1C,wBAAwB,EAAE,IAAI;QAC9B,cAAc,EAAE,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,uBAAuB;QACrE,sBAAsB,EAAE,SAAS;QACjC,gCAAgC,EAAE,KAAK,EAAE,iDAAiD;QAC1F,sBAAsB,EAAE,SAAS;QACjC,sBAAsB,EAAE,SAAS;QACjC,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ;QACxC,iBAAiB,EAAE,EAAE;KACrB,CAAC;AAAA,CACF;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,KAAkC,EAAqC;IACzF,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,CAAC,KAAK,CAAC,MAAM;QAAE,OAAO,QAAQ,CAAC;IAEnC,OAAO;QACN,aAAa,EAAE,KAAK,CAAC,MAAM,CAAC,aAAa,IAAI,QAAQ,CAAC,aAAa;QACnE,qBAAqB,EAAE,KAAK,CAAC,MAAM,CAAC,qBAAqB,IAAI,QAAQ,CAAC,qBAAqB;QAC3F,uBAAuB,EAAE,KAAK,CAAC,MAAM,CAAC,uBAAuB,IAAI,QAAQ,CAAC,uBAAuB;QACjG,wBAAwB,EAAE,KAAK,CAAC,MAAM,CAAC,wBAAwB,IAAI,QAAQ,CAAC,wBAAwB;QACpG,cAAc,EAAE,KAAK,CAAC,MAAM,CAAC,cAAc,IAAI,QAAQ,CAAC,cAAc;QACtE,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,gCAAgC,EAC/B,KAAK,CAAC,MAAM,CAAC,gCAAgC,IAAI,QAAQ,CAAC,gCAAgC;QAC3F,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,cAAc,EAAE,KAAK,CAAC,MAAM,CAAC,cAAc,IAAI,QAAQ,CAAC,cAAc;QACtE,iBAAiB,EAAE,KAAK,CAAC,MAAM,CAAC,iBAAiB,IAAI,EAAE;KACvD,CAAC;AAAA,CACF","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletionAssistantMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionMessageParam,\n\tChatCompletionToolMessageParam,\n} from \"openai/resources/chat/completions.js\";\nimport { 
getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, supportsXhigh } from \"../models.js\";\nimport type {\n\tAssistantMessage,\n\tContext,\n\tMessage,\n\tModel,\n\tOpenAICompletionsCompat,\n\tSimpleStreamOptions,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { buildBaseOptions, clampReasoning } from \"./simple-options.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n/**\n * Normalize tool call ID for Mistral.\n * Mistral requires tool IDs to be exactly 9 alphanumeric characters (a-z, A-Z, 0-9).\n */\nfunction normalizeMistralToolId(id: string): string {\n\t// Remove non-alphanumeric characters\n\tlet normalized = id.replace(/[^a-zA-Z0-9]/g, \"\");\n\t// Mistral requires exactly 9 characters\n\tif (normalized.length < 9) {\n\t\t// Pad with deterministic characters based on original ID to ensure matching\n\t\tconst padding = \"ABCDEFGHI\";\n\t\tnormalized = normalized + padding.slice(0, 9 - normalized.length);\n\t} else if (normalized.length > 9) {\n\t\tnormalized = normalized.slice(0, 9);\n\t}\n\treturn normalized;\n}\n\n/**\n * Check if conversation messages contain tool calls or tool results.\n * This is needed because Anthropic (via proxy) requires the tools param\n * to be present when messages include tool_calls or tool role messages.\n */\nfunction hasToolHistory(messages: Message[]): boolean {\n\tfor (const msg of messages) {\n\t\tif (msg.role === \"toolResult\") {\n\t\t\treturn true;\n\t\t}\n\t\tif (msg.role === \"assistant\") {\n\t\t\tif (msg.content.some((block) => block.type === \"toolCall\")) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n}\n\nexport interface OpenAICompletionsOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"required\" | { type: \"function\"; function: { name: string } };\n\treasoningEffort?: \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n}\n\nexport const streamOpenAICompletions: StreamFunction<\"openai-completions\", OpenAICompletionsOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: OpenAICompletionsOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: model.api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tconst client = createClient(model, context, apiKey, options?.headers);\n\t\t\tconst params = buildParams(model, context, options);\n\t\t\toptions?.onPayload?.(params);\n\t\t\tconst openaiStream = await client.chat.completions.create(params, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentBlock: TextContent | ThinkingContent | (ToolCall & { partialArgs?: string }) | null = null;\n\t\t\tconst blocks = 
output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tconst finishCurrentBlock = (block?: typeof currentBlock) => {\n\t\t\t\tif (block) {\n\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\tblock.arguments = JSON.parse(block.partialArgs || \"{}\");\n\t\t\t\t\t\tdelete block.partialArgs;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tfor await (const chunk of openaiStream) {\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tconst cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0;\n\t\t\t\t\tconst reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0;\n\t\t\t\t\tconst input = (chunk.usage.prompt_tokens || 0) - cachedTokens;\n\t\t\t\t\tconst outputTokens = (chunk.usage.completion_tokens || 0) + reasoningTokens;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t// OpenAI includes cached tokens in prompt_tokens, so subtract to get non-cached input\n\t\t\t\t\t\tinput,\n\t\t\t\t\t\toutput: outputTokens,\n\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t// Compute totalTokens ourselves since we add reasoning_tokens to output\n\t\t\t\t\t\t// and some providers (e.g., Groq) don't include them in total_tokens\n\t\t\t\t\t\ttotalTokens: input + outputTokens + cachedTokens,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\n\t\t\t\tconst choice = chunk.choices[0];\n\t\t\t\tif (!choice) continue;\n\n\t\t\t\tif (choice.finish_reason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(choice.finish_reason);\n\t\t\t\t}\n\n\t\t\t\tif (choice.delta) {\n\t\t\t\t\tif (\n\t\t\t\t\t\tchoice.delta.content !== null &&\n\t\t\t\t\t\tchoice.delta.content !== undefined &&\n\t\t\t\t\t\tchoice.delta.content.length > 0\n\t\t\t\t\t) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\tcurrentBlock.text += choice.delta.content;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: choice.delta.content,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// Some endpoints return reasoning in reasoning_content (llama.cpp),\n\t\t\t\t\t// or reasoning (other openai compatible endpoints)\n\t\t\t\t\t// Use the first non-empty reasoning field to avoid duplication\n\t\t\t\t\t// (e.g., chutes.ai returns both reasoning_content 
and reasoning with same content)\n\t\t\t\t\tconst reasoningFields = [\"reasoning_content\", \"reasoning\", \"reasoning_text\"];\n\t\t\t\t\tlet foundReasoningField: string | null = null;\n\t\t\t\t\tfor (const field of reasoningFields) {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== null &&\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== undefined &&\n\t\t\t\t\t\t\t(choice.delta as any)[field].length > 0\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tif (!foundReasoningField) {\n\t\t\t\t\t\t\t\tfoundReasoningField = field;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (foundReasoningField) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"thinking\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\t\tthinkingSignature: foundReasoningField,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\tconst delta = (choice.delta as any)[foundReasoningField];\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (choice?.delta?.tool_calls) {\n\t\t\t\t\t\tfor (const toolCall of choice.delta.tool_calls) {\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\tcurrentBlock.type !== \"toolCall\" ||\n\t\t\t\t\t\t\t\t(toolCall.id && currentBlock.id !== toolCall.id)\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\t\tid: toolCall.id || \"\",\n\t\t\t\t\t\t\t\t\tname: toolCall.function?.name || \"\",\n\t\t\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\t\t\tpartialArgs: \"\",\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tif (currentBlock.type === \"toolCall\") {\n\t\t\t\t\t\t\t\tif (toolCall.id) currentBlock.id = toolCall.id;\n\t\t\t\t\t\t\t\tif (toolCall.function?.name) currentBlock.name = toolCall.function.name;\n\t\t\t\t\t\t\t\tlet delta = \"\";\n\t\t\t\t\t\t\t\tif (toolCall.function?.arguments) {\n\t\t\t\t\t\t\t\t\tdelta = toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.partialArgs += toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialArgs);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tconst reasoningDetails = (choice.delta as any).reasoning_details;\n\t\t\t\t\tif (reasoningDetails && Array.isArray(reasoningDetails)) {\n\t\t\t\t\t\tfor (const detail of reasoningDetails) {\n\t\t\t\t\t\t\tif (detail.type === \"reasoning.encrypted\" && detail.id && detail.data) {\n\t\t\t\t\t\t\t\tconst matchingToolCall = output.content.find(\n\t\t\t\t\t\t\t\t\t(b) => b.type === \"toolCall\" && b.id === detail.id,\n\t\t\t\t\t\t\t\t) as ToolCall 
| undefined;\n\t\t\t\t\t\t\t\tif (matchingToolCall) {\n\t\t\t\t\t\t\t\t\tmatchingToolCall.thoughtSignature = JSON.stringify(detail);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfinishCurrentBlock(currentBlock);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\t// Some providers via OpenRouter give additional information in this field.\n\t\t\tconst rawMetadata = (error as any)?.error?.metadata?.raw;\n\t\t\tif (rawMetadata) output.errorMessage += `\\n${rawMetadata}`;\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nexport const streamSimpleOpenAICompletions: StreamFunction<\"openai-completions\", SimpleStreamOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tconst reasoningEffort = supportsXhigh(model) ? options?.reasoning : clampReasoning(options?.reasoning);\n\n\treturn streamOpenAICompletions(model, context, {\n\t\t...base,\n\t\treasoningEffort,\n\t} satisfies OpenAICompletionsOptions);\n};\n\nfunction createClient(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tapiKey?: string,\n\toptionsHeaders?: Record<string, string>,\n) {\n\tif (!apiKey) {\n\t\tif (!process.env.OPENAI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"OpenAI API key is required. Set OPENAI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.OPENAI_API_KEY;\n\t}\n\n\tconst headers = { ...model.headers };\n\tif (model.provider === \"github-copilot\") {\n\t\t// Copilot expects X-Initiator to indicate whether the request is user-initiated\n\t\t// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is\n\t\t// no prior message, default to user-initiated.\n\t\tconst messages = context.messages || [];\n\t\tconst lastMessage = messages[messages.length - 1];\n\t\tconst isAgentCall = lastMessage ? lastMessage.role !== \"user\" : false;\n\t\theaders[\"X-Initiator\"] = isAgentCall ? 
\"agent\" : \"user\";\n\t\theaders[\"Openai-Intent\"] = \"conversation-edits\";\n\n\t\t// Copilot requires this header when sending images\n\t\tconst hasImages = messages.some((msg) => {\n\t\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\treturn false;\n\t\t});\n\t\tif (hasImages) {\n\t\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t\t}\n\t}\n\n\t// Merge options headers last so they can override defaults\n\tif (optionsHeaders) {\n\t\tObject.assign(headers, optionsHeaders);\n\t}\n\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: headers,\n\t});\n}\n\nfunction buildParams(model: Model<\"openai-completions\">, context: Context, options?: OpenAICompletionsOptions) {\n\tconst compat = getCompat(model);\n\tconst messages = convertMessages(model, context, compat);\n\tmaybeAddOpenRouterAnthropicCacheControl(model, messages);\n\n\tconst params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages,\n\t\tstream: true,\n\t};\n\n\tif (compat.supportsUsageInStreaming !== false) {\n\t\t(params as any).stream_options = { include_usage: true };\n\t}\n\n\tif (compat.supportsStore) {\n\t\tparams.store = false;\n\t}\n\n\tif (options?.maxTokens) {\n\t\tif (compat.maxTokensField === \"max_tokens\") {\n\t\t\t(params as any).max_tokens = options.maxTokens;\n\t\t} else {\n\t\t\tparams.max_completion_tokens = options.maxTokens;\n\t\t}\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools);\n\t} else if (hasToolHistory(context.messages)) {\n\t\t// Anthropic (via LiteLLM/proxy) requires tools param when conversation has tool_calls/tool_results\n\t\tparams.tools = [];\n\t}\n\n\tif (options?.toolChoice) {\n\t\tparams.tool_choice = options.toolChoice;\n\t}\n\n\tif (compat.thinkingFormat === \"zai\" && model.reasoning) {\n\t\t// Z.ai uses binary thinking: { type: \"enabled\" | \"disabled\" }\n\t\t// Must explicitly disable since z.ai defaults to thinking enabled\n\t\t(params as any).thinking = { type: options?.reasoningEffort ? \"enabled\" : \"disabled\" };\n\t} else if (options?.reasoningEffort && model.reasoning && compat.supportsReasoningEffort) {\n\t\t// OpenAI-style reasoning_effort\n\t\tparams.reasoning_effort = options.reasoningEffort;\n\t}\n\n\t// OpenRouter provider routing preferences\n\tif (model.baseUrl.includes(\"openrouter.ai\") && model.compat?.openRouterRouting) {\n\t\t(params as any).provider = model.compat.openRouterRouting;\n\t}\n\n\treturn params;\n}\n\nfunction maybeAddOpenRouterAnthropicCacheControl(\n\tmodel: Model<\"openai-completions\">,\n\tmessages: ChatCompletionMessageParam[],\n): void {\n\tif (model.provider !== \"openrouter\" || !model.id.startsWith(\"anthropic/\")) return;\n\n\t// Anthropic-style caching requires cache_control on a text part. 
Add a breakpoint\n\t// on the last user/assistant message (walking backwards until we find text content).\n\tfor (let i = messages.length - 1; i >= 0; i--) {\n\t\tconst msg = messages[i];\n\t\tif (msg.role !== \"user\" && msg.role !== \"assistant\") continue;\n\n\t\tconst content = msg.content;\n\t\tif (typeof content === \"string\") {\n\t\t\tmsg.content = [\n\t\t\t\tObject.assign({ type: \"text\" as const, text: content }, { cache_control: { type: \"ephemeral\" } }),\n\t\t\t];\n\t\t\treturn;\n\t\t}\n\n\t\tif (!Array.isArray(content)) continue;\n\n\t\t// Find last text part and add cache_control\n\t\tfor (let j = content.length - 1; j >= 0; j--) {\n\t\t\tconst part = content[j];\n\t\t\tif (part?.type === \"text\") {\n\t\t\t\tObject.assign(part, { cache_control: { type: \"ephemeral\" } });\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n}\n\nexport function convertMessages(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tcompat: Required<OpenAICompletionsCompat>,\n): ChatCompletionMessageParam[] {\n\tconst params: ChatCompletionMessageParam[] = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (compat.requiresMistralToolIds) return normalizeMistralToolId(id);\n\t\tif (model.provider === \"openai\") return id.length > 40 ? id.slice(0, 40) : id;\n\t\t// Copilot Claude models route to Claude backend which requires Anthropic ID format\n\t\tif (model.provider === \"github-copilot\" && model.id.toLowerCase().includes(\"claude\")) {\n\t\t\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 64);\n\t\t}\n\t\treturn id;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, (id) => normalizeToolCallId(id));\n\n\tif (context.systemPrompt) {\n\t\tconst useDeveloperRole = model.reasoning && compat.supportsDeveloperRole;\n\t\tconst role = useDeveloperRole ? \"developer\" : \"system\";\n\t\tparams.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });\n\t}\n\n\tlet lastRole: string | null = null;\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\t\t// Some providers (e.g. Mistral/Devstral) don't allow user messages directly after tool results\n\t\t// Insert a synthetic assistant message to bridge the gap\n\t\tif (compat.requiresAssistantAfterToolResult && lastRole === \"toolResult\" && msg.role === \"user\") {\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t});\n\t\t}\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ChatCompletionContentPart[] = msg.content.map((item): ChatCompletionContentPart => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartImage;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? 
content.filter((c) => c.type !== \"image_url\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\t// Some providers (e.g. Mistral) don't accept null content, use empty string instead\n\t\t\tconst assistantMsg: ChatCompletionAssistantMessageParam = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: compat.requiresAssistantAfterToolResult ? \"\" : null,\n\t\t\t};\n\n\t\t\tconst textBlocks = msg.content.filter((b) => b.type === \"text\") as TextContent[];\n\t\t\t// Filter out empty text blocks to avoid API validation errors\n\t\t\tconst nonEmptyTextBlocks = textBlocks.filter((b) => b.text && b.text.trim().length > 0);\n\t\t\tif (nonEmptyTextBlocks.length > 0) {\n\t\t\t\t// GitHub Copilot requires assistant content as a string, not an array.\n\t\t\t\t// Sending as array causes Claude models to re-answer all previous prompts.\n\t\t\t\tif (model.provider === \"github-copilot\") {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => sanitizeSurrogates(b.text)).join(\"\");\n\t\t\t\t} else {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => {\n\t\t\t\t\t\treturn { type: \"text\", text: sanitizeSurrogates(b.text) };\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle thinking blocks\n\t\t\tconst thinkingBlocks = msg.content.filter((b) => b.type === \"thinking\") as ThinkingContent[];\n\t\t\t// Filter out empty thinking blocks to avoid API validation errors\n\t\t\tconst nonEmptyThinkingBlocks = thinkingBlocks.filter((b) => b.thinking && b.thinking.trim().length > 0);\n\t\t\tif (nonEmptyThinkingBlocks.length > 0) {\n\t\t\t\tif (compat.requiresThinkingAsText) {\n\t\t\t\t\t// Convert thinking blocks to plain text (no tags to avoid model mimicking them)\n\t\t\t\t\tconst thinkingText = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\\n\");\n\t\t\t\t\tconst textContent = assistantMsg.content as Array<{ type: \"text\"; text: string }> | null;\n\t\t\t\t\tif (textContent) {\n\t\t\t\t\t\ttextContent.unshift({ type: \"text\", text: thinkingText });\n\t\t\t\t\t} else {\n\t\t\t\t\t\tassistantMsg.content = [{ type: \"text\", text: thinkingText }];\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// Use the signature from the first thinking block if available (for llama.cpp server + gpt-oss)\n\t\t\t\t\tconst signature = nonEmptyThinkingBlocks[0].thinkingSignature;\n\t\t\t\t\tif (signature && signature.length > 0) {\n\t\t\t\t\t\t(assistantMsg as any)[signature] = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tconst toolCalls = msg.content.filter((b) => b.type === \"toolCall\") as ToolCall[];\n\t\t\tif (toolCalls.length > 0) {\n\t\t\t\tassistantMsg.tool_calls = toolCalls.map((tc) => ({\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\ttype: \"function\" as const,\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: tc.name,\n\t\t\t\t\t\targuments: JSON.stringify(tc.arguments),\n\t\t\t\t\t},\n\t\t\t\t}));\n\t\t\t\tconst reasoningDetails = toolCalls\n\t\t\t\t\t.filter((tc) => tc.thoughtSignature)\n\t\t\t\t\t.map((tc) => {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\treturn JSON.parse(tc.thoughtSignature!);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\treturn null;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.filter(Boolean);\n\t\t\t\tif (reasoningDetails.length > 0) {\n\t\t\t\t\t(assistantMsg as any).reasoning_details = reasoningDetails;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Skip assistant messages that have no 
content and no tool calls.\n\t\t\t// Mistral explicitly requires \"either content or tool_calls, but not none\".\n\t\t\t// Other providers also don't accept empty assistant messages.\n\t\t\t// This handles aborted assistant responses that got no content.\n\t\t\tconst content = assistantMsg.content;\n\t\t\tconst hasContent =\n\t\t\t\tcontent !== null &&\n\t\t\t\tcontent !== undefined &&\n\t\t\t\t(typeof content === \"string\" ? content.length > 0 : content.length > 0);\n\t\t\tif (!hasContent && !assistantMsg.tool_calls) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tparams.push(assistantMsg);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst imageBlocks: Array<{ type: \"image_url\"; image_url: { url: string } }> = [];\n\t\t\tlet j = i;\n\n\t\t\tfor (; j < transformedMessages.length && transformedMessages[j].role === \"toolResult\"; j++) {\n\t\t\t\tconst toolMsg = transformedMessages[j] as ToolResultMessage;\n\n\t\t\t\t// Extract text and image content\n\t\t\t\tconst textResult = toolMsg.content\n\t\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t\t.join(\"\\n\");\n\t\t\t\tconst hasImages = toolMsg.content.some((c) => c.type === \"image\");\n\n\t\t\t\t// Always send tool result with text (or placeholder if only images)\n\t\t\t\tconst hasText = textResult.length > 0;\n\t\t\t\t// Some providers (e.g. Mistral) require the 'name' field in tool results\n\t\t\t\tconst toolResultMsg: ChatCompletionToolMessageParam = {\n\t\t\t\t\trole: \"tool\",\n\t\t\t\t\tcontent: sanitizeSurrogates(hasText ? textResult : \"(see attached image)\"),\n\t\t\t\t\ttool_call_id: toolMsg.toolCallId,\n\t\t\t\t};\n\t\t\t\tif (compat.requiresToolResultName && toolMsg.toolName) {\n\t\t\t\t\t(toolResultMsg as any).name = toolMsg.toolName;\n\t\t\t\t}\n\t\t\t\tparams.push(toolResultMsg);\n\n\t\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\t\tfor (const block of toolMsg.content) {\n\t\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\t\timageBlocks.push({\n\t\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\t\turl: `data:${(block as any).mimeType};base64,${(block as any).data}`,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ti = j - 1;\n\n\t\t\tif (imageBlocks.length > 0) {\n\t\t\t\tif (compat.requiresAssistantAfterToolResult) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t...imageBlocks,\n\t\t\t\t\t],\n\t\t\t\t});\n\t\t\t\tlastRole = \"user\";\n\t\t\t} else {\n\t\t\t\tlastRole = \"toolResult\";\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\n\t\tlastRole = msg.role;\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[]): OpenAI.Chat.Completions.ChatCompletionTool[] {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\tstrict: false, // Disable strict mode to allow optional parameters without null unions\n\t\t},\n\t}));\n}\n\nfunction mapStopReason(reason: ChatCompletionChunk.Choice[\"finish_reason\"]): StopReason {\n\tif (reason === null) return \"stop\";\n\tswitch (reason) {\n\t\tcase 
\"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\t\treturn \"length\";\n\t\tcase \"function_call\":\n\t\tcase \"tool_calls\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"content_filter\":\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\n/**\n * Detect compatibility settings from provider and baseUrl for known providers.\n * Provider takes precedence over URL-based detection since it's explicitly configured.\n * Returns a fully resolved OpenAICompletionsCompat object with all fields set.\n */\nfunction detectCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst provider = model.provider;\n\tconst baseUrl = model.baseUrl;\n\n\tconst isZai = provider === \"zai\" || baseUrl.includes(\"api.z.ai\");\n\n\tconst isNonStandard =\n\t\tprovider === \"cerebras\" ||\n\t\tbaseUrl.includes(\"cerebras.ai\") ||\n\t\tprovider === \"xai\" ||\n\t\tbaseUrl.includes(\"api.x.ai\") ||\n\t\tprovider === \"mistral\" ||\n\t\tbaseUrl.includes(\"mistral.ai\") ||\n\t\tbaseUrl.includes(\"chutes.ai\") ||\n\t\tisZai ||\n\t\tprovider === \"opencode\" ||\n\t\tbaseUrl.includes(\"opencode.ai\");\n\n\tconst useMaxTokens = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\") || baseUrl.includes(\"chutes.ai\");\n\n\tconst isGrok = provider === \"xai\" || baseUrl.includes(\"api.x.ai\");\n\n\tconst isMistral = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\");\n\n\treturn {\n\t\tsupportsStore: !isNonStandard,\n\t\tsupportsDeveloperRole: !isNonStandard,\n\t\tsupportsReasoningEffort: !isGrok && !isZai,\n\t\tsupportsUsageInStreaming: true,\n\t\tmaxTokensField: useMaxTokens ? \"max_tokens\" : \"max_completion_tokens\",\n\t\trequiresToolResultName: isMistral,\n\t\trequiresAssistantAfterToolResult: false, // Mistral no longer requires this as of Dec 2024\n\t\trequiresThinkingAsText: isMistral,\n\t\trequiresMistralToolIds: isMistral,\n\t\tthinkingFormat: isZai ? \"zai\" : \"openai\",\n\t\topenRouterRouting: {},\n\t};\n}\n\n/**\n * Get resolved compatibility settings for a model.\n * Uses explicit model.compat if provided, otherwise auto-detects from provider/URL.\n */\nfunction getCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst detected = detectCompat(model);\n\tif (!model.compat) return detected;\n\n\treturn {\n\t\tsupportsStore: model.compat.supportsStore ?? detected.supportsStore,\n\t\tsupportsDeveloperRole: model.compat.supportsDeveloperRole ?? detected.supportsDeveloperRole,\n\t\tsupportsReasoningEffort: model.compat.supportsReasoningEffort ?? detected.supportsReasoningEffort,\n\t\tsupportsUsageInStreaming: model.compat.supportsUsageInStreaming ?? detected.supportsUsageInStreaming,\n\t\tmaxTokensField: model.compat.maxTokensField ?? detected.maxTokensField,\n\t\trequiresToolResultName: model.compat.requiresToolResultName ?? detected.requiresToolResultName,\n\t\trequiresAssistantAfterToolResult:\n\t\t\tmodel.compat.requiresAssistantAfterToolResult ?? detected.requiresAssistantAfterToolResult,\n\t\trequiresThinkingAsText: model.compat.requiresThinkingAsText ?? detected.requiresThinkingAsText,\n\t\trequiresMistralToolIds: model.compat.requiresMistralToolIds ?? detected.requiresMistralToolIds,\n\t\tthinkingFormat: model.compat.thinkingFormat ?? detected.thinkingFormat,\n\t\topenRouterRouting: model.compat.openRouterRouting ?? {},\n\t};\n}\n"]}
|
@@ -1 +1 @@
|
+
{"version":3,"file":"openai-completions.js","sourceRoot":"","sources":["../../src/providers/openai-completions.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAU5B,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAiB5D,OAAO,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,EAAE,kBAAkB,EAAE,MAAM,wBAAwB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,8BAA8B,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACvE,OAAO,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AAE5D;;;GAGG;AACH,SAAS,sBAAsB,CAAC,EAAU,EAAU;IACnD,qCAAqC;IACrC,IAAI,UAAU,GAAG,EAAE,CAAC,OAAO,CAAC,eAAe,EAAE,EAAE,CAAC,CAAC;IACjD,wCAAwC;IACxC,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC3B,4EAA4E;QAC5E,MAAM,OAAO,GAAG,WAAW,CAAC;QAC5B,UAAU,GAAG,UAAU,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;IACnE,CAAC;SAAM,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAClC,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACrC,CAAC;IACD,OAAO,UAAU,CAAC;AAAA,CAClB;AAED;;;;GAIG;AACH,SAAS,cAAc,CAAC,QAAmB,EAAW;IACrD,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YAC/B,OAAO,IAAI,CAAC;QACb,CAAC;QACD,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,IAAI,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,KAAK,UAAU,CAAC,EAAE,CAAC;gBAC5D,OAAO,IAAI,CAAC;YACb,CAAC;QACF,CAAC;IACF,CAAC;IACD,OAAO,KAAK,CAAC;AAAA,CACb;AAOD,MAAM,CAAC,MAAM,uBAAuB,GAAmE,CACtG,KAAkC,EAClC,OAAgB,EAChB,OAAkC,EACJ,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,IAAI,2BAA2B,EAAE,CAAC;IAEjD,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,MAAM,GAAqB;YAChC,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,KAAK,CAAC,GAAG;YACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,UAAU,EAAE,MAAM;YAClB,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,CAAC;YACJ,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACrE,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACtE,MAAM,MAAM,GAAG,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YACpD,OAAO,EAAE,SAAS,EAAE,CAAC,MAAM,CAAC,CAAC;YAC7B,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/F,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAEhD,IAAI,YAAY,GAAiF,IAAI,CAAC;YACtG,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;YAC9B,MAAM,UAAU,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC;YAC3C,MAAM,kBAAkB,GAAG,CAAC,KAA2B,EAAE,EAAE,CAAC;gBAC3D,IAAI,KAAK,EAAE,CAAC;oBACX,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC3B,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,UAAU;4BAChB,YAAY,EAAE,UAAU,EAAE;4BAC1B,OAAO,EAAE,KAAK,CAAC,IAAI;4BACnB,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACtC,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,cAAc;4BACpB,YAAY,EAAE,UAAU,EAAE;4BAC1B,OAAO,EAAE,KAAK,CAAC,QAAQ;4BACvB,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;yBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;wBACtC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,IAAI,IAAI,CAAC,CAAC;wBACxD,OAAO,KAAK,CAAC,WAAW,CAAC;wBACzB,MAAM,CAAC,IAAI,CAAC;4BACX,IAAI,EAAE,cAAc;4BACpB,YAAY,EAAE,UAAU,EAAE;4BAC1B,QAAQ,EAAE,KAAK;4BACf,OAAO,EAAE,MAAM;yBACf,CAAC,CAAC;oBACJ,CAAC;gBACF,CAAC;YAAA,CACD,CAAC;YAEF,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;gBACxC,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;oBACjB,MAAM,YAAY,GAAG
,KAAK,CAAC,KAAK,CAAC,qBAAqB,EAAE,aAAa,IAAI,CAAC,CAAC;oBAC3E,MAAM,eAAe,GAAG,KAAK,CAAC,KAAK,CAAC,yBAAyB,EAAE,gBAAgB,IAAI,CAAC,CAAC;oBACrF,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,aAAa,IAAI,CAAC,CAAC,GAAG,YAAY,CAAC;oBAC9D,MAAM,YAAY,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,iBAAiB,IAAI,CAAC,CAAC,GAAG,eAAe,CAAC;oBAC5E,MAAM,CAAC,KAAK,GAAG;wBACd,sFAAsF;wBACtF,KAAK;wBACL,MAAM,EAAE,YAAY;wBACpB,SAAS,EAAE,YAAY;wBACvB,UAAU,EAAE,CAAC;wBACb,wEAAwE;wBACxE,qEAAqE;wBACrE,WAAW,EAAE,KAAK,GAAG,YAAY,GAAG,YAAY;wBAChD,IAAI,EAAE;4BACL,KAAK,EAAE,CAAC;4BACR,MAAM,EAAE,CAAC;4BACT,SAAS,EAAE,CAAC;4BACZ,UAAU,EAAE,CAAC;4BACb,KAAK,EAAE,CAAC;yBACR;qBACD,CAAC;oBACF,aAAa,CAAC,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;gBACpC,CAAC;gBAED,MAAM,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;gBAChC,IAAI,CAAC,MAAM;oBAAE,SAAS;gBAEtB,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;oBAC1B,MAAM,CAAC,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;gBACzD,CAAC;gBAED,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;oBAClB,IACC,MAAM,CAAC,KAAK,CAAC,OAAO,KAAK,IAAI;wBAC7B,MAAM,CAAC,KAAK,CAAC,OAAO,KAAK,SAAS;wBAClC,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,EAC9B,CAAC;wBACF,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BACnD,kBAAkB,CAAC,YAAY,CAAC,CAAC;4BACjC,YAAY,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;4BAC1C,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;4BAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBAClF,CAAC;wBAED,IAAI,YAAY,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;4BAClC,YAAY,CAAC,IAAI,IAAI,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC;4BAC1C,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,YAAY;gCAClB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO;gCAC3B,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;oBAED,oEAAoE;oBACpE,mDAAmD;oBACnD,+DAA+D;oBAC/D,mFAAmF;oBACnF,MAAM,eAAe,GAAG,CAAC,mBAAmB,EAAE,WAAW,EAAE,gBAAgB,CAAC,CAAC;oBAC7E,IAAI,mBAAmB,GAAkB,IAAI,CAAC;oBAC9C,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE,CAAC;wBACrC,IACE,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,KAAK,IAAI;4BACpC,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,KAAK,SAAS;4BACzC,MAAM,CAAC,KAAa,CAAC,KAAK,CAAC,CAAC,MAAM,GAAG,CAAC,EACtC,CAAC;4BACF,IAAI,CAAC,mBAAmB,EAAE,CAAC;gCAC1B,mBAAmB,GAAG,KAAK,CAAC;gCAC5B,MAAM;4BACP,CAAC;wBACF,CAAC;oBACF,CAAC;oBAED,IAAI,mBAAmB,EAAE,CAAC;wBACzB,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACvD,kBAAkB,CAAC,YAAY,CAAC,CAAC;4BACjC,YAAY,GAAG;gCACd,IAAI,EAAE,UAAU;gCAChB,QAAQ,EAAE,EAAE;gCACZ,iBAAiB,EAAE,mBAAmB;6BACtC,CAAC;4BACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;4BAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;wBACtF,CAAC;wBAED,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;4BACtC,MAAM,KAAK,GAAI,MAAM,CAAC,KAAa,CAAC,mBAAmB,CAAC,CAAC;4BACzD,YAAY,CAAC,QAAQ,IAAI,KAAK,CAAC;4BAC/B,MAAM,CAAC,IAAI,CAAC;gCACX,IAAI,EAAE,gBAAgB;gCACtB,YAAY,EAAE,UAAU,EAAE;gCAC1B,KAAK;gCACL,OAAO,EAAE,MAAM;6BACf,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;oBAED,IAAI,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,CAAC;wBAC/B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC;4BAChD,IACC,CAAC,YAAY;gCACb,YAAY,CAAC,IAAI,KAAK,UAAU;gCAChC,CAAC,QAAQ,CAAC,EAAE,IAAI,YAAY,CAAC,EAAE,KAAK,QAAQ,CAAC,EAAE,CAAC,EAC/C,CAAC;gCACF,kBAAkB,CAAC,YAAY,CAAC,CAAC;gCACjC,YAAY,GAAG;oCACd,IAAI,EAAE,UAAU;oCAChB,EAAE,EAAE,QAAQ,CAAC,EAAE,IAAI,EAAE;oCACrB,IAAI,EAAE,QAAQ,CAAC,QAAQ,EAAE,IAAI,IAAI,EAAE;oCACnC,SAAS,EAAE,EAAE;oCACb,WAAW,EAAE,EAAE;iCACf,CAAC;gCACF,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gCAClC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;4BACtF,CAAC;4BAED,IAAI,YAAY,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;gCACtC,IAAI,QAAQ,CAAC,EAAE;oCAAE,YA
AY,CAAC,EAAE,GAAG,QAAQ,CAAC,EAAE,CAAC;gCAC/C,IAAI,QAAQ,CAAC,QAAQ,EAAE,IAAI;oCAAE,YAAY,CAAC,IAAI,GAAG,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC;gCACxE,IAAI,KAAK,GAAG,EAAE,CAAC;gCACf,IAAI,QAAQ,CAAC,QAAQ,EAAE,SAAS,EAAE,CAAC;oCAClC,KAAK,GAAG,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oCACpC,YAAY,CAAC,WAAW,IAAI,QAAQ,CAAC,QAAQ,CAAC,SAAS,CAAC;oCACxD,YAAY,CAAC,SAAS,GAAG,kBAAkB,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC;gCACvE,CAAC;gCACD,MAAM,CAAC,IAAI,CAAC;oCACX,IAAI,EAAE,gBAAgB;oCACtB,YAAY,EAAE,UAAU,EAAE;oCAC1B,KAAK;oCACL,OAAO,EAAE,MAAM;iCACf,CAAC,CAAC;4BACJ,CAAC;wBACF,CAAC;oBACF,CAAC;oBAED,MAAM,gBAAgB,GAAI,MAAM,CAAC,KAAa,CAAC,iBAAiB,CAAC;oBACjE,IAAI,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,EAAE,CAAC;wBACzD,KAAK,MAAM,MAAM,IAAI,gBAAgB,EAAE,CAAC;4BACvC,IAAI,MAAM,CAAC,IAAI,KAAK,qBAAqB,IAAI,MAAM,CAAC,EAAE,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;gCACvE,MAAM,gBAAgB,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAC3C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,CAAC,EAAE,KAAK,MAAM,CAAC,EAAE,CAC1B,CAAC;gCAC1B,IAAI,gBAAgB,EAAE,CAAC;oCACtB,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;gCAC5D,CAAC;4BACF,CAAC;wBACF,CAAC;oBACF,CAAC;gBACF,CAAC;YACF,CAAC;YAED,kBAAkB,CAAC,YAAY,CAAC,CAAC;YAEjC,IAAI,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC9B,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;YACxC,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,IAAI,MAAM,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC;YAC9C,CAAC;YAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YAC1E,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,OAAO;gBAAE,OAAQ,KAAa,CAAC,KAAK,CAAC;YAChE,MAAM,CAAC,UAAU,GAAG,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YACnE,MAAM,CAAC,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACrF,2EAA2E;YAC3E,MAAM,WAAW,GAAI,KAAa,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,CAAC;YACzD,IAAI,WAAW;gBAAE,MAAM,CAAC,YAAY,IAAI,KAAK,WAAW,EAAE,CAAC;YAC3D,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC;YACzE,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA8D,CACvG,KAAkC,EAClC,OAAgB,EAChB,OAA6B,EACC,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,OAAO,EAAE,MAAM,IAAI,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IAC/D,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/D,CAAC;IAED,MAAM,IAAI,GAAG,gBAAgB,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,eAAe,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,cAAc,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IACvG,MAAM,UAAU,GAAI,OAAgD,EAAE,UAAU,CAAC;IAEjF,OAAO,uBAAuB,CAAC,KAAK,EAAE,OAAO,EAAE;QAC9C,GAAG,IAAI;QACP,eAAe;QACf,UAAU;KACyB,CAAC,CAAC;AAAA,CACtC,CAAC;AAEF,SAAS,YAAY,CACpB,KAAkC,EAClC,OAAgB,EAChB,MAAe,EACf,cAAuC,EACtC;IACD,IAAI,CAAC,MAAM,EAAE,CAAC;QACb,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CACd,gGAAgG,CAChG,CAAC;QACH,CAAC;QACD,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC;IACrC,CAAC;IAED,MAAM,OAAO,GAAG,EAAE,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;IACrC,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,EAAE,CAAC;QACzC,gFAAgF;QAChF,iFAAiF;QACjF,+CAA+C;QAC/C,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,IAAI,EAAE,CAAC;QACxC,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAClD,MAAM,WAAW,GAAG,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC;QACtE,OAAO,CAAC,aAAa,CAAC,GAAG,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC;QACxD,OAAO,CAAC,eAAe,CAAC,GAAG,oBAAoB,CAAC;QAEhD,mDAAmD;QACnD,MAAM,S
AAS,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC;YACxC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;gBACvD,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;YACpD,CAAC;YACD,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC;gBAC7D,OAAO,GAAG,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;YACpD,CAAC;YACD,OAAO,KAAK,CAAC;QAAA,CACb,CAAC,CAAC;QACH,IAAI,SAAS,EAAE,CAAC;YACf,OAAO,CAAC,wBAAwB,CAAC,GAAG,MAAM,CAAC;QAC5C,CAAC;IACF,CAAC;IAED,2DAA2D;IAC3D,IAAI,cAAc,EAAE,CAAC;QACpB,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IACxC,CAAC;IAED,OAAO,IAAI,MAAM,CAAC;QACjB,MAAM;QACN,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,uBAAuB,EAAE,IAAI;QAC7B,cAAc,EAAE,OAAO;KACvB,CAAC,CAAC;AAAA,CACH;AAED,SAAS,WAAW,CAAC,KAAkC,EAAE,OAAgB,EAAE,OAAkC,EAAE;IAC9G,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC;IAChC,MAAM,QAAQ,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,uCAAuC,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAEzD,MAAM,MAAM,GAAgE;QAC3E,KAAK,EAAE,KAAK,CAAC,EAAE;QACf,QAAQ;QACR,MAAM,EAAE,IAAI;KACZ,CAAC;IAEF,IAAI,MAAM,CAAC,wBAAwB,KAAK,KAAK,EAAE,CAAC;QAC9C,MAAc,CAAC,cAAc,GAAG,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;IAC1D,CAAC;IAED,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;QAC1B,MAAM,CAAC,KAAK,GAAG,KAAK,CAAC;IACtB,CAAC;IAED,IAAI,OAAO,EAAE,SAAS,EAAE,CAAC;QACxB,IAAI,MAAM,CAAC,cAAc,KAAK,YAAY,EAAE,CAAC;YAC3C,MAAc,CAAC,UAAU,GAAG,OAAO,CAAC,SAAS,CAAC;QAChD,CAAC;aAAM,CAAC;YACP,MAAM,CAAC,qBAAqB,GAAG,OAAO,CAAC,SAAS,CAAC;QAClD,CAAC;IACF,CAAC;IAED,IAAI,OAAO,EAAE,WAAW,KAAK,SAAS,EAAE,CAAC;QACxC,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAC1C,CAAC;IAED,IAAI,OAAO,CAAC,KAAK,EAAE,CAAC;QACnB,MAAM,CAAC,KAAK,GAAG,YAAY,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC5C,CAAC;SAAM,IAAI,cAAc,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC7C,mGAAmG;QACnG,MAAM,CAAC,KAAK,GAAG,EAAE,CAAC;IACnB,CAAC;IAED,IAAI,OAAO,EAAE,UAAU,EAAE,CAAC;QACzB,MAAM,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC;IACzC,CAAC;IAED,IAAI,MAAM,CAAC,cAAc,KAAK,KAAK,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QACxD,8DAA8D;QAC9D,kEAAkE;QACjE,MAAc,CAAC,QAAQ,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC;IACxF,CAAC;SAAM,IAAI,OAAO,EAAE,eAAe,IAAI,KAAK,CAAC,SAAS,IAAI,MAAM,CAAC,uBAAuB,EAAE,CAAC;QAC1F,gCAAgC;QAChC,MAAM,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,CAAC;IACnD,CAAC;IAED,0CAA0C;IAC1C,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,KAAK,CAAC,MAAM,EAAE,iBAAiB,EAAE,CAAC;QAC/E,MAAc,CAAC,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC;IAC3D,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,uCAAuC,CAC/C,KAAkC,EAClC,QAAsC,EAC/B;IACP,IAAI,KAAK,CAAC,QAAQ,KAAK,YAAY,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC;QAAE,OAAO;IAElF,kFAAkF;IAClF,qFAAqF;IACrF,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC/C,MAAM,GAAG,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW;YAAE,SAAS;QAE9D,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC;QAC5B,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;YACjC,GAAG,CAAC,OAAO,GAAG;gBACb,MAAM,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,EAAE,CAAC;aACjG,CAAC;YACF,OAAO;QACR,CAAC;QAED,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC;YAAE,SAAS;QAEtC,4CAA4C;QAC5C,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;YAC9C,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YACxB,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM,EAAE,CAAC;gBAC3B,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,EAAE,CAAC,CAAC;gBAC9D,OAAO;YACR,CAAC;QACF,CAAC;IACF,CAAC;AAAA
,CACD;AAED,MAAM,UAAU,eAAe,CAC9B,KAAkC,EAClC,OAAgB,EAChB,MAAyC,EACV;IAC/B,MAAM,MAAM,GAAiC,EAAE,CAAC;IAEhD,MAAM,mBAAmB,GAAG,CAAC,EAAU,EAAU,EAAE,CAAC;QACnD,IAAI,MAAM,CAAC,sBAAsB;YAAE,OAAO,sBAAsB,CAAC,EAAE,CAAC,CAAC;QAErE,sDAAsD;QACtD,mFAAmF;QACnF,wEAAwE;QACxE,iDAAiD;QACjD,IAAI,EAAE,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;YACtB,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAC/B,oEAAoE;YACpE,OAAO,MAAM,CAAC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5D,CAAC;QAED,IAAI,KAAK,CAAC,QAAQ,KAAK,QAAQ;YAAE,OAAO,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;QAC9E,mFAAmF;QACnF,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,IAAI,KAAK,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;YACtF,OAAO,EAAE,CAAC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QACxD,CAAC;QACD,OAAO,EAAE,CAAC;IAAA,CACV,CAAC;IAEF,MAAM,mBAAmB,GAAG,iBAAiB,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,mBAAmB,CAAC,EAAE,CAAC,CAAC,CAAC;IAExG,IAAI,OAAO,CAAC,YAAY,EAAE,CAAC;QAC1B,MAAM,gBAAgB,GAAG,KAAK,CAAC,SAAS,IAAI,MAAM,CAAC,qBAAqB,CAAC;QACzE,MAAM,IAAI,GAAG,gBAAgB,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC;QACvD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,kBAAkB,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC;IAChF,CAAC;IAED,IAAI,QAAQ,GAAkB,IAAI,CAAC;IAEnC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,mBAAmB,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACrD,MAAM,GAAG,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAC;QACnC,+FAA+F;QAC/F,yDAAyD;QACzD,IAAI,MAAM,CAAC,gCAAgC,IAAI,QAAQ,KAAK,YAAY,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACjG,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,oCAAoC;aAC7C,CAAC,CAAC;QACJ,CAAC;QAED,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;YACzB,IAAI,OAAO,GAAG,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACrC,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,kBAAkB,CAAC,GAAG,CAAC,OAAO,CAAC;iBACxC,CAAC,CAAC;YACJ,CAAC;iBAAM,CAAC;gBACP,MAAM,OAAO,GAAgC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,EAA6B,EAAE,CAAC;oBACjG,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;wBAC1B,OAAO;4BACN,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,kBAAkB,CAAC,IAAI,CAAC,IAAI,CAAC;yBACK,CAAC;oBAC3C,CAAC;yBAAM,CAAC;wBACP,OAAO;4BACN,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE;gCACV,GAAG,EAAE,QAAQ,IAAI,CAAC,QAAQ,WAAW,IAAI,CAAC,IAAI,EAAE;6BAChD;yBACwC,CAAC;oBAC5C,CAAC;gBAAA,CACD,CAAC,CAAC;gBACH,MAAM,eAAe,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC;oBACrD,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,WAAW,CAAC;oBAC/C,CAAC,CAAC,OAAO,CAAC;gBACX,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBAC3C,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,eAAe;iBACxB,CAAC,CAAC;YACJ,CAAC;QACF,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;YACrC,oFAAoF;YACpF,MAAM,YAAY,GAAwC;gBACzD,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,MAAM,CAAC,gCAAgC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI;aAC5D,CAAC;YAEF,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAkB,CAAC;YACjF,8DAA8D;YAC9D,MAAM,kBAAkB,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACxF,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACnC,uEAAuE;gBACvE,2EAA2E;gBAC3E,IAAI,KAAK,CAAC,QAAQ,KAAK,gBAAgB,EAAE,CAAC;oBACzC,YAAY,CAAC,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBAC3F,CAAC;qBAAM,CAAC;oBACP,YAAY,CAAC,OAAO,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;wBACpD,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,kBAAkB,CAAC,CAAC,
CAAC,IAAI,CAAC,EAAE,CAAC;oBAAA,CAC1D,CAAC,CAAC;gBACJ,CAAC;YACF,CAAC;YAED,yBAAyB;YACzB,MAAM,cAAc,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAsB,CAAC;YAC7F,kEAAkE;YAClE,MAAM,sBAAsB,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACxG,IAAI,sBAAsB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvC,IAAI,MAAM,CAAC,sBAAsB,EAAE,CAAC;oBACnC,gFAAgF;oBAChF,MAAM,YAAY,GAAG,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;oBAChF,MAAM,WAAW,GAAG,YAAY,CAAC,OAAuD,CAAC;oBACzF,IAAI,WAAW,EAAE,CAAC;wBACjB,WAAW,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;oBAC3D,CAAC;yBAAM,CAAC;wBACP,YAAY,CAAC,OAAO,GAAG,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;oBAC/D,CAAC;gBACF,CAAC;qBAAM,CAAC;oBACP,gGAAgG;oBAChG,MAAM,SAAS,GAAG,sBAAsB,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC;oBAC9D,IAAI,SAAS,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;wBACtC,YAAoB,CAAC,SAAS,CAAC,GAAG,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAC7F,CAAC;gBACF,CAAC;YACF,CAAC;YAED,MAAM,SAAS,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAe,CAAC;YACjF,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC1B,YAAY,CAAC,UAAU,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,CAAC;oBAChD,EAAE,EAAE,EAAE,CAAC,EAAE;oBACT,IAAI,EAAE,UAAmB;oBACzB,QAAQ,EAAE;wBACT,IAAI,EAAE,EAAE,CAAC,IAAI;wBACb,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,SAAS,CAAC;qBACvC;iBACD,CAAC,CAAC,CAAC;gBACJ,MAAM,gBAAgB,GAAG,SAAS;qBAChC,MAAM,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC;qBACnC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC;oBACZ,IAAI,CAAC;wBACJ,OAAO,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,gBAAiB,CAAC,CAAC;oBACzC,CAAC;oBAAC,MAAM,CAAC;wBACR,OAAO,IAAI,CAAC;oBACb,CAAC;gBAAA,CACD,CAAC;qBACD,MAAM,CAAC,OAAO,CAAC,CAAC;gBAClB,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAChC,YAAoB,CAAC,iBAAiB,GAAG,gBAAgB,CAAC;gBAC5D,CAAC;YACF,CAAC;YACD,kEAAkE;YAClE,4EAA4E;YAC5E,8DAA8D;YAC9D,gEAAgE;YAChE,MAAM,OAAO,GAAG,YAAY,CAAC,OAAO,CAAC;YACrC,MAAM,UAAU,GACf,OAAO,KAAK,IAAI;gBAChB,OAAO,KAAK,SAAS;gBACrB,CAAC,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;YACzE,IAAI,CAAC,UAAU,IAAI,CAAC,YAAY,CAAC,UAAU,EAAE,CAAC;gBAC7C,SAAS;YACV,CAAC;YACD,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAC3B,CAAC;aAAM,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACtC,MAAM,WAAW,GAA6D,EAAE,CAAC;YACjF,IAAI,CAAC,GAAG,CAAC,CAAC;YAEV,OAAO,CAAC,GAAG,mBAAmB,CAAC,MAAM,IAAI,mBAAmB,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC5F,MAAM,OAAO,GAAG,mBAAmB,CAAC,CAAC,CAAsB,CAAC;gBAE5D,iCAAiC;gBACjC,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO;qBAChC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;qBAChC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAAS,CAAC,IAAI,CAAC;qBAC3B,IAAI,CAAC,IAAI,CAAC,CAAC;gBACb,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC;gBAElE,oEAAoE;gBACpE,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;gBACtC,yEAAyE;gBACzE,MAAM,aAAa,GAAmC;oBACrD,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE,kBAAkB,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,sBAAsB,CAAC;oBAC1E,YAAY,EAAE,OAAO,CAAC,UAAU;iBAChC,CAAC;gBACF,IAAI,MAAM,CAAC,sBAAsB,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC;oBACtD,aAAqB,CAAC,IAAI,GAAG,OAAO,CAAC,QAAQ,CAAC;gBAChD,CAAC;gBACD,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;gBAE3B,IAAI,SAAS,IAAI,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;oBAChD,KAAK,MAAM,KAAK,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;wBACrC,IAAI,KAAK,C
AAC,IAAI,KAAK,OAAO,EAAE,CAAC;4BAC5B,WAAW,CAAC,IAAI,CAAC;gCAChB,IAAI,EAAE,WAAW;gCACjB,SAAS,EAAE;oCACV,GAAG,EAAE,QAAS,KAAa,CAAC,QAAQ,WAAY,KAAa,CAAC,IAAI,EAAE;iCACpE;6BACD,CAAC,CAAC;wBACJ,CAAC;oBACF,CAAC;gBACF,CAAC;YACF,CAAC;YAED,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YAEV,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC5B,IAAI,MAAM,CAAC,gCAAgC,EAAE,CAAC;oBAC7C,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,WAAW;wBACjB,OAAO,EAAE,oCAAoC;qBAC7C,CAAC,CAAC;gBACJ,CAAC;gBAED,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,MAAM;oBACZ,OAAO,EAAE;wBACR;4BACC,IAAI,EAAE,MAAM;4BACZ,IAAI,EAAE,qCAAqC;yBAC3C;wBACD,GAAG,WAAW;qBACd;iBACD,CAAC,CAAC;gBACH,QAAQ,GAAG,MAAM,CAAC;YACnB,CAAC;iBAAM,CAAC;gBACP,QAAQ,GAAG,YAAY,CAAC;YACzB,CAAC;YACD,SAAS;QACV,CAAC;QAED,QAAQ,GAAG,GAAG,CAAC,IAAI,CAAC;IACrB,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,YAAY,CAAC,KAAa,EAAgD;IAClF,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC3B,IAAI,EAAE,UAAU;QAChB,QAAQ,EAAE;YACT,IAAI,EAAE,IAAI,CAAC,IAAI;YACf,WAAW,EAAE,IAAI,CAAC,WAAW;YAC7B,UAAU,EAAE,IAAI,CAAC,UAAiB,EAAE,wCAAwC;YAC5E,MAAM,EAAE,KAAK,EAAE,uEAAuE;SACtF;KACD,CAAC,CAAC,CAAC;AAAA,CACJ;AAED,SAAS,aAAa,CAAC,MAAmD,EAAc;IACvF,IAAI,MAAM,KAAK,IAAI;QAAE,OAAO,MAAM,CAAC;IACnC,QAAQ,MAAM,EAAE,CAAC;QAChB,KAAK,MAAM;YACV,OAAO,MAAM,CAAC;QACf,KAAK,QAAQ;YACZ,OAAO,QAAQ,CAAC;QACjB,KAAK,eAAe,CAAC;QACrB,KAAK,YAAY;YAChB,OAAO,SAAS,CAAC;QAClB,KAAK,gBAAgB;YACpB,OAAO,OAAO,CAAC;QAChB,SAAS,CAAC;YACT,MAAM,WAAW,GAAU,MAAM,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;IACF,CAAC;AAAA,CACD;AAED;;;;GAIG;AACH,SAAS,YAAY,CAAC,KAAkC,EAAqC;IAC5F,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC;IAChC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;IAE9B,MAAM,KAAK,GAAG,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;IAEjE,MAAM,aAAa,GAClB,QAAQ,KAAK,UAAU;QACvB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC;QAC/B,QAAQ,KAAK,KAAK;QAClB,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC;QAC5B,QAAQ,KAAK,SAAS;QACtB,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;QAC9B,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC;QAC7B,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC;QAChC,KAAK;QACL,QAAQ,KAAK,UAAU;QACvB,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;IAEjC,MAAM,YAAY,GAAG,QAAQ,KAAK,SAAS,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IAE/G,MAAM,MAAM,GAAG,QAAQ,KAAK,KAAK,IAAI,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;IAElE,MAAM,SAAS,GAAG,QAAQ,KAAK,SAAS,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE3E,OAAO;QACN,aAAa,EAAE,CAAC,aAAa;QAC7B,qBAAqB,EAAE,CAAC,aAAa;QACrC,uBAAuB,EAAE,CAAC,MAAM,IAAI,CAAC,KAAK;QAC1C,wBAAwB,EAAE,IAAI;QAC9B,cAAc,EAAE,YAAY,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,uBAAuB;QACrE,sBAAsB,EAAE,SAAS;QACjC,gCAAgC,EAAE,KAAK,EAAE,iDAAiD;QAC1F,sBAAsB,EAAE,SAAS;QACjC,sBAAsB,EAAE,SAAS;QACjC,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,QAAQ;QACxC,iBAAiB,EAAE,EAAE;KACrB,CAAC;AAAA,CACF;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,KAAkC,EAAqC;IACzF,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,CAAC,KAAK,CAAC,MAAM;QAAE,OAAO,QAAQ,CAAC;IAEnC,OAAO;QACN,aAAa,EAAE,KAAK,CAAC,MAAM,CAAC,aAAa,IAAI,QAAQ,CAAC,aAAa;QACnE,qBAAqB,EAAE,KAAK,CAAC,MAAM,CAAC,qBAAqB,IAAI,QAAQ,CAAC,qBAAqB;QAC3F,uBAAuB,EAAE,KAAK,CAAC,MAAM,CAAC,uBAAuB,IAAI,QAAQ,CAAC,uBAAuB;QACjG,wBAAwB,EAAE,KAAK,CAAC,MAAM,CAAC,wBAAwB,IAAI,QAAQ,CAAC,wBAAwB;QACpG,cAAc,EAAE,KAAK,CAAC,MAAM,CAAC,cAAc,IAAI,QAAQ,CAAC,cAAc;QACtE,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,gCAAgC,EAC/B,KAAK,CAAC,MAAM,CAAC,gCAAgC,IAAI,QAAQ,CAAC,gCAAgC;QAC3F,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,sBAAsB,EAAE,KAAK,CAAC,MAAM,CAAC,sBAAsB,IAAI,QAAQ,CAAC,sBAAsB;QAC9F,cAAc,EAAE,KAAK,CAAC,MAAM,CAAC,cAAc,IAAI,QAAQ,CAAC,cAAc;QACtE,iBAAiB,EAAE,KAAK,CAAC,MAAM,CAAC,iBAAiB,IAAI,EAAE;KACvD,CAAC;AAA
A,CACF","sourcesContent":["import OpenAI from \"openai\";\nimport type {\n\tChatCompletionAssistantMessageParam,\n\tChatCompletionChunk,\n\tChatCompletionContentPart,\n\tChatCompletionContentPartImage,\n\tChatCompletionContentPartText,\n\tChatCompletionMessageParam,\n\tChatCompletionToolMessageParam,\n} from \"openai/resources/chat/completions.js\";\nimport { getEnvApiKey } from \"../env-api-keys.js\";\nimport { calculateCost, supportsXhigh } from \"../models.js\";\nimport type {\n\tAssistantMessage,\n\tContext,\n\tMessage,\n\tModel,\n\tOpenAICompletionsCompat,\n\tSimpleStreamOptions,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tToolResultMessage,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { buildBaseOptions, clampReasoning } from \"./simple-options.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n/**\n * Normalize tool call ID for Mistral.\n * Mistral requires tool IDs to be exactly 9 alphanumeric characters (a-z, A-Z, 0-9).\n */\nfunction normalizeMistralToolId(id: string): string {\n\t// Remove non-alphanumeric characters\n\tlet normalized = id.replace(/[^a-zA-Z0-9]/g, \"\");\n\t// Mistral requires exactly 9 characters\n\tif (normalized.length < 9) {\n\t\t// Pad with deterministic characters based on original ID to ensure matching\n\t\tconst padding = \"ABCDEFGHI\";\n\t\tnormalized = normalized + padding.slice(0, 9 - normalized.length);\n\t} else if (normalized.length > 9) {\n\t\tnormalized = normalized.slice(0, 9);\n\t}\n\treturn normalized;\n}\n\n/**\n * Check if conversation messages contain tool calls or tool results.\n * This is needed because Anthropic (via proxy) requires the tools param\n * to be present when messages include tool_calls or tool role messages.\n */\nfunction hasToolHistory(messages: Message[]): boolean {\n\tfor (const msg of messages) {\n\t\tif (msg.role === \"toolResult\") {\n\t\t\treturn true;\n\t\t}\n\t\tif (msg.role === \"assistant\") {\n\t\t\tif (msg.content.some((block) => block.type === \"toolCall\")) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n}\n\nexport interface OpenAICompletionsOptions extends StreamOptions {\n\ttoolChoice?: \"auto\" | \"none\" | \"required\" | { type: \"function\"; function: { name: string } };\n\treasoningEffort?: \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n}\n\nexport const streamOpenAICompletions: StreamFunction<\"openai-completions\", OpenAICompletionsOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: OpenAICompletionsOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: model.api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tconst client = createClient(model, context, apiKey, options?.headers);\n\t\t\tconst params = 
buildParams(model, context, options);\n\t\t\toptions?.onPayload?.(params);\n\t\t\tconst openaiStream = await client.chat.completions.create(params, { signal: options?.signal });\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentBlock: TextContent | ThinkingContent | (ToolCall & { partialArgs?: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\t\t\tconst finishCurrentBlock = (block?: typeof currentBlock) => {\n\t\t\t\tif (block) {\n\t\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: block.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\t\tblock.arguments = JSON.parse(block.partialArgs || \"{}\");\n\t\t\t\t\t\tdelete block.partialArgs;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\ttoolCall: block,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\n\t\t\tfor await (const chunk of openaiStream) {\n\t\t\t\tif (chunk.usage) {\n\t\t\t\t\tconst cachedTokens = chunk.usage.prompt_tokens_details?.cached_tokens || 0;\n\t\t\t\t\tconst reasoningTokens = chunk.usage.completion_tokens_details?.reasoning_tokens || 0;\n\t\t\t\t\tconst input = (chunk.usage.prompt_tokens || 0) - cachedTokens;\n\t\t\t\t\tconst outputTokens = (chunk.usage.completion_tokens || 0) + reasoningTokens;\n\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t// OpenAI includes cached tokens in prompt_tokens, so subtract to get non-cached input\n\t\t\t\t\t\tinput,\n\t\t\t\t\t\toutput: outputTokens,\n\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t// Compute totalTokens ourselves since we add reasoning_tokens to output\n\t\t\t\t\t\t// and some providers (e.g., Groq) don't include them in total_tokens\n\t\t\t\t\t\ttotalTokens: input + outputTokens + cachedTokens,\n\t\t\t\t\t\tcost: {\n\t\t\t\t\t\t\tinput: 0,\n\t\t\t\t\t\t\toutput: 0,\n\t\t\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotal: 0,\n\t\t\t\t\t\t},\n\t\t\t\t\t};\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t}\n\n\t\t\t\tconst choice = chunk.choices[0];\n\t\t\t\tif (!choice) continue;\n\n\t\t\t\tif (choice.finish_reason) {\n\t\t\t\t\toutput.stopReason = mapStopReason(choice.finish_reason);\n\t\t\t\t}\n\n\t\t\t\tif (choice.delta) {\n\t\t\t\t\tif (\n\t\t\t\t\t\tchoice.delta.content !== null &&\n\t\t\t\t\t\tchoice.delta.content !== undefined &&\n\t\t\t\t\t\tchoice.delta.content.length > 0\n\t\t\t\t\t) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"text\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"text\") {\n\t\t\t\t\t\t\tcurrentBlock.text += choice.delta.content;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: 
choice.delta.content,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\t// Some endpoints return reasoning in reasoning_content (llama.cpp),\n\t\t\t\t\t// or reasoning (other openai compatible endpoints)\n\t\t\t\t\t// Use the first non-empty reasoning field to avoid duplication\n\t\t\t\t\t// (e.g., chutes.ai returns both reasoning_content and reasoning with same content)\n\t\t\t\t\tconst reasoningFields = [\"reasoning_content\", \"reasoning\", \"reasoning_text\"];\n\t\t\t\t\tlet foundReasoningField: string | null = null;\n\t\t\t\t\tfor (const field of reasoningFields) {\n\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== null &&\n\t\t\t\t\t\t\t(choice.delta as any)[field] !== undefined &&\n\t\t\t\t\t\t\t(choice.delta as any)[field].length > 0\n\t\t\t\t\t\t) {\n\t\t\t\t\t\t\tif (!foundReasoningField) {\n\t\t\t\t\t\t\t\tfoundReasoningField = field;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (foundReasoningField) {\n\t\t\t\t\t\tif (!currentBlock || currentBlock.type !== \"thinking\") {\n\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\ttype: \"thinking\",\n\t\t\t\t\t\t\t\tthinking: \"\",\n\t\t\t\t\t\t\t\tthinkingSignature: foundReasoningField,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (currentBlock.type === \"thinking\") {\n\t\t\t\t\t\t\tconst delta = (choice.delta as any)[foundReasoningField];\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (choice?.delta?.tool_calls) {\n\t\t\t\t\t\tfor (const toolCall of choice.delta.tool_calls) {\n\t\t\t\t\t\t\tif (\n\t\t\t\t\t\t\t\t!currentBlock ||\n\t\t\t\t\t\t\t\tcurrentBlock.type !== \"toolCall\" ||\n\t\t\t\t\t\t\t\t(toolCall.id && currentBlock.id !== toolCall.id)\n\t\t\t\t\t\t\t) {\n\t\t\t\t\t\t\t\tfinishCurrentBlock(currentBlock);\n\t\t\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\t\t\tid: toolCall.id || \"\",\n\t\t\t\t\t\t\t\t\tname: toolCall.function?.name || \"\",\n\t\t\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\t\t\tpartialArgs: \"\",\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tif (currentBlock.type === \"toolCall\") {\n\t\t\t\t\t\t\t\tif (toolCall.id) currentBlock.id = toolCall.id;\n\t\t\t\t\t\t\t\tif (toolCall.function?.name) currentBlock.name = toolCall.function.name;\n\t\t\t\t\t\t\t\tlet delta = \"\";\n\t\t\t\t\t\t\t\tif (toolCall.function?.arguments) {\n\t\t\t\t\t\t\t\t\tdelta = toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.partialArgs += toolCall.function.arguments;\n\t\t\t\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialArgs);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tconst reasoningDetails = (choice.delta as 
any).reasoning_details;\n\t\t\t\t\tif (reasoningDetails && Array.isArray(reasoningDetails)) {\n\t\t\t\t\t\tfor (const detail of reasoningDetails) {\n\t\t\t\t\t\t\tif (detail.type === \"reasoning.encrypted\" && detail.id && detail.data) {\n\t\t\t\t\t\t\t\tconst matchingToolCall = output.content.find(\n\t\t\t\t\t\t\t\t\t(b) => b.type === \"toolCall\" && b.id === detail.id,\n\t\t\t\t\t\t\t\t) as ToolCall | undefined;\n\t\t\t\t\t\t\t\tif (matchingToolCall) {\n\t\t\t\t\t\t\t\t\tmatchingToolCall.thoughtSignature = JSON.stringify(detail);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfinishCurrentBlock(currentBlock);\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as any).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\t// Some providers via OpenRouter give additional information in this field.\n\t\t\tconst rawMetadata = (error as any)?.error?.metadata?.raw;\n\t\t\tif (rawMetadata) output.errorMessage += `\\n${rawMetadata}`;\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nexport const streamSimpleOpenAICompletions: StreamFunction<\"openai-completions\", SimpleStreamOptions> = (\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\toptions?: SimpleStreamOptions,\n): AssistantMessageEventStream => {\n\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider);\n\tif (!apiKey) {\n\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t}\n\n\tconst base = buildBaseOptions(model, options, apiKey);\n\tconst reasoningEffort = supportsXhigh(model) ? options?.reasoning : clampReasoning(options?.reasoning);\n\tconst toolChoice = (options as OpenAICompletionsOptions | undefined)?.toolChoice;\n\n\treturn streamOpenAICompletions(model, context, {\n\t\t...base,\n\t\treasoningEffort,\n\t\ttoolChoice,\n\t} satisfies OpenAICompletionsOptions);\n};\n\nfunction createClient(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tapiKey?: string,\n\toptionsHeaders?: Record<string, string>,\n) {\n\tif (!apiKey) {\n\t\tif (!process.env.OPENAI_API_KEY) {\n\t\t\tthrow new Error(\n\t\t\t\t\"OpenAI API key is required. Set OPENAI_API_KEY environment variable or pass it as an argument.\",\n\t\t\t);\n\t\t}\n\t\tapiKey = process.env.OPENAI_API_KEY;\n\t}\n\n\tconst headers = { ...model.headers };\n\tif (model.provider === \"github-copilot\") {\n\t\t// Copilot expects X-Initiator to indicate whether the request is user-initiated\n\t\t// or agent-initiated (e.g. follow-up after assistant/tool messages). If there is\n\t\t// no prior message, default to user-initiated.\n\t\tconst messages = context.messages || [];\n\t\tconst lastMessage = messages[messages.length - 1];\n\t\tconst isAgentCall = lastMessage ? lastMessage.role !== \"user\" : false;\n\t\theaders[\"X-Initiator\"] = isAgentCall ? 
\"agent\" : \"user\";\n\t\theaders[\"Openai-Intent\"] = \"conversation-edits\";\n\n\t\t// Copilot requires this header when sending images\n\t\tconst hasImages = messages.some((msg) => {\n\t\t\tif (msg.role === \"user\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\tif (msg.role === \"toolResult\" && Array.isArray(msg.content)) {\n\t\t\t\treturn msg.content.some((c) => c.type === \"image\");\n\t\t\t}\n\t\t\treturn false;\n\t\t});\n\t\tif (hasImages) {\n\t\t\theaders[\"Copilot-Vision-Request\"] = \"true\";\n\t\t}\n\t}\n\n\t// Merge options headers last so they can override defaults\n\tif (optionsHeaders) {\n\t\tObject.assign(headers, optionsHeaders);\n\t}\n\n\treturn new OpenAI({\n\t\tapiKey,\n\t\tbaseURL: model.baseUrl,\n\t\tdangerouslyAllowBrowser: true,\n\t\tdefaultHeaders: headers,\n\t});\n}\n\nfunction buildParams(model: Model<\"openai-completions\">, context: Context, options?: OpenAICompletionsOptions) {\n\tconst compat = getCompat(model);\n\tconst messages = convertMessages(model, context, compat);\n\tmaybeAddOpenRouterAnthropicCacheControl(model, messages);\n\n\tconst params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {\n\t\tmodel: model.id,\n\t\tmessages,\n\t\tstream: true,\n\t};\n\n\tif (compat.supportsUsageInStreaming !== false) {\n\t\t(params as any).stream_options = { include_usage: true };\n\t}\n\n\tif (compat.supportsStore) {\n\t\tparams.store = false;\n\t}\n\n\tif (options?.maxTokens) {\n\t\tif (compat.maxTokensField === \"max_tokens\") {\n\t\t\t(params as any).max_tokens = options.maxTokens;\n\t\t} else {\n\t\t\tparams.max_completion_tokens = options.maxTokens;\n\t\t}\n\t}\n\n\tif (options?.temperature !== undefined) {\n\t\tparams.temperature = options.temperature;\n\t}\n\n\tif (context.tools) {\n\t\tparams.tools = convertTools(context.tools);\n\t} else if (hasToolHistory(context.messages)) {\n\t\t// Anthropic (via LiteLLM/proxy) requires tools param when conversation has tool_calls/tool_results\n\t\tparams.tools = [];\n\t}\n\n\tif (options?.toolChoice) {\n\t\tparams.tool_choice = options.toolChoice;\n\t}\n\n\tif (compat.thinkingFormat === \"zai\" && model.reasoning) {\n\t\t// Z.ai uses binary thinking: { type: \"enabled\" | \"disabled\" }\n\t\t// Must explicitly disable since z.ai defaults to thinking enabled\n\t\t(params as any).thinking = { type: options?.reasoningEffort ? \"enabled\" : \"disabled\" };\n\t} else if (options?.reasoningEffort && model.reasoning && compat.supportsReasoningEffort) {\n\t\t// OpenAI-style reasoning_effort\n\t\tparams.reasoning_effort = options.reasoningEffort;\n\t}\n\n\t// OpenRouter provider routing preferences\n\tif (model.baseUrl.includes(\"openrouter.ai\") && model.compat?.openRouterRouting) {\n\t\t(params as any).provider = model.compat.openRouterRouting;\n\t}\n\n\treturn params;\n}\n\nfunction maybeAddOpenRouterAnthropicCacheControl(\n\tmodel: Model<\"openai-completions\">,\n\tmessages: ChatCompletionMessageParam[],\n): void {\n\tif (model.provider !== \"openrouter\" || !model.id.startsWith(\"anthropic/\")) return;\n\n\t// Anthropic-style caching requires cache_control on a text part. 
Add a breakpoint\n\t// on the last user/assistant message (walking backwards until we find text content).\n\tfor (let i = messages.length - 1; i >= 0; i--) {\n\t\tconst msg = messages[i];\n\t\tif (msg.role !== \"user\" && msg.role !== \"assistant\") continue;\n\n\t\tconst content = msg.content;\n\t\tif (typeof content === \"string\") {\n\t\t\tmsg.content = [\n\t\t\t\tObject.assign({ type: \"text\" as const, text: content }, { cache_control: { type: \"ephemeral\" } }),\n\t\t\t];\n\t\t\treturn;\n\t\t}\n\n\t\tif (!Array.isArray(content)) continue;\n\n\t\t// Find last text part and add cache_control\n\t\tfor (let j = content.length - 1; j >= 0; j--) {\n\t\t\tconst part = content[j];\n\t\t\tif (part?.type === \"text\") {\n\t\t\t\tObject.assign(part, { cache_control: { type: \"ephemeral\" } });\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n}\n\nexport function convertMessages(\n\tmodel: Model<\"openai-completions\">,\n\tcontext: Context,\n\tcompat: Required<OpenAICompletionsCompat>,\n): ChatCompletionMessageParam[] {\n\tconst params: ChatCompletionMessageParam[] = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (compat.requiresMistralToolIds) return normalizeMistralToolId(id);\n\n\t\t// Handle pipe-separated IDs from OpenAI Responses API\n\t\t// Format: {call_id}|{id} where {id} can be 400+ chars with special chars (+, /, =)\n\t\t// These come from providers like github-copilot, openai-codex, opencode\n\t\t// Extract just the call_id part and normalize it\n\t\tif (id.includes(\"|\")) {\n\t\t\tconst [callId] = id.split(\"|\");\n\t\t\t// Sanitize to allowed chars and truncate to 40 chars (OpenAI limit)\n\t\t\treturn callId.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 40);\n\t\t}\n\n\t\tif (model.provider === \"openai\") return id.length > 40 ? id.slice(0, 40) : id;\n\t\t// Copilot Claude models route to Claude backend which requires Anthropic ID format\n\t\tif (model.provider === \"github-copilot\" && model.id.toLowerCase().includes(\"claude\")) {\n\t\t\treturn id.replace(/[^a-zA-Z0-9_-]/g, \"_\").slice(0, 64);\n\t\t}\n\t\treturn id;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, (id) => normalizeToolCallId(id));\n\n\tif (context.systemPrompt) {\n\t\tconst useDeveloperRole = model.reasoning && compat.supportsDeveloperRole;\n\t\tconst role = useDeveloperRole ? \"developer\" : \"system\";\n\t\tparams.push({ role: role, content: sanitizeSurrogates(context.systemPrompt) });\n\t}\n\n\tlet lastRole: string | null = null;\n\n\tfor (let i = 0; i < transformedMessages.length; i++) {\n\t\tconst msg = transformedMessages[i];\n\t\t// Some providers (e.g. 
Mistral/Devstral) don't allow user messages directly after tool results\n\t\t// Insert a synthetic assistant message to bridge the gap\n\t\tif (compat.requiresAssistantAfterToolResult && lastRole === \"toolResult\" && msg.role === \"user\") {\n\t\t\tparams.push({\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t});\n\t\t}\n\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: sanitizeSurrogates(msg.content),\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ChatCompletionContentPart[] = msg.content.map((item): ChatCompletionContentPart => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartText;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\turl: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t} satisfies ChatCompletionContentPartImage;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? content.filter((c) => c.type !== \"image_url\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\t// Some providers (e.g. Mistral) don't accept null content, use empty string instead\n\t\t\tconst assistantMsg: ChatCompletionAssistantMessageParam = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: compat.requiresAssistantAfterToolResult ? \"\" : null,\n\t\t\t};\n\n\t\t\tconst textBlocks = msg.content.filter((b) => b.type === \"text\") as TextContent[];\n\t\t\t// Filter out empty text blocks to avoid API validation errors\n\t\t\tconst nonEmptyTextBlocks = textBlocks.filter((b) => b.text && b.text.trim().length > 0);\n\t\t\tif (nonEmptyTextBlocks.length > 0) {\n\t\t\t\t// GitHub Copilot requires assistant content as a string, not an array.\n\t\t\t\t// Sending as array causes Claude models to re-answer all previous prompts.\n\t\t\t\tif (model.provider === \"github-copilot\") {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => sanitizeSurrogates(b.text)).join(\"\");\n\t\t\t\t} else {\n\t\t\t\t\tassistantMsg.content = nonEmptyTextBlocks.map((b) => {\n\t\t\t\t\t\treturn { type: \"text\", text: sanitizeSurrogates(b.text) };\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Handle thinking blocks\n\t\t\tconst thinkingBlocks = msg.content.filter((b) => b.type === \"thinking\") as ThinkingContent[];\n\t\t\t// Filter out empty thinking blocks to avoid API validation errors\n\t\t\tconst nonEmptyThinkingBlocks = thinkingBlocks.filter((b) => b.thinking && b.thinking.trim().length > 0);\n\t\t\tif (nonEmptyThinkingBlocks.length > 0) {\n\t\t\t\tif (compat.requiresThinkingAsText) {\n\t\t\t\t\t// Convert thinking blocks to plain text (no tags to avoid model mimicking them)\n\t\t\t\t\tconst thinkingText = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\\n\");\n\t\t\t\t\tconst textContent = assistantMsg.content as Array<{ type: \"text\"; text: string }> | null;\n\t\t\t\t\tif (textContent) {\n\t\t\t\t\t\ttextContent.unshift({ type: \"text\", text: thinkingText });\n\t\t\t\t\t} else {\n\t\t\t\t\t\tassistantMsg.content = [{ type: \"text\", text: thinkingText }];\n\t\t\t\t\t}\n\t\t\t\t} 
else {\n\t\t\t\t\t// Use the signature from the first thinking block if available (for llama.cpp server + gpt-oss)\n\t\t\t\t\tconst signature = nonEmptyThinkingBlocks[0].thinkingSignature;\n\t\t\t\t\tif (signature && signature.length > 0) {\n\t\t\t\t\t\t(assistantMsg as any)[signature] = nonEmptyThinkingBlocks.map((b) => b.thinking).join(\"\\n\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tconst toolCalls = msg.content.filter((b) => b.type === \"toolCall\") as ToolCall[];\n\t\t\tif (toolCalls.length > 0) {\n\t\t\t\tassistantMsg.tool_calls = toolCalls.map((tc) => ({\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\ttype: \"function\" as const,\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: tc.name,\n\t\t\t\t\t\targuments: JSON.stringify(tc.arguments),\n\t\t\t\t\t},\n\t\t\t\t}));\n\t\t\t\tconst reasoningDetails = toolCalls\n\t\t\t\t\t.filter((tc) => tc.thoughtSignature)\n\t\t\t\t\t.map((tc) => {\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\treturn JSON.parse(tc.thoughtSignature!);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\treturn null;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.filter(Boolean);\n\t\t\t\tif (reasoningDetails.length > 0) {\n\t\t\t\t\t(assistantMsg as any).reasoning_details = reasoningDetails;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Skip assistant messages that have no content and no tool calls.\n\t\t\t// Mistral explicitly requires \"either content or tool_calls, but not none\".\n\t\t\t// Other providers also don't accept empty assistant messages.\n\t\t\t// This handles aborted assistant responses that got no content.\n\t\t\tconst content = assistantMsg.content;\n\t\t\tconst hasContent =\n\t\t\t\tcontent !== null &&\n\t\t\t\tcontent !== undefined &&\n\t\t\t\t(typeof content === \"string\" ? content.length > 0 : content.length > 0);\n\t\t\tif (!hasContent && !assistantMsg.tool_calls) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tparams.push(assistantMsg);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst imageBlocks: Array<{ type: \"image_url\"; image_url: { url: string } }> = [];\n\t\t\tlet j = i;\n\n\t\t\tfor (; j < transformedMessages.length && transformedMessages[j].role === \"toolResult\"; j++) {\n\t\t\t\tconst toolMsg = transformedMessages[j] as ToolResultMessage;\n\n\t\t\t\t// Extract text and image content\n\t\t\t\tconst textResult = toolMsg.content\n\t\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t\t.map((c) => (c as any).text)\n\t\t\t\t\t.join(\"\\n\");\n\t\t\t\tconst hasImages = toolMsg.content.some((c) => c.type === \"image\");\n\n\t\t\t\t// Always send tool result with text (or placeholder if only images)\n\t\t\t\tconst hasText = textResult.length > 0;\n\t\t\t\t// Some providers (e.g. Mistral) require the 'name' field in tool results\n\t\t\t\tconst toolResultMsg: ChatCompletionToolMessageParam = {\n\t\t\t\t\trole: \"tool\",\n\t\t\t\t\tcontent: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t\t\ttool_call_id: toolMsg.toolCallId,\n\t\t\t\t};\n\t\t\t\tif (compat.requiresToolResultName && toolMsg.toolName) {\n\t\t\t\t\t(toolResultMsg as any).name = toolMsg.toolName;\n\t\t\t\t}\n\t\t\t\tparams.push(toolResultMsg);\n\n\t\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\t\tfor (const block of toolMsg.content) {\n\t\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\t\timageBlocks.push({\n\t\t\t\t\t\t\t\ttype: \"image_url\",\n\t\t\t\t\t\t\t\timage_url: {\n\t\t\t\t\t\t\t\t\turl: `data:${(block as any).mimeType};base64,${(block as any).data}`,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ti = j - 1;\n\n\t\t\tif (imageBlocks.length > 0) {\n\t\t\t\tif (compat.requiresAssistantAfterToolResult) {\n\t\t\t\t\tparams.push({\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: \"I have processed the tool results.\",\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\tparams.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t\t\t},\n\t\t\t\t\t\t...imageBlocks,\n\t\t\t\t\t],\n\t\t\t\t});\n\t\t\t\tlastRole = \"user\";\n\t\t\t} else {\n\t\t\t\tlastRole = \"toolResult\";\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\n\t\tlastRole = msg.role;\n\t}\n\n\treturn params;\n}\n\nfunction convertTools(tools: Tool[]): OpenAI.Chat.Completions.ChatCompletionTool[] {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tfunction: {\n\t\t\tname: tool.name,\n\t\t\tdescription: tool.description,\n\t\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\t\tstrict: false, // Disable strict mode to allow optional parameters without null unions\n\t\t},\n\t}));\n}\n\nfunction mapStopReason(reason: ChatCompletionChunk.Choice[\"finish_reason\"]): StopReason {\n\tif (reason === null) return \"stop\";\n\tswitch (reason) {\n\t\tcase \"stop\":\n\t\t\treturn \"stop\";\n\t\tcase \"length\":\n\t\t\treturn \"length\";\n\t\tcase \"function_call\":\n\t\tcase \"tool_calls\":\n\t\t\treturn \"toolUse\";\n\t\tcase \"content_filter\":\n\t\t\treturn \"error\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = reason;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n\n/**\n * Detect compatibility settings from provider and baseUrl for known providers.\n * Provider takes precedence over URL-based detection since it's explicitly configured.\n * Returns a fully resolved OpenAICompletionsCompat object with all fields set.\n */\nfunction detectCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst provider = model.provider;\n\tconst baseUrl = model.baseUrl;\n\n\tconst isZai = provider === \"zai\" || baseUrl.includes(\"api.z.ai\");\n\n\tconst isNonStandard =\n\t\tprovider === \"cerebras\" ||\n\t\tbaseUrl.includes(\"cerebras.ai\") ||\n\t\tprovider === \"xai\" ||\n\t\tbaseUrl.includes(\"api.x.ai\") ||\n\t\tprovider === \"mistral\" ||\n\t\tbaseUrl.includes(\"mistral.ai\") ||\n\t\tbaseUrl.includes(\"chutes.ai\") ||\n\t\tbaseUrl.includes(\"deepseek.com\") ||\n\t\tisZai ||\n\t\tprovider === \"opencode\" ||\n\t\tbaseUrl.includes(\"opencode.ai\");\n\n\tconst useMaxTokens = provider === \"mistral\" || baseUrl.includes(\"mistral.ai\") || baseUrl.includes(\"chutes.ai\");\n\n\tconst isGrok = provider === \"xai\" || baseUrl.includes(\"api.x.ai\");\n\n\tconst isMistral = provider === \"mistral\" || 
baseUrl.includes(\"mistral.ai\");\n\n\treturn {\n\t\tsupportsStore: !isNonStandard,\n\t\tsupportsDeveloperRole: !isNonStandard,\n\t\tsupportsReasoningEffort: !isGrok && !isZai,\n\t\tsupportsUsageInStreaming: true,\n\t\tmaxTokensField: useMaxTokens ? \"max_tokens\" : \"max_completion_tokens\",\n\t\trequiresToolResultName: isMistral,\n\t\trequiresAssistantAfterToolResult: false, // Mistral no longer requires this as of Dec 2024\n\t\trequiresThinkingAsText: isMistral,\n\t\trequiresMistralToolIds: isMistral,\n\t\tthinkingFormat: isZai ? \"zai\" : \"openai\",\n\t\topenRouterRouting: {},\n\t};\n}\n\n/**\n * Get resolved compatibility settings for a model.\n * Uses explicit model.compat if provided, otherwise auto-detects from provider/URL.\n */\nfunction getCompat(model: Model<\"openai-completions\">): Required<OpenAICompletionsCompat> {\n\tconst detected = detectCompat(model);\n\tif (!model.compat) return detected;\n\n\treturn {\n\t\tsupportsStore: model.compat.supportsStore ?? detected.supportsStore,\n\t\tsupportsDeveloperRole: model.compat.supportsDeveloperRole ?? detected.supportsDeveloperRole,\n\t\tsupportsReasoningEffort: model.compat.supportsReasoningEffort ?? detected.supportsReasoningEffort,\n\t\tsupportsUsageInStreaming: model.compat.supportsUsageInStreaming ?? detected.supportsUsageInStreaming,\n\t\tmaxTokensField: model.compat.maxTokensField ?? detected.maxTokensField,\n\t\trequiresToolResultName: model.compat.requiresToolResultName ?? detected.requiresToolResultName,\n\t\trequiresAssistantAfterToolResult:\n\t\t\tmodel.compat.requiresAssistantAfterToolResult ?? detected.requiresAssistantAfterToolResult,\n\t\trequiresThinkingAsText: model.compat.requiresThinkingAsText ?? detected.requiresThinkingAsText,\n\t\trequiresMistralToolIds: model.compat.requiresMistralToolIds ?? detected.requiresMistralToolIds,\n\t\tthinkingFormat: model.compat.thinkingFormat ?? detected.thinkingFormat,\n\t\topenRouterRouting: model.compat.openRouterRouting ?? {},\n\t};\n}\n"]}
@@ -1 +1 @@
-
{"version":3,"file":"openai-responses-shared.d.ts","sourceRoot":"","sources":["../../src/providers/openai-responses-shared.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACX,IAAI,IAAI,UAAU,EAClB,6BAA6B,EAE7B,aAAa,EAMb,mBAAmB,EACnB,MAAM,yCAAyC,CAAC;AAEjD,OAAO,KAAK,EACX,GAAG,EACH,gBAAgB,EAChB,OAAO,EAEP,KAAK,EAIL,IAAI,EAEJ,KAAK,EACL,MAAM,aAAa,CAAC;AACrB,OAAO,KAAK,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AAuB5E,MAAM,WAAW,4BAA4B;IAC5C,WAAW,CAAC,EAAE,6BAA6B,CAAC,cAAc,CAAC,CAAC;IAC5D,uBAAuB,CAAC,EAAE,CACzB,KAAK,EAAE,KAAK,EACZ,WAAW,EAAE,6BAA6B,CAAC,cAAc,CAAC,GAAG,SAAS,KAClE,IAAI,CAAC;CACV;AAED,MAAM,WAAW,+BAA+B;IAC/C,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAED,MAAM,WAAW,4BAA4B;IAC5C,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CACxB;AAMD,wBAAgB,wBAAwB,CAAC,IAAI,SAAS,GAAG,EACxD,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,wBAAwB,EAAE,WAAW,CAAC,MAAM,CAAC,EAC7C,OAAO,CAAC,EAAE,+BAA+B,GACvC,aAAa,CAiKf;AAMD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,OAAO,CAAC,EAAE,4BAA4B,GAAG,UAAU,EAAE,CASzG;AAMD,wBAAsB,sBAAsB,CAAC,IAAI,SAAS,GAAG,EAC5D,YAAY,EAAE,aAAa,CAAC,mBAAmB,CAAC,EAChD,MAAM,EAAE,gBAAgB,EACxB,MAAM,EAAE,2BAA2B,EACnC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,CAAC,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,CAAC,CAgMf","sourcesContent":["import type OpenAI from \"openai\";\nimport type {\n\tTool as OpenAITool,\n\tResponseCreateParamsStreaming,\n\tResponseFunctionToolCall,\n\tResponseInput,\n\tResponseInputContent,\n\tResponseInputImage,\n\tResponseInputText,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n\tResponseStreamEvent,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tModel,\n\tStopReason,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tUsage,\n} from \"../types.js\";\nimport type { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n// =============================================================================\n// Utilities\n// =============================================================================\n\n/** Fast deterministic hash to shorten long strings */\nfunction shortHash(str: string): string {\n\tlet h1 = 0xdeadbeef;\n\tlet h2 = 0x41c6ce57;\n\tfor (let i = 0; i < str.length; i++) {\n\t\tconst ch = str.charCodeAt(i);\n\t\th1 = Math.imul(h1 ^ ch, 2654435761);\n\t\th2 = Math.imul(h2 ^ ch, 1597334677);\n\t}\n\th1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507) ^ Math.imul(h2 ^ (h2 >>> 13), 3266489909);\n\th2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507) ^ Math.imul(h1 ^ (h1 >>> 13), 3266489909);\n\treturn (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);\n}\n\nexport interface OpenAIResponsesStreamOptions {\n\tserviceTier?: ResponseCreateParamsStreaming[\"service_tier\"];\n\tapplyServiceTierPricing?: (\n\t\tusage: Usage,\n\t\tserviceTier: ResponseCreateParamsStreaming[\"service_tier\"] | undefined,\n\t) => void;\n}\n\nexport interface ConvertResponsesMessagesOptions {\n\tincludeSystemPrompt?: boolean;\n}\n\nexport interface ConvertResponsesToolsOptions {\n\tstrict?: boolean | null;\n}\n\n// =============================================================================\n// Message conversion\n// =============================================================================\n\nexport function convertResponsesMessages<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: 
Context,\n\tallowedToolCallProviders: ReadonlySet<string>,\n\toptions?: ConvertResponsesMessagesOptions,\n): ResponseInput {\n\tconst messages: ResponseInput = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (!allowedToolCallProviders.has(model.provider)) return id;\n\t\tif (!id.includes(\"|\")) return id;\n\t\tconst [callId, itemId] = id.split(\"|\");\n\t\tconst sanitizedCallId = callId.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n\t\tlet sanitizedItemId = itemId.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n\t\t// OpenAI Responses API requires item id to start with \"fc\"\n\t\tif (!sanitizedItemId.startsWith(\"fc\")) {\n\t\t\tsanitizedItemId = `fc_${sanitizedItemId}`;\n\t\t}\n\t\tconst normalizedCallId = sanitizedCallId.length > 64 ? sanitizedCallId.slice(0, 64) : sanitizedCallId;\n\t\tconst normalizedItemId = sanitizedItemId.length > 64 ? sanitizedItemId.slice(0, 64) : sanitizedItemId;\n\t\treturn `${normalizedCallId}|${normalizedItemId}`;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, normalizeToolCallId);\n\n\tconst includeSystemPrompt = options?.includeSystemPrompt ?? true;\n\tif (includeSystemPrompt && context.systemPrompt) {\n\t\tconst role = model.reasoning ? \"developer\" : \"system\";\n\t\tmessages.push({\n\t\t\trole,\n\t\t\tcontent: sanitizeSurrogates(context.systemPrompt),\n\t\t});\n\t}\n\n\tlet msgIndex = 0;\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ResponseInputContent[] = msg.content.map((item): ResponseInputContent => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ResponseInputText;\n\t\t\t\t\t}\n\t\t\t\t\treturn {\n\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t} satisfies ResponseInputImage;\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? 
content.filter((c) => c.type !== \"input_image\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst output: ResponseInput = [];\n\t\t\tconst assistantMsg = msg as AssistantMessage;\n\t\t\tconst isDifferentModel =\n\t\t\t\tassistantMsg.model !== model.id &&\n\t\t\t\tassistantMsg.provider === model.provider &&\n\t\t\t\tassistantMsg.api === model.api;\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"thinking\") {\n\t\t\t\t\tif (block.thinkingSignature) {\n\t\t\t\t\t\tconst reasoningItem = JSON.parse(block.thinkingSignature) as ResponseReasoningItem;\n\t\t\t\t\t\toutput.push(reasoningItem);\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"text\") {\n\t\t\t\t\tconst textBlock = block as TextContent;\n\t\t\t\t\t// OpenAI requires id to be max 64 characters\n\t\t\t\t\tlet msgId = textBlock.textSignature;\n\t\t\t\t\tif (!msgId) {\n\t\t\t\t\t\tmsgId = `msg_${msgIndex}`;\n\t\t\t\t\t} else if (msgId.length > 64) {\n\t\t\t\t\t\tmsgId = `msg_${shortHash(msgId)}`;\n\t\t\t\t\t}\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(textBlock.text), annotations: [] }],\n\t\t\t\t\t\tstatus: \"completed\",\n\t\t\t\t\t\tid: msgId,\n\t\t\t\t\t} satisfies ResponseOutputMessage);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst toolCall = block as ToolCall;\n\t\t\t\t\tconst [callId, itemIdRaw] = toolCall.id.split(\"|\");\n\t\t\t\t\tlet itemId: string | undefined = itemIdRaw;\n\n\t\t\t\t\t// For different-model messages, set id to undefined to avoid pairing validation.\n\t\t\t\t\t// OpenAI tracks which fc_xxx IDs were paired with rs_xxx reasoning items.\n\t\t\t\t\t// By omitting the id, we avoid triggering that validation (like cross-provider does).\n\t\t\t\t\tif (isDifferentModel && itemId?.startsWith(\"fc_\")) {\n\t\t\t\t\t\titemId = undefined;\n\t\t\t\t\t}\n\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"function_call\",\n\t\t\t\t\t\tid: itemId,\n\t\t\t\t\t\tcall_id: callId,\n\t\t\t\t\t\tname: toolCall.name,\n\t\t\t\t\t\targuments: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (output.length === 0) continue;\n\t\t\tmessages.push(...output);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Extract text and image content\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c): c is TextContent => c.type === \"text\")\n\t\t\t\t.map((c) => c.text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst hasImages = msg.content.some((c): c is ImageContent => c.type === \"image\");\n\n\t\t\t// Always send function_call_output with text (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst [callId] = msg.toolCallId.split(\"|\");\n\t\t\tmessages.push({\n\t\t\t\ttype: \"function_call_output\",\n\t\t\t\tcall_id: callId,\n\t\t\t\toutput: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t});\n\n\t\t\t// If there are images and model supports them, send a follow-up user message with images\n\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\tconst contentParts: ResponseInputContent[] = [];\n\n\t\t\t\t// Add text prefix\n\t\t\t\tcontentParts.push({\n\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t} satisfies ResponseInputText);\n\n\t\t\t\t// Add images\n\t\t\t\tfor (const block of msg.content) {\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\t\timage_url: `data:${block.mimeType};base64,${block.data}`,\n\t\t\t\t\t\t} satisfies ResponseInputImage);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: contentParts,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\tmsgIndex++;\n\t}\n\n\treturn messages;\n}\n\n// =============================================================================\n// Tool conversion\n// =============================================================================\n\nexport function convertResponsesTools(tools: Tool[], options?: ConvertResponsesToolsOptions): OpenAITool[] {\n\tconst strict = options?.strict === undefined ? false : options.strict;\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tname: tool.name,\n\t\tdescription: tool.description,\n\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\tstrict,\n\t}));\n}\n\n// =============================================================================\n// Stream processing\n// =============================================================================\n\nexport async function processResponsesStream<TApi extends Api>(\n\topenaiStream: AsyncIterable<ResponseStreamEvent>,\n\toutput: AssistantMessage,\n\tstream: AssistantMessageEventStream,\n\tmodel: Model<TApi>,\n\toptions?: OpenAIResponsesStreamOptions,\n): Promise<void> {\n\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\tconst blocks = output.content;\n\tconst blockIndex = () => blocks.length - 1;\n\n\tfor await (const event of openaiStream) {\n\t\tif (event.type === \"response.output_item.added\") {\n\t\t\tconst item = event.item;\n\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t} else if (item.type === \"message\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = {\n\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\tname: item.name,\n\t\t\t\t\targuments: {},\n\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t};\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_part.added\") {\n\t\t\tif 
(currentItem && currentItem.type === \"reasoning\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tcurrentItem.summary.push(event.part);\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_text.delta\") {\n\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\tif (lastPart) {\n\t\t\t\t\tcurrentBlock.thinking += event.delta;\n\t\t\t\t\tlastPart.text += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_part.done\") {\n\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\tif (lastPart) {\n\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: \"\\n\\n\",\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.content_part.added\") {\n\t\t\tif (currentItem?.type === \"message\") {\n\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t// Filter out ReasoningText, only accept output_text and refusal\n\t\t\t\tif (event.part.type === \"output_text\" || event.part.type === \"refusal\") {\n\t\t\t\t\tcurrentItem.content.push(event.part);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.output_text.delta\") {\n\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tif (!currentItem.content || currentItem.content.length === 0) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\tif (lastPart?.type === \"output_text\") {\n\t\t\t\t\tcurrentBlock.text += event.delta;\n\t\t\t\t\tlastPart.text += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.refusal.delta\") {\n\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tif (!currentItem.content || currentItem.content.length === 0) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\tif (lastPart?.type === \"refusal\") {\n\t\t\t\t\tcurrentBlock.text += event.delta;\n\t\t\t\t\tlastPart.refusal += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.function_call_arguments.delta\") {\n\t\t\tif (currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\tcurrentBlock.partialJson += event.delta;\n\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tdelta: 
event.delta,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (event.type === \"response.function_call_arguments.done\") {\n\t\t\tif (currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\tcurrentBlock.partialJson = event.arguments;\n\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t}\n\t\t} else if (event.type === \"response.output_item.done\") {\n\t\t\tconst item = event.item;\n\n\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t\tcurrentBlock = null;\n\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? c.text : c.refusal)).join(\"\");\n\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t\tcurrentBlock = null;\n\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\tconst args =\n\t\t\t\t\tcurrentBlock?.type === \"toolCall\" && currentBlock.partialJson\n\t\t\t\t\t\t? JSON.parse(currentBlock.partialJson)\n\t\t\t\t\t\t: JSON.parse(item.arguments);\n\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\tname: item.name,\n\t\t\t\t\targuments: args,\n\t\t\t\t};\n\n\t\t\t\tcurrentBlock = null;\n\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t}\n\t\t} else if (event.type === \"response.completed\") {\n\t\t\tconst response = event.response;\n\t\t\tif (response?.usage) {\n\t\t\t\tconst cachedTokens = response.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\toutput.usage = {\n\t\t\t\t\t// OpenAI includes cached tokens in input_tokens, so subtract to get non-cached input\n\t\t\t\t\tinput: (response.usage.input_tokens || 0) - cachedTokens,\n\t\t\t\t\toutput: response.usage.output_tokens || 0,\n\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\ttotalTokens: response.usage.total_tokens || 0,\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t};\n\t\t\t}\n\t\t\tcalculateCost(model, output.usage);\n\t\t\tif (options?.applyServiceTierPricing) {\n\t\t\t\tconst serviceTier = response?.service_tier ?? 
options.serviceTier;\n\t\t\t\toptions.applyServiceTierPricing(output.usage, serviceTier);\n\t\t\t}\n\t\t\t// Map status to stop reason\n\t\t\toutput.stopReason = mapStopReason(response?.status);\n\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t}\n\t\t} else if (event.type === \"error\") {\n\t\t\tthrow new Error(`Error Code ${event.code}: ${event.message}` || \"Unknown error\");\n\t\t} else if (event.type === \"response.failed\") {\n\t\t\tthrow new Error(\"Unknown error\");\n\t\t}\n\t}\n}\n\nfunction mapStopReason(status: OpenAI.Responses.ResponseStatus | undefined): StopReason {\n\tif (!status) return \"stop\";\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\t// These two are wonky ...\n\t\tcase \"in_progress\":\n\t\tcase \"queued\":\n\t\t\treturn \"stop\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = status;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
+
{"version":3,"file":"openai-responses-shared.d.ts","sourceRoot":"","sources":["../../src/providers/openai-responses-shared.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EACX,IAAI,IAAI,UAAU,EAClB,6BAA6B,EAE7B,aAAa,EAMb,mBAAmB,EACnB,MAAM,yCAAyC,CAAC;AAEjD,OAAO,KAAK,EACX,GAAG,EACH,gBAAgB,EAChB,OAAO,EAEP,KAAK,EAIL,IAAI,EAEJ,KAAK,EACL,MAAM,aAAa,CAAC;AACrB,OAAO,KAAK,EAAE,2BAA2B,EAAE,MAAM,0BAA0B,CAAC;AAuB5E,MAAM,WAAW,4BAA4B;IAC5C,WAAW,CAAC,EAAE,6BAA6B,CAAC,cAAc,CAAC,CAAC;IAC5D,uBAAuB,CAAC,EAAE,CACzB,KAAK,EAAE,KAAK,EACZ,WAAW,EAAE,6BAA6B,CAAC,cAAc,CAAC,GAAG,SAAS,KAClE,IAAI,CAAC;CACV;AAED,MAAM,WAAW,+BAA+B;IAC/C,mBAAmB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAED,MAAM,WAAW,4BAA4B;IAC5C,MAAM,CAAC,EAAE,OAAO,GAAG,IAAI,CAAC;CACxB;AAMD,wBAAgB,wBAAwB,CAAC,IAAI,SAAS,GAAG,EACxD,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,EAAE,OAAO,EAChB,wBAAwB,EAAE,WAAW,CAAC,MAAM,CAAC,EAC7C,OAAO,CAAC,EAAE,+BAA+B,GACvC,aAAa,CAoKf;AAMD,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,IAAI,EAAE,EAAE,OAAO,CAAC,EAAE,4BAA4B,GAAG,UAAU,EAAE,CASzG;AAMD,wBAAsB,sBAAsB,CAAC,IAAI,SAAS,GAAG,EAC5D,YAAY,EAAE,aAAa,CAAC,mBAAmB,CAAC,EAChD,MAAM,EAAE,gBAAgB,EACxB,MAAM,EAAE,2BAA2B,EACnC,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,EAClB,OAAO,CAAC,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,CAAC,CAgMf","sourcesContent":["import type OpenAI from \"openai\";\nimport type {\n\tTool as OpenAITool,\n\tResponseCreateParamsStreaming,\n\tResponseFunctionToolCall,\n\tResponseInput,\n\tResponseInputContent,\n\tResponseInputImage,\n\tResponseInputText,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n\tResponseStreamEvent,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tImageContent,\n\tModel,\n\tStopReason,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n\tUsage,\n} from \"../types.js\";\nimport type { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport { transformMessages } from \"./transform-messages.js\";\n\n// =============================================================================\n// Utilities\n// =============================================================================\n\n/** Fast deterministic hash to shorten long strings */\nfunction shortHash(str: string): string {\n\tlet h1 = 0xdeadbeef;\n\tlet h2 = 0x41c6ce57;\n\tfor (let i = 0; i < str.length; i++) {\n\t\tconst ch = str.charCodeAt(i);\n\t\th1 = Math.imul(h1 ^ ch, 2654435761);\n\t\th2 = Math.imul(h2 ^ ch, 1597334677);\n\t}\n\th1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507) ^ Math.imul(h2 ^ (h2 >>> 13), 3266489909);\n\th2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507) ^ Math.imul(h1 ^ (h1 >>> 13), 3266489909);\n\treturn (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);\n}\n\nexport interface OpenAIResponsesStreamOptions {\n\tserviceTier?: ResponseCreateParamsStreaming[\"service_tier\"];\n\tapplyServiceTierPricing?: (\n\t\tusage: Usage,\n\t\tserviceTier: ResponseCreateParamsStreaming[\"service_tier\"] | undefined,\n\t) => void;\n}\n\nexport interface ConvertResponsesMessagesOptions {\n\tincludeSystemPrompt?: boolean;\n}\n\nexport interface ConvertResponsesToolsOptions {\n\tstrict?: boolean | null;\n}\n\n// =============================================================================\n// Message conversion\n// =============================================================================\n\nexport function convertResponsesMessages<TApi extends Api>(\n\tmodel: Model<TApi>,\n\tcontext: 
Context,\n\tallowedToolCallProviders: ReadonlySet<string>,\n\toptions?: ConvertResponsesMessagesOptions,\n): ResponseInput {\n\tconst messages: ResponseInput = [];\n\n\tconst normalizeToolCallId = (id: string): string => {\n\t\tif (!allowedToolCallProviders.has(model.provider)) return id;\n\t\tif (!id.includes(\"|\")) return id;\n\t\tconst [callId, itemId] = id.split(\"|\");\n\t\tconst sanitizedCallId = callId.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n\t\tlet sanitizedItemId = itemId.replace(/[^a-zA-Z0-9_-]/g, \"_\");\n\t\t// OpenAI Responses API requires item id to start with \"fc\"\n\t\tif (!sanitizedItemId.startsWith(\"fc\")) {\n\t\t\tsanitizedItemId = `fc_${sanitizedItemId}`;\n\t\t}\n\t\t// Truncate to 64 chars and strip trailing underscores (OpenAI Codex rejects them)\n\t\tlet normalizedCallId = sanitizedCallId.length > 64 ? sanitizedCallId.slice(0, 64) : sanitizedCallId;\n\t\tlet normalizedItemId = sanitizedItemId.length > 64 ? sanitizedItemId.slice(0, 64) : sanitizedItemId;\n\t\tnormalizedCallId = normalizedCallId.replace(/_+$/, \"\");\n\t\tnormalizedItemId = normalizedItemId.replace(/_+$/, \"\");\n\t\treturn `${normalizedCallId}|${normalizedItemId}`;\n\t};\n\n\tconst transformedMessages = transformMessages(context.messages, model, normalizeToolCallId);\n\n\tconst includeSystemPrompt = options?.includeSystemPrompt ?? true;\n\tif (includeSystemPrompt && context.systemPrompt) {\n\t\tconst role = model.reasoning ? \"developer\" : \"system\";\n\t\tmessages.push({\n\t\t\trole,\n\t\t\tcontent: sanitizeSurrogates(context.systemPrompt),\n\t\t});\n\t}\n\n\tlet msgIndex = 0;\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ResponseInputContent[] = msg.content.map((item): ResponseInputContent => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ResponseInputText;\n\t\t\t\t\t}\n\t\t\t\t\treturn {\n\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t} satisfies ResponseInputImage;\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? 
content.filter((c) => c.type !== \"input_image\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst output: ResponseInput = [];\n\t\t\tconst assistantMsg = msg as AssistantMessage;\n\t\t\tconst isDifferentModel =\n\t\t\t\tassistantMsg.model !== model.id &&\n\t\t\t\tassistantMsg.provider === model.provider &&\n\t\t\t\tassistantMsg.api === model.api;\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"thinking\") {\n\t\t\t\t\tif (block.thinkingSignature) {\n\t\t\t\t\t\tconst reasoningItem = JSON.parse(block.thinkingSignature) as ResponseReasoningItem;\n\t\t\t\t\t\toutput.push(reasoningItem);\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"text\") {\n\t\t\t\t\tconst textBlock = block as TextContent;\n\t\t\t\t\t// OpenAI requires id to be max 64 characters\n\t\t\t\t\tlet msgId = textBlock.textSignature;\n\t\t\t\t\tif (!msgId) {\n\t\t\t\t\t\tmsgId = `msg_${msgIndex}`;\n\t\t\t\t\t} else if (msgId.length > 64) {\n\t\t\t\t\t\tmsgId = `msg_${shortHash(msgId)}`;\n\t\t\t\t\t}\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(textBlock.text), annotations: [] }],\n\t\t\t\t\t\tstatus: \"completed\",\n\t\t\t\t\t\tid: msgId,\n\t\t\t\t\t} satisfies ResponseOutputMessage);\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tconst toolCall = block as ToolCall;\n\t\t\t\t\tconst [callId, itemIdRaw] = toolCall.id.split(\"|\");\n\t\t\t\t\tlet itemId: string | undefined = itemIdRaw;\n\n\t\t\t\t\t// For different-model messages, set id to undefined to avoid pairing validation.\n\t\t\t\t\t// OpenAI tracks which fc_xxx IDs were paired with rs_xxx reasoning items.\n\t\t\t\t\t// By omitting the id, we avoid triggering that validation (like cross-provider does).\n\t\t\t\t\tif (isDifferentModel && itemId?.startsWith(\"fc_\")) {\n\t\t\t\t\t\titemId = undefined;\n\t\t\t\t\t}\n\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"function_call\",\n\t\t\t\t\t\tid: itemId,\n\t\t\t\t\t\tcall_id: callId,\n\t\t\t\t\t\tname: toolCall.name,\n\t\t\t\t\t\targuments: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (output.length === 0) continue;\n\t\t\tmessages.push(...output);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\t// Extract text and image content\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c): c is TextContent => c.type === \"text\")\n\t\t\t\t.map((c) => c.text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst hasImages = msg.content.some((c): c is ImageContent => c.type === \"image\");\n\n\t\t\t// Always send function_call_output with text (or placeholder if only images)\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tconst [callId] = msg.toolCallId.split(\"|\");\n\t\t\tmessages.push({\n\t\t\t\ttype: \"function_call_output\",\n\t\t\t\tcall_id: callId,\n\t\t\t\toutput: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t});\n\n\t\t\t// If there are images and model supports them, send a follow-up user message with images\n\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\tconst contentParts: ResponseInputContent[] = [];\n\n\t\t\t\t// Add text prefix\n\t\t\t\tcontentParts.push({\n\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t} satisfies ResponseInputText);\n\n\t\t\t\t// Add images\n\t\t\t\tfor (const block of msg.content) {\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\t\timage_url: `data:${block.mimeType};base64,${block.data}`,\n\t\t\t\t\t\t} satisfies ResponseInputImage);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: contentParts,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\tmsgIndex++;\n\t}\n\n\treturn messages;\n}\n\n// =============================================================================\n// Tool conversion\n// =============================================================================\n\nexport function convertResponsesTools(tools: Tool[], options?: ConvertResponsesToolsOptions): OpenAITool[] {\n\tconst strict = options?.strict === undefined ? false : options.strict;\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tname: tool.name,\n\t\tdescription: tool.description,\n\t\tparameters: tool.parameters as any, // TypeBox already generates JSON Schema\n\t\tstrict,\n\t}));\n}\n\n// =============================================================================\n// Stream processing\n// =============================================================================\n\nexport async function processResponsesStream<TApi extends Api>(\n\topenaiStream: AsyncIterable<ResponseStreamEvent>,\n\toutput: AssistantMessage,\n\tstream: AssistantMessageEventStream,\n\tmodel: Model<TApi>,\n\toptions?: OpenAIResponsesStreamOptions,\n): Promise<void> {\n\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\tconst blocks = output.content;\n\tconst blockIndex = () => blocks.length - 1;\n\n\tfor await (const event of openaiStream) {\n\t\tif (event.type === \"response.output_item.added\") {\n\t\t\tconst item = event.item;\n\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t} else if (item.type === \"message\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\tcurrentItem = item;\n\t\t\t\tcurrentBlock = {\n\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\tname: item.name,\n\t\t\t\t\targuments: {},\n\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t};\n\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_part.added\") {\n\t\t\tif 
(currentItem && currentItem.type === \"reasoning\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tcurrentItem.summary.push(event.part);\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_text.delta\") {\n\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\tif (lastPart) {\n\t\t\t\t\tcurrentBlock.thinking += event.delta;\n\t\t\t\t\tlastPart.text += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.reasoning_summary_part.done\") {\n\t\t\tif (currentItem?.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\tif (lastPart) {\n\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: \"\\n\\n\",\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.content_part.added\") {\n\t\t\tif (currentItem?.type === \"message\") {\n\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t// Filter out ReasoningText, only accept output_text and refusal\n\t\t\t\tif (event.part.type === \"output_text\" || event.part.type === \"refusal\") {\n\t\t\t\t\tcurrentItem.content.push(event.part);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.output_text.delta\") {\n\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tif (!currentItem.content || currentItem.content.length === 0) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\tif (lastPart?.type === \"output_text\") {\n\t\t\t\t\tcurrentBlock.text += event.delta;\n\t\t\t\t\tlastPart.text += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.refusal.delta\") {\n\t\t\tif (currentItem?.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tif (!currentItem.content || currentItem.content.length === 0) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\tif (lastPart?.type === \"refusal\") {\n\t\t\t\t\tcurrentBlock.text += event.delta;\n\t\t\t\t\tlastPart.refusal += event.delta;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\tdelta: event.delta,\n\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (event.type === \"response.function_call_arguments.delta\") {\n\t\t\tif (currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\tcurrentBlock.partialJson += event.delta;\n\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tdelta: 
event.delta,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (event.type === \"response.function_call_arguments.done\") {\n\t\t\tif (currentItem?.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\tcurrentBlock.partialJson = event.arguments;\n\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t}\n\t\t} else if (event.type === \"response.output_item.done\") {\n\t\t\tconst item = event.item;\n\n\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t\tcurrentBlock = null;\n\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? c.text : c.refusal)).join(\"\");\n\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\tpartial: output,\n\t\t\t\t});\n\t\t\t\tcurrentBlock = null;\n\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\tconst args =\n\t\t\t\t\tcurrentBlock?.type === \"toolCall\" && currentBlock.partialJson\n\t\t\t\t\t\t? JSON.parse(currentBlock.partialJson)\n\t\t\t\t\t\t: JSON.parse(item.arguments);\n\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\tname: item.name,\n\t\t\t\t\targuments: args,\n\t\t\t\t};\n\n\t\t\t\tcurrentBlock = null;\n\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t}\n\t\t} else if (event.type === \"response.completed\") {\n\t\t\tconst response = event.response;\n\t\t\tif (response?.usage) {\n\t\t\t\tconst cachedTokens = response.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\toutput.usage = {\n\t\t\t\t\t// OpenAI includes cached tokens in input_tokens, so subtract to get non-cached input\n\t\t\t\t\tinput: (response.usage.input_tokens || 0) - cachedTokens,\n\t\t\t\t\toutput: response.usage.output_tokens || 0,\n\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\ttotalTokens: response.usage.total_tokens || 0,\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t};\n\t\t\t}\n\t\t\tcalculateCost(model, output.usage);\n\t\t\tif (options?.applyServiceTierPricing) {\n\t\t\t\tconst serviceTier = response?.service_tier ?? 
options.serviceTier;\n\t\t\t\toptions.applyServiceTierPricing(output.usage, serviceTier);\n\t\t\t}\n\t\t\t// Map status to stop reason\n\t\t\toutput.stopReason = mapStopReason(response?.status);\n\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t}\n\t\t} else if (event.type === \"error\") {\n\t\t\tthrow new Error(`Error Code ${event.code}: ${event.message}` || \"Unknown error\");\n\t\t} else if (event.type === \"response.failed\") {\n\t\t\tthrow new Error(\"Unknown error\");\n\t\t}\n\t}\n}\n\nfunction mapStopReason(status: OpenAI.Responses.ResponseStatus | undefined): StopReason {\n\tif (!status) return \"stop\";\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\t// These two are wonky ...\n\t\tcase \"in_progress\":\n\t\tcase \"queued\":\n\t\t\treturn \"stop\";\n\t\tdefault: {\n\t\t\tconst _exhaustive: never = status;\n\t\t\tthrow new Error(`Unhandled stop reason: ${_exhaustive}`);\n\t\t}\n\t}\n}\n"]}
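One detail embedded in processResponsesStream above is the usage accounting on response.completed: OpenAI reports cached tokens as part of input_tokens, so the handler subtracts them to recover the non-cached input count. A minimal sketch of that mapping, assuming the field shapes visible in the code (the zeroed cost object is filled in afterwards by calculateCost):

// Sketch of the usage mapping done on "response.completed" (assumed shapes).
interface ResponsesUsage {
	input_tokens?: number;
	output_tokens?: number;
	total_tokens?: number;
	input_tokens_details?: { cached_tokens?: number };
}

function mapUsage(u: ResponsesUsage) {
	const cachedTokens = u.input_tokens_details?.cached_tokens || 0;
	return {
		// OpenAI includes cached tokens in input_tokens, so subtract to get non-cached input
		input: (u.input_tokens || 0) - cachedTokens,
		output: u.output_tokens || 0,
		cacheRead: cachedTokens,
		cacheWrite: 0,
		totalTokens: u.total_tokens || 0,
	};
}

// e.g. 1200 input tokens, of which 1000 were served from cache:
console.log(mapUsage({ input_tokens: 1200, output_tokens: 50, total_tokens: 1250, input_tokens_details: { cached_tokens: 1000 } }));
// → { input: 200, output: 50, cacheRead: 1000, cacheWrite: 0, totalTokens: 1250 }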
@@ -35,8 +35,11 @@ export function convertResponsesMessages(model, context, allowedToolCallProviders
 		if (!sanitizedItemId.startsWith("fc")) {
 			sanitizedItemId = `fc_${sanitizedItemId}`;
 		}
-		const normalizedCallId = sanitizedCallId.length > 64 ? sanitizedCallId.slice(0, 64) : sanitizedCallId;
-		const normalizedItemId = sanitizedItemId.length > 64 ? sanitizedItemId.slice(0, 64) : sanitizedItemId;
+		// Truncate to 64 chars and strip trailing underscores (OpenAI Codex rejects them)
+		let normalizedCallId = sanitizedCallId.length > 64 ? sanitizedCallId.slice(0, 64) : sanitizedCallId;
+		let normalizedItemId = sanitizedItemId.length > 64 ? sanitizedItemId.slice(0, 64) : sanitizedItemId;
+		normalizedCallId = normalizedCallId.replace(/_+$/, "");
+		normalizedItemId = normalizedItemId.replace(/_+$/, "");
 		return `${normalizedCallId}|${normalizedItemId}`;
 	};
 	const transformedMessages = transformMessages(context.messages, model, normalizeToolCallId);
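The hunk above is the substantive change in 0.50.2 for this file: after truncating tool call IDs to 64 characters, normalizeToolCallId now also strips trailing underscores, which OpenAI Codex rejects. A standalone sketch of the new behavior (the real function is a closure inside convertResponsesMessages and first checks allowedToolCallProviders, omitted here):

// Standalone sketch of the updated ID normalization, assuming the
// "callId|itemId" format this package uses for Responses tool calls.
function normalizeToolCallId(id: string): string {
	if (!id.includes("|")) return id;
	const [callId, itemId] = id.split("|");
	const sanitizedCallId = callId.replace(/[^a-zA-Z0-9_-]/g, "_");
	let sanitizedItemId = itemId.replace(/[^a-zA-Z0-9_-]/g, "_");
	// OpenAI Responses API requires the item id to start with "fc"
	if (!sanitizedItemId.startsWith("fc")) {
		sanitizedItemId = `fc_${sanitizedItemId}`;
	}
	// New in 0.50.2: truncate to 64 chars, then strip trailing underscores
	let normalizedCallId = sanitizedCallId.length > 64 ? sanitizedCallId.slice(0, 64) : sanitizedCallId;
	let normalizedItemId = sanitizedItemId.length > 64 ? sanitizedItemId.slice(0, 64) : sanitizedItemId;
	normalizedCallId = normalizedCallId.replace(/_+$/, "");
	normalizedItemId = normalizedItemId.replace(/_+$/, "");
	return `${normalizedCallId}|${normalizedItemId}`;
}

// A long id whose 64-char truncation would end in "_" now loses that suffix:
const long = `call_${"a".repeat(58)}__tail|fc_item`;
console.log(normalizeToolCallId(long)); // callId part now ends in "a", not "_"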