@jeffreycao/copilot-api 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +381 -0
- package/dist/config-BKsHEU7z.js +258 -0
- package/dist/config-BKsHEU7z.js.map +1 -0
- package/dist/main.js +538 -0
- package/dist/main.js.map +1 -0
- package/dist/server-IY-mTdPh.js +2252 -0
- package/dist/server-IY-mTdPh.js.map +1 -0
- package/package.json +68 -0
- package/dist/server-IY-mTdPh.js.map
@@ -0,0 +1 @@
+
{"version":3,"file":"server-IY-mTdPh.js","names":["stats: fs.Stats","state","x","headers: Record<string, string>","logger","handleCompletion","isNonStreaming","handleCompletion","newMessages: Array<Message>","contentParts: Array<ContentPart>","assistantContentBlocks: Array<AnthropicAssistantContentBlock>","headers: Record<string, string>","THINKING_TEXT","input: Array<ResponseInputItem>","items: Array<ResponseInputItem>","pendingContent: Array<ResponseInputContent>","contentBlocks: Array<AnthropicAssistantContentBlock>","segments: Array<string>","parsed: unknown","result: Array<ResponseInputContent>","state","handleOutputItemAdded","handleOutputItemDone","events","THINKING_TEXT","blockIndex","result: Array<ResponseInputItem>","headers: Record<string, string>","state","events: Array<AnthropicStreamEventData>","events","logger","RESPONSES_ENDPOINT","streamState: AnthropicStreamState","events","isAsyncIterable","toolResults: Array<AnthropicToolResultBlock>","textBlocks: Array<AnthropicTextBlock>","logger"],"sources":["../src/lib/approval.ts","../src/lib/logger.ts","../src/lib/rate-limit.ts","../src/lib/tokenizer.ts","../src/services/copilot/create-chat-completions.ts","../src/routes/chat-completions/handler.ts","../src/routes/chat-completions/route.ts","../src/services/copilot/create-embeddings.ts","../src/routes/embeddings/route.ts","../src/routes/messages/utils.ts","../src/routes/messages/non-stream-translation.ts","../src/routes/messages/count-tokens-handler.ts","../src/services/copilot/create-responses.ts","../src/routes/messages/responses-translation.ts","../src/routes/messages/responses-stream-translation.ts","../src/routes/responses/utils.ts","../src/services/copilot/create-messages.ts","../src/routes/messages/stream-translation.ts","../src/routes/messages/handler.ts","../src/routes/messages/route.ts","../src/routes/models/route.ts","../src/routes/responses/stream-id-sync.ts","../src/routes/responses/handler.ts","../src/routes/responses/route.ts","../src/routes/token/route.ts","../src/routes/usage/route.ts","../src/server.ts"],"sourcesContent":["import consola from \"consola\"\n\nimport { HTTPError } from \"./error\"\n\nexport const awaitApproval = async () => {\n const response = await consola.prompt(`Accept incoming request?`, {\n type: \"confirm\",\n })\n\n if (!response)\n throw new HTTPError(\n \"Request rejected\",\n Response.json({ message: \"Request rejected\" }, { status: 403 }),\n )\n}\n","import consola, { type ConsolaInstance } from \"consola\"\nimport fs from \"node:fs\"\nimport path from \"node:path\"\nimport util from \"node:util\"\n\nimport { PATHS } from \"./paths\"\nimport { state } from \"./state\"\n\nconst LOG_RETENTION_DAYS = 7\nconst LOG_RETENTION_MS = LOG_RETENTION_DAYS * 24 * 60 * 60 * 1000\nconst CLEANUP_INTERVAL_MS = 24 * 60 * 60 * 1000\nconst LOG_DIR = path.join(PATHS.APP_DIR, \"logs\")\nconst FLUSH_INTERVAL_MS = 1000\nconst MAX_BUFFER_SIZE = 100\n\nconst logStreams = new Map<string, fs.WriteStream>()\nconst logBuffers = new Map<string, Array<string>>()\n\nconst ensureLogDirectory = () => {\n if (!fs.existsSync(LOG_DIR)) {\n fs.mkdirSync(LOG_DIR, { recursive: true })\n }\n}\n\nconst cleanupOldLogs = () => {\n if (!fs.existsSync(LOG_DIR)) {\n return\n }\n\n const now = Date.now()\n\n for (const entry of fs.readdirSync(LOG_DIR)) {\n const filePath = path.join(LOG_DIR, entry)\n\n let stats: fs.Stats\n try {\n stats = fs.statSync(filePath)\n } catch {\n continue\n }\n\n if (!stats.isFile()) {\n continue\n }\n\n if (now - stats.mtimeMs > LOG_RETENTION_MS) {\n try 
{\n fs.rmSync(filePath)\n } catch {\n continue\n }\n }\n }\n}\n\nconst formatArgs = (args: Array<unknown>) =>\n args\n .map((arg) =>\n typeof arg === \"string\" ? arg : (\n util.inspect(arg, { depth: null, colors: false })\n ),\n )\n .join(\" \")\n\nconst sanitizeName = (name: string) => {\n const normalized = name\n .toLowerCase()\n .replaceAll(/[^a-z0-9]+/g, \"-\")\n .replaceAll(/^-+|-+$/g, \"\")\n\n return normalized === \"\" ? \"handler\" : normalized\n}\n\nconst getLogStream = (filePath: string): fs.WriteStream => {\n let stream = logStreams.get(filePath)\n if (!stream || stream.destroyed) {\n stream = fs.createWriteStream(filePath, { flags: \"a\" })\n logStreams.set(filePath, stream)\n\n stream.on(\"error\", (error: unknown) => {\n console.warn(\"Log stream error\", error)\n logStreams.delete(filePath)\n })\n }\n return stream\n}\n\nconst flushBuffer = (filePath: string) => {\n const buffer = logBuffers.get(filePath)\n if (!buffer || buffer.length === 0) {\n return\n }\n\n const stream = getLogStream(filePath)\n const content = buffer.join(\"\\n\") + \"\\n\"\n stream.write(content, (error) => {\n if (error) {\n console.warn(\"Failed to write handler log\", error)\n }\n })\n\n logBuffers.set(filePath, [])\n}\n\nconst flushAllBuffers = () => {\n for (const filePath of logBuffers.keys()) {\n flushBuffer(filePath)\n }\n}\n\nconst appendLine = (filePath: string, line: string) => {\n let buffer = logBuffers.get(filePath)\n if (!buffer) {\n buffer = []\n logBuffers.set(filePath, buffer)\n }\n\n buffer.push(line)\n\n if (buffer.length >= MAX_BUFFER_SIZE) {\n flushBuffer(filePath)\n }\n}\n\nsetInterval(flushAllBuffers, FLUSH_INTERVAL_MS)\n\nconst cleanup = () => {\n flushAllBuffers()\n for (const stream of logStreams.values()) {\n stream.end()\n }\n logStreams.clear()\n logBuffers.clear()\n}\n\nprocess.on(\"exit\", cleanup)\nprocess.on(\"SIGINT\", () => {\n cleanup()\n process.exit(0)\n})\nprocess.on(\"SIGTERM\", () => {\n cleanup()\n process.exit(0)\n})\n\nlet lastCleanup = 0\n\nexport const createHandlerLogger = (name: string): ConsolaInstance => {\n ensureLogDirectory()\n\n const sanitizedName = sanitizeName(name)\n const instance = consola.withTag(name)\n\n if (state.verbose) {\n instance.level = 5\n }\n instance.setReporters([])\n\n instance.addReporter({\n log(logObj) {\n ensureLogDirectory()\n\n if (Date.now() - lastCleanup > CLEANUP_INTERVAL_MS) {\n cleanupOldLogs()\n lastCleanup = Date.now()\n }\n\n const date = logObj.date\n const dateKey = date.toLocaleDateString(\"sv-SE\")\n const timestamp = date.toLocaleString(\"sv-SE\", { hour12: false })\n const filePath = path.join(LOG_DIR, `${sanitizedName}-${dateKey}.log`)\n const message = formatArgs(logObj.args as Array<unknown>)\n const line = `[${timestamp}] [${logObj.type}] [${logObj.tag || name}]${\n message ? 
` ${message}` : \"\"\n }`\n\n appendLine(filePath, line)\n },\n })\n\n return instance\n}\n","import consola from \"consola\"\n\nimport type { State } from \"./state\"\n\nimport { HTTPError } from \"./error\"\nimport { sleep } from \"./utils\"\n\nexport async function checkRateLimit(state: State) {\n if (state.rateLimitSeconds === undefined) return\n\n const now = Date.now()\n\n if (!state.lastRequestTimestamp) {\n state.lastRequestTimestamp = now\n return\n }\n\n const elapsedSeconds = (now - state.lastRequestTimestamp) / 1000\n\n if (elapsedSeconds > state.rateLimitSeconds) {\n state.lastRequestTimestamp = now\n return\n }\n\n const waitTimeSeconds = Math.ceil(state.rateLimitSeconds - elapsedSeconds)\n\n if (!state.rateLimitWait) {\n consola.warn(\n `Rate limit exceeded. Need to wait ${waitTimeSeconds} more seconds.`,\n )\n throw new HTTPError(\n \"Rate limit exceeded\",\n Response.json({ message: \"Rate limit exceeded\" }, { status: 429 }),\n )\n }\n\n const waitTimeMs = waitTimeSeconds * 1000\n consola.warn(\n `Rate limit reached. Waiting ${waitTimeSeconds} seconds before proceeding...`,\n )\n await sleep(waitTimeMs)\n // eslint-disable-next-line require-atomic-updates\n state.lastRequestTimestamp = now\n consola.info(\"Rate limit wait completed, proceeding with request\")\n return\n}\n","import type {\n ChatCompletionsPayload,\n ContentPart,\n Message,\n Tool,\n ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\nimport type { Model } from \"~/services/copilot/get-models\"\n\n// Encoder type mapping\nconst ENCODING_MAP = {\n o200k_base: () => import(\"gpt-tokenizer/encoding/o200k_base\"),\n cl100k_base: () => import(\"gpt-tokenizer/encoding/cl100k_base\"),\n p50k_base: () => import(\"gpt-tokenizer/encoding/p50k_base\"),\n p50k_edit: () => import(\"gpt-tokenizer/encoding/p50k_edit\"),\n r50k_base: () => import(\"gpt-tokenizer/encoding/r50k_base\"),\n} as const\n\ntype SupportedEncoding = keyof typeof ENCODING_MAP\n\n// Define encoder interface\ninterface Encoder {\n encode: (text: string) => Array<number>\n}\n\n// Cache loaded encoders to avoid repeated imports\nconst encodingCache = new Map<string, Encoder>()\n\n/**\n * Calculate tokens for tool calls\n */\nconst calculateToolCallsTokens = (\n toolCalls: Array<ToolCall>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = 0\n for (const toolCall of toolCalls) {\n tokens += constants.funcInit\n tokens += encoder.encode(toolCall.id).length\n tokens += encoder.encode(toolCall.function.name).length\n tokens += encoder.encode(toolCall.function.arguments).length\n }\n tokens += constants.funcEnd\n return tokens\n}\n\n/**\n * Calculate tokens for content parts\n */\nconst calculateContentPartsTokens = (\n contentParts: Array<ContentPart>,\n encoder: Encoder,\n): number => {\n let tokens = 0\n for (const part of contentParts) {\n if (part.type === \"image_url\") {\n tokens += encoder.encode(part.image_url.url).length + 85\n } else if (part.text) {\n tokens += encoder.encode(part.text).length\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for a single message\n */\nconst calculateMessageTokens = (\n message: Message,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n const tokensPerMessage = 3\n const tokensPerName = 1\n let tokens = tokensPerMessage\n for (const [key, value] of Object.entries(message)) {\n if (key === \"reasoning_opaque\") {\n continue\n }\n if (typeof value === \"string\") {\n tokens += encoder.encode(value).length\n 
}\n if (key === \"name\") {\n tokens += tokensPerName\n }\n if (key === \"tool_calls\") {\n tokens += calculateToolCallsTokens(\n value as Array<ToolCall>,\n encoder,\n constants,\n )\n }\n if (key === \"content\" && Array.isArray(value)) {\n tokens += calculateContentPartsTokens(\n value as Array<ContentPart>,\n encoder,\n )\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens using custom algorithm\n */\nconst calculateTokens = (\n messages: Array<Message>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (messages.length === 0) {\n return 0\n }\n let numTokens = 0\n for (const message of messages) {\n numTokens += calculateMessageTokens(message, encoder, constants)\n }\n // every reply is primed with <|start|>assistant<|message|>\n numTokens += 3\n return numTokens\n}\n\n/**\n * Get the corresponding encoder module based on encoding type\n */\nconst getEncodeChatFunction = async (encoding: string): Promise<Encoder> => {\n if (encodingCache.has(encoding)) {\n const cached = encodingCache.get(encoding)\n if (cached) {\n return cached\n }\n }\n\n const supportedEncoding = encoding as SupportedEncoding\n if (!(supportedEncoding in ENCODING_MAP)) {\n const fallbackModule = (await ENCODING_MAP.o200k_base()) as Encoder\n encodingCache.set(encoding, fallbackModule)\n return fallbackModule\n }\n\n const encodingModule = (await ENCODING_MAP[supportedEncoding]()) as Encoder\n encodingCache.set(encoding, encodingModule)\n return encodingModule\n}\n\n/**\n * Get tokenizer type from model information\n */\nexport const getTokenizerFromModel = (model: Model): string => {\n return model.capabilities.tokenizer || \"o200k_base\"\n}\n\n/**\n * Get model-specific constants for token calculation\n */\nconst getModelConstants = (model: Model) => {\n return model.id === \"gpt-3.5-turbo\" || model.id === \"gpt-4\" ?\n {\n funcInit: 10,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n isGpt: true,\n }\n : {\n funcInit: 7,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n isGpt: model.id.startsWith(\"gpt-\"),\n }\n}\n\n/**\n * Calculate tokens for a single parameter\n */\nconst calculateParameterTokens = (\n key: string,\n prop: unknown,\n context: {\n encoder: Encoder\n constants: ReturnType<typeof getModelConstants>\n },\n): number => {\n const { encoder, constants } = context\n let tokens = constants.propKey\n\n // Early return if prop is not an object\n if (typeof prop !== \"object\" || prop === null) {\n return tokens\n }\n\n // Type assertion for parameter properties\n const param = prop as {\n type?: string\n description?: string\n enum?: Array<unknown>\n [key: string]: unknown\n }\n\n const paramName = key\n const paramType = param.type || \"string\"\n let paramDesc = param.description || \"\"\n\n // Handle enum values\n if (param.enum && Array.isArray(param.enum)) {\n tokens += constants.enumInit\n for (const item of param.enum) {\n tokens += constants.enumItem\n tokens += encoder.encode(String(item)).length\n }\n }\n\n // Clean up description\n if (paramDesc.endsWith(\".\")) {\n paramDesc = paramDesc.slice(0, -1)\n }\n\n // Encode the main parameter line\n const line = `${paramName}:${paramType}:${paramDesc}`\n tokens += encoder.encode(line).length\n\n if (param.type === \"array\" && param[\"items\"]) {\n tokens += calculateParametersTokens(param[\"items\"], encoder, constants)\n }\n\n // Handle additional properties (excluding standard ones)\n const excludedKeys = new Set([\"type\", \"description\", 
\"enum\", \"items\"])\n for (const propertyName of Object.keys(param)) {\n if (!excludedKeys.has(propertyName)) {\n const propertyValue = param[propertyName]\n const propertyText =\n typeof propertyValue === \"string\" ? propertyValue : (\n JSON.stringify(propertyValue)\n )\n tokens += encoder.encode(`${propertyName}:${propertyText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for properties object\n */\nconst calculatePropertiesTokens = (\n properties: Record<string, unknown>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = 0\n if (Object.keys(properties).length > 0) {\n tokens += constants.propInit\n for (const propKey of Object.keys(properties)) {\n tokens += calculateParameterTokens(propKey, properties[propKey], {\n encoder,\n constants,\n })\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for function parameters\n */\nconst calculateParametersTokens = (\n parameters: unknown,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (!parameters || typeof parameters !== \"object\") {\n return 0\n }\n\n const params = parameters as Record<string, unknown>\n let tokens = 0\n\n const excludedKeys = new Set([\"$schema\", \"additionalProperties\"])\n for (const [key, value] of Object.entries(params)) {\n if (excludedKeys.has(key)) {\n continue\n }\n if (key === \"properties\") {\n tokens += calculatePropertiesTokens(\n value as Record<string, unknown>,\n encoder,\n constants,\n )\n } else {\n const paramText =\n typeof value === \"string\" ? value : JSON.stringify(value)\n tokens += encoder.encode(`${key}:${paramText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for a single tool\n */\nconst calculateToolTokens = (\n tool: Tool,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = constants.funcInit\n const func = tool.function\n const fName = func.name\n let fDesc = func.description || \"\"\n if (fDesc.endsWith(\".\")) {\n fDesc = fDesc.slice(0, -1)\n }\n const line = fName + \":\" + fDesc\n tokens += encoder.encode(line).length\n if (\n typeof func.parameters === \"object\" // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n && func.parameters !== null\n ) {\n tokens += calculateParametersTokens(func.parameters, encoder, constants)\n }\n return tokens\n}\n\n/**\n * Calculate token count for tools based on model\n */\nexport const numTokensForTools = (\n tools: Array<Tool>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let funcTokenCount = 0\n if (constants.isGpt) {\n for (const tool of tools) {\n funcTokenCount += calculateToolTokens(tool, encoder, constants)\n }\n funcTokenCount += constants.funcEnd\n } else {\n for (const tool of tools) {\n funcTokenCount += encoder.encode(JSON.stringify(tool)).length\n }\n }\n return funcTokenCount\n}\n\n/**\n * Calculate the token count of messages, supporting multiple GPT encoders\n */\nexport const getTokenCount = async (\n payload: ChatCompletionsPayload,\n model: Model,\n): Promise<{ input: number; output: number }> => {\n // Get tokenizer string\n const tokenizer = getTokenizerFromModel(model)\n\n // Get corresponding encoder module\n const encoder = await getEncodeChatFunction(tokenizer)\n\n const simplifiedMessages = payload.messages\n const inputMessages = simplifiedMessages.filter(\n (msg) => msg.role !== \"assistant\",\n )\n const outputMessages = simplifiedMessages.filter(\n (msg) => msg.role 
=== \"assistant\",\n )\n\n const constants = getModelConstants(model)\n // gpt count token https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb\n let inputTokens = calculateTokens(inputMessages, encoder, constants)\n if (payload.tools && payload.tools.length > 0) {\n inputTokens += numTokensForTools(payload.tools, encoder, constants)\n }\n const outputTokens = calculateTokens(outputMessages, encoder, constants)\n\n return {\n input: inputTokens,\n output: outputTokens,\n }\n}\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createChatCompletions = async (\n payload: ChatCompletionsPayload,\n) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = payload.messages.some(\n (x) =>\n typeof x.content !== \"string\"\n && x.content?.some((x) => x.type === \"image_url\"),\n )\n\n // Agent/user check for X-Initiator header\n // Determine if any message is from an agent (\"assistant\" or \"tool\")\n // Refactor `isAgentCall` logic to check only the last message in the history rather than any message. This prevents valid user messages from being incorrectly flagged as agent calls due to previous assistant history, ensuring proper credit consumption for multi-turn conversations.\n let isAgentCall = false\n if (payload.messages.length > 0) {\n const lastMessage = payload.messages.at(-1)\n if (lastMessage) {\n isAgentCall = [\"assistant\", \"tool\"].includes(lastMessage.role)\n }\n }\n\n // Build headers and add X-Initiator\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isAgentCall ? 
\"agent\" : \"user\",\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create chat completions\", response)\n throw new HTTPError(\"Failed to create chat completions\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ChatCompletionResponse\n}\n\n// Streaming types\n\nexport interface ChatCompletionChunk {\n id: string\n object: \"chat.completion.chunk\"\n created: number\n model: string\n choices: Array<Choice>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n completion_tokens_details?: {\n accepted_prediction_tokens: number\n rejected_prediction_tokens: number\n }\n }\n}\n\nexport interface Delta {\n content?: string | null\n role?: \"user\" | \"assistant\" | \"system\" | \"tool\"\n tool_calls?: Array<{\n index: number\n id?: string\n type?: \"function\"\n function?: {\n name?: string\n arguments?: string\n }\n }>\n reasoning_text?: string | null\n reasoning_opaque?: string | null\n}\n\nexport interface Choice {\n index: number\n delta: Delta\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null\n logprobs: object | null\n}\n\n// Non-streaming types\n\nexport interface ChatCompletionResponse {\n id: string\n object: \"chat.completion\"\n created: number\n model: string\n choices: Array<ChoiceNonStreaming>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n }\n}\n\ninterface ResponseMessage {\n role: \"assistant\"\n content: string | null\n reasoning_text?: string | null\n reasoning_opaque?: string | null\n tool_calls?: Array<ToolCall>\n}\n\ninterface ChoiceNonStreaming {\n index: number\n message: ResponseMessage\n logprobs: object | null\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\"\n}\n\n// Payload types\n\nexport interface ChatCompletionsPayload {\n messages: Array<Message>\n model: string\n temperature?: number | null\n top_p?: number | null\n max_tokens?: number | null\n stop?: string | Array<string> | null\n n?: number | null\n stream?: boolean | null\n\n frequency_penalty?: number | null\n presence_penalty?: number | null\n logit_bias?: Record<string, number> | null\n logprobs?: boolean | null\n response_format?: { type: \"json_object\" } | null\n seed?: number | null\n tools?: Array<Tool> | null\n tool_choice?:\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } }\n | null\n user?: string | null\n thinking_budget?: number\n}\n\nexport interface Tool {\n type: \"function\"\n function: {\n name: string\n description?: string\n parameters: Record<string, unknown>\n }\n}\n\nexport interface Message {\n role: \"user\" | \"assistant\" | \"system\" | \"tool\" | \"developer\"\n content: string | Array<ContentPart> | null\n\n name?: string\n tool_calls?: Array<ToolCall>\n tool_call_id?: string\n reasoning_text?: string | null\n reasoning_opaque?: string | null\n}\n\nexport interface ToolCall {\n id: string\n type: \"function\"\n function: {\n name: string\n arguments: string\n }\n}\n\nexport type ContentPart = TextPart | ImagePart\n\nexport interface TextPart {\n type: \"text\"\n text: string\n}\n\nexport interface ImagePart {\n 
type: \"image_url\"\n image_url: {\n url: string\n detail?: \"low\" | \"high\" | \"auto\"\n }\n}\n","import type { Context } from \"hono\"\n\nimport { streamSSE, type SSEMessage } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { createHandlerLogger } from \"~/lib/logger\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\nimport { isNullish } from \"~/lib/utils\"\nimport {\n createChatCompletions,\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n} from \"~/services/copilot/create-chat-completions\"\n\nconst logger = createHandlerLogger(\"chat-completions-handler\")\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n let payload = await c.req.json<ChatCompletionsPayload>()\n logger.debug(\"Request payload:\", JSON.stringify(payload).slice(-400))\n\n // Find the selected model\n const selectedModel = state.models?.data.find(\n (model) => model.id === payload.model,\n )\n\n // Calculate and display token count\n try {\n if (selectedModel) {\n const tokenCount = await getTokenCount(payload, selectedModel)\n logger.info(\"Current token count:\", tokenCount)\n } else {\n logger.warn(\"No model selected, skipping token count calculation\")\n }\n } catch (error) {\n logger.warn(\"Failed to calculate token count:\", error)\n }\n\n if (state.manualApprove) await awaitApproval()\n\n if (isNullish(payload.max_tokens)) {\n payload = {\n ...payload,\n max_tokens: selectedModel?.capabilities.limits.max_output_tokens,\n }\n logger.debug(\"Set max_tokens to:\", JSON.stringify(payload.max_tokens))\n }\n\n const response = await createChatCompletions(payload)\n\n if (isNonStreaming(response)) {\n logger.debug(\"Non-streaming response:\", JSON.stringify(response))\n return c.json(response)\n }\n\n logger.debug(\"Streaming response\")\n return streamSSE(c, async (stream) => {\n for await (const chunk of response) {\n logger.debug(\"Streaming chunk:\", JSON.stringify(chunk))\n await stream.writeSSE(chunk as SSEMessage)\n }\n })\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCompletion } from \"./handler\"\n\nexport const completionRoutes = new Hono()\n\ncompletionRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createEmbeddings = async (payload: EmbeddingRequest) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const response = await fetch(`${copilotBaseUrl(state)}/embeddings`, {\n method: \"POST\",\n headers: copilotHeaders(state),\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) throw new HTTPError(\"Failed to create embeddings\", response)\n\n return (await response.json()) as EmbeddingResponse\n}\n\nexport interface EmbeddingRequest {\n input: string | Array<string>\n model: string\n}\n\nexport interface Embedding {\n object: string\n embedding: Array<number>\n index: number\n}\n\nexport interface EmbeddingResponse {\n object: string\n data: Array<Embedding>\n model: string\n usage: {\n prompt_tokens: 
number\n total_tokens: number\n }\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport {\n createEmbeddings,\n type EmbeddingRequest,\n} from \"~/services/copilot/create-embeddings\"\n\nexport const embeddingRoutes = new Hono()\n\nembeddingRoutes.post(\"/\", async (c) => {\n try {\n const paylod = await c.req.json<EmbeddingRequest>()\n const response = await createEmbeddings(paylod)\n\n return c.json(response)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { type AnthropicResponse } from \"./anthropic-types\"\n\nexport function mapOpenAIStopReasonToAnthropic(\n finishReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null,\n): AnthropicResponse[\"stop_reason\"] {\n if (finishReason === null) {\n return null\n }\n const stopReasonMap = {\n stop: \"end_turn\",\n length: \"max_tokens\",\n tool_calls: \"tool_use\",\n content_filter: \"end_turn\",\n } as const\n return stopReasonMap[finishReason]\n}\n","import type { Model } from \"~/services/copilot/get-models\"\n\nimport { state } from \"~/lib/state\"\nimport {\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n type ContentPart,\n type Message,\n type TextPart,\n type Tool,\n type ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicAssistantContentBlock,\n type AnthropicAssistantMessage,\n type AnthropicMessagesPayload,\n type AnthropicResponse,\n type AnthropicTextBlock,\n type AnthropicThinkingBlock,\n type AnthropicTool,\n type AnthropicToolResultBlock,\n type AnthropicToolUseBlock,\n type AnthropicUserContentBlock,\n type AnthropicUserMessage,\n} from \"./anthropic-types\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\n// Compatible with opencode, it will filter out blocks where the thinking text is empty, so we need add a default thinking text\nexport const THINKING_TEXT = \"Thinking...\"\n\n// Payload translation\nexport function translateToOpenAI(\n payload: AnthropicMessagesPayload,\n): ChatCompletionsPayload {\n const modelId = translateModelName(payload.model)\n const model = state.models?.data.find((m) => m.id === modelId)\n const thinkingBudget = getThinkingBudget(payload, model)\n return {\n model: modelId,\n messages: translateAnthropicMessagesToOpenAI(\n payload,\n modelId,\n thinkingBudget,\n ),\n max_tokens: payload.max_tokens,\n stop: payload.stop_sequences,\n stream: payload.stream,\n temperature: payload.temperature,\n top_p: payload.top_p,\n user: payload.metadata?.user_id,\n tools: translateAnthropicToolsToOpenAI(payload.tools),\n tool_choice: translateAnthropicToolChoiceToOpenAI(payload.tool_choice),\n thinking_budget: thinkingBudget,\n }\n}\n\nfunction getThinkingBudget(\n payload: AnthropicMessagesPayload,\n model: Model | undefined,\n): number | undefined {\n const thinking = payload.thinking\n if (model && thinking) {\n const maxThinkingBudget = Math.min(\n model.capabilities.supports.max_thinking_budget ?? 0,\n (model.capabilities.limits.max_output_tokens ?? 0) - 1,\n )\n if (maxThinkingBudget > 0 && thinking.budget_tokens !== undefined) {\n const budgetTokens = Math.min(thinking.budget_tokens, maxThinkingBudget)\n return Math.max(\n budgetTokens,\n model.capabilities.supports.min_thinking_budget ?? 
1024,\n )\n }\n }\n return undefined\n}\n\nfunction translateModelName(model: string): string {\n // Subagent requests use a specific model number which Copilot doesn't support\n if (model.startsWith(\"claude-sonnet-4-\")) {\n return model.replace(/^claude-sonnet-4-.*/, \"claude-sonnet-4\")\n } else if (model.startsWith(\"claude-opus-4-\")) {\n return model.replace(/^claude-opus-4-.*/, \"claude-opus-4\")\n }\n return model\n}\n\nfunction translateAnthropicMessagesToOpenAI(\n payload: AnthropicMessagesPayload,\n modelId: string,\n thinkingBudget: number | undefined,\n): Array<Message> {\n const systemMessages = handleSystemPrompt(\n payload.system,\n modelId,\n thinkingBudget,\n )\n const otherMessages = payload.messages.flatMap((message) =>\n message.role === \"user\" ?\n handleUserMessage(message)\n : handleAssistantMessage(message, modelId),\n )\n if (modelId.startsWith(\"claude\") && thinkingBudget) {\n const reminder =\n \"<system-reminder>you MUST follow interleaved_thinking_protocol</system-reminder>\"\n const firstUserIndex = otherMessages.findIndex((m) => m.role === \"user\")\n if (firstUserIndex !== -1) {\n const userMessage = otherMessages[firstUserIndex]\n if (typeof userMessage.content === \"string\") {\n userMessage.content = reminder + \"\\n\\n\" + userMessage.content\n } else if (Array.isArray(userMessage.content)) {\n userMessage.content = [\n { type: \"text\", text: reminder },\n ...userMessage.content,\n ] as Array<ContentPart>\n }\n }\n }\n return [...systemMessages, ...otherMessages]\n}\n\nfunction handleSystemPrompt(\n system: string | Array<AnthropicTextBlock> | undefined,\n modelId: string,\n thinkingBudget: number | undefined,\n): Array<Message> {\n if (!system) {\n return []\n }\n\n let extraPrompt = \"\"\n if (modelId.startsWith(\"claude\") && thinkingBudget) {\n extraPrompt = `\n<interleaved_thinking_protocol>\nABSOLUTE REQUIREMENT - NON-NEGOTIABLE:\nThe current thinking_mode is interleaved, Whenever you have the result of a function call, think carefully , MUST output a thinking block\nRULES:\nTool result → thinking block (ALWAYS, no exceptions)\nThis is NOT optional - it is a hard requirement\nThe thinking block must contain substantive reasoning (minimum 3-5 sentences)\nThink about: what the results mean, what to do next, how to answer the user\nNEVER skip this step, even if the result seems simple or obvious\n</interleaved_thinking_protocol>`\n }\n\n if (typeof system === \"string\") {\n return [{ role: \"system\", content: system + extraPrompt }]\n } else {\n const systemText = system\n .map((block, index) => {\n if (index === 0) {\n return block.text + extraPrompt\n }\n return block.text\n })\n .join(\"\\n\\n\")\n return [{ role: \"system\", content: systemText }]\n }\n}\n\nfunction handleUserMessage(message: AnthropicUserMessage): Array<Message> {\n const newMessages: Array<Message> = []\n\n if (Array.isArray(message.content)) {\n const toolResultBlocks = message.content.filter(\n (block): block is AnthropicToolResultBlock =>\n block.type === \"tool_result\",\n )\n const otherBlocks = message.content.filter(\n (block) => block.type !== \"tool_result\",\n )\n\n // Tool results must come first to maintain protocol: tool_use -> tool_result -> user\n for (const block of toolResultBlocks) {\n newMessages.push({\n role: \"tool\",\n tool_call_id: block.tool_use_id,\n content: mapContent(block.content),\n })\n }\n\n if (otherBlocks.length > 0) {\n newMessages.push({\n role: \"user\",\n content: mapContent(otherBlocks),\n })\n }\n } else {\n newMessages.push({\n 
role: \"user\",\n content: mapContent(message.content),\n })\n }\n\n return newMessages\n}\n\nfunction handleAssistantMessage(\n message: AnthropicAssistantMessage,\n modelId: string,\n): Array<Message> {\n if (!Array.isArray(message.content)) {\n return [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n }\n\n const toolUseBlocks = message.content.filter(\n (block): block is AnthropicToolUseBlock => block.type === \"tool_use\",\n )\n\n let thinkingBlocks = message.content.filter(\n (block): block is AnthropicThinkingBlock => block.type === \"thinking\",\n )\n\n if (modelId.startsWith(\"claude\")) {\n thinkingBlocks = thinkingBlocks.filter(\n (b) =>\n b.thinking\n && b.thinking !== THINKING_TEXT\n && b.signature\n // gpt signature has @ in it, so filter those out for claude models\n && !b.signature.includes(\"@\"),\n )\n }\n\n const thinkingContents = thinkingBlocks\n .filter((b) => b.thinking && b.thinking !== THINKING_TEXT)\n .map((b) => b.thinking)\n\n const allThinkingContent =\n thinkingContents.length > 0 ? thinkingContents.join(\"\\n\\n\") : undefined\n\n const signature = thinkingBlocks.find((b) => b.signature)?.signature\n\n return toolUseBlocks.length > 0 ?\n [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n reasoning_text: allThinkingContent,\n reasoning_opaque: signature,\n tool_calls: toolUseBlocks.map((toolUse) => ({\n id: toolUse.id,\n type: \"function\",\n function: {\n name: toolUse.name,\n arguments: JSON.stringify(toolUse.input),\n },\n })),\n },\n ]\n : [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n reasoning_text: allThinkingContent,\n reasoning_opaque: signature,\n },\n ]\n}\n\nfunction mapContent(\n content:\n | string\n | Array<AnthropicUserContentBlock | AnthropicAssistantContentBlock>,\n): string | Array<ContentPart> | null {\n if (typeof content === \"string\") {\n return content\n }\n if (!Array.isArray(content)) {\n return null\n }\n\n const hasImage = content.some((block) => block.type === \"image\")\n if (!hasImage) {\n return content\n .filter((block): block is AnthropicTextBlock => block.type === \"text\")\n .map((block) => block.text)\n .join(\"\\n\\n\")\n }\n\n const contentParts: Array<ContentPart> = []\n for (const block of content) {\n switch (block.type) {\n case \"text\": {\n contentParts.push({ type: \"text\", text: block.text })\n break\n }\n case \"image\": {\n contentParts.push({\n type: \"image_url\",\n image_url: {\n url: `data:${block.source.media_type};base64,${block.source.data}`,\n },\n })\n break\n }\n // No default\n }\n }\n return contentParts\n}\n\nfunction translateAnthropicToolsToOpenAI(\n anthropicTools: Array<AnthropicTool> | undefined,\n): Array<Tool> | undefined {\n if (!anthropicTools) {\n return undefined\n }\n return anthropicTools.map((tool) => ({\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.input_schema,\n },\n }))\n}\n\nfunction translateAnthropicToolChoiceToOpenAI(\n anthropicToolChoice: AnthropicMessagesPayload[\"tool_choice\"],\n): ChatCompletionsPayload[\"tool_choice\"] {\n if (!anthropicToolChoice) {\n return undefined\n }\n\n switch (anthropicToolChoice.type) {\n case \"auto\": {\n return \"auto\"\n }\n case \"any\": {\n return \"required\"\n }\n case \"tool\": {\n if (anthropicToolChoice.name) {\n return {\n type: \"function\",\n function: { name: anthropicToolChoice.name },\n }\n }\n return undefined\n }\n case \"none\": {\n return \"none\"\n }\n default: {\n return undefined\n }\n 
}\n}\n\n// Response translation\n\nexport function translateToAnthropic(\n response: ChatCompletionResponse,\n): AnthropicResponse {\n // Merge content from all choices\n const assistantContentBlocks: Array<AnthropicAssistantContentBlock> = []\n let stopReason = response.choices[0]?.finish_reason ?? null\n\n // Process all choices to extract text and tool use blocks\n for (const choice of response.choices) {\n const textBlocks = getAnthropicTextBlocks(choice.message.content)\n const thinkBlocks = getAnthropicThinkBlocks(\n choice.message.reasoning_text,\n choice.message.reasoning_opaque,\n )\n const toolUseBlocks = getAnthropicToolUseBlocks(choice.message.tool_calls)\n\n assistantContentBlocks.push(...thinkBlocks, ...textBlocks, ...toolUseBlocks)\n\n // Use the finish_reason from the first choice, or prioritize tool_calls\n if (choice.finish_reason === \"tool_calls\" || stopReason === \"stop\") {\n stopReason = choice.finish_reason\n }\n }\n\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: assistantContentBlocks,\n stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),\n stop_sequence: null,\n usage: {\n input_tokens:\n (response.usage?.prompt_tokens ?? 0)\n - (response.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: response.usage?.completion_tokens ?? 0,\n ...(response.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n response.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n }\n}\n\nfunction getAnthropicTextBlocks(\n messageContent: Message[\"content\"],\n): Array<AnthropicTextBlock> {\n if (typeof messageContent === \"string\" && messageContent.length > 0) {\n return [{ type: \"text\", text: messageContent }]\n }\n\n if (Array.isArray(messageContent)) {\n return messageContent\n .filter((part): part is TextPart => part.type === \"text\")\n .map((part) => ({ type: \"text\", text: part.text }))\n }\n\n return []\n}\n\nfunction getAnthropicThinkBlocks(\n reasoningText: string | null | undefined,\n reasoningOpaque: string | null | undefined,\n): Array<AnthropicThinkingBlock> {\n if (reasoningText && reasoningText.length > 0) {\n return [\n {\n type: \"thinking\",\n thinking: reasoningText,\n signature: reasoningOpaque || \"\",\n },\n ]\n }\n if (reasoningOpaque && reasoningOpaque.length > 0) {\n return [\n {\n type: \"thinking\",\n thinking: THINKING_TEXT, // Compatible with opencode, it will filter out blocks where the thinking text is empty, so we add a default thinking text here\n signature: reasoningOpaque,\n },\n ]\n }\n return []\n}\n\nfunction getAnthropicToolUseBlocks(\n toolCalls: Array<ToolCall> | undefined,\n): Array<AnthropicToolUseBlock> {\n if (!toolCalls) {\n return []\n }\n return toolCalls.map((toolCall) => ({\n type: \"tool_use\",\n id: toolCall.id,\n name: toolCall.function.name,\n input: JSON.parse(toolCall.function.arguments) as Record<string, unknown>,\n }))\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\n\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\n\nimport { type AnthropicMessagesPayload } from \"./anthropic-types\"\nimport { translateToOpenAI } from \"./non-stream-translation\"\n\n/**\n * Handles token counting for Anthropic messages\n */\nexport async function handleCountTokens(c: Context) {\n try {\n const anthropicBeta = c.req.header(\"anthropic-beta\")\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n\n const openAIPayload = 
translateToOpenAI(anthropicPayload)\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === anthropicPayload.model,\n )\n\n if (!selectedModel) {\n consola.warn(\"Model not found, returning default token count\")\n return c.json({\n input_tokens: 1,\n })\n }\n\n const tokenCount = await getTokenCount(openAIPayload, selectedModel)\n\n if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {\n let addToolSystemPromptCount = false\n if (anthropicBeta) {\n const toolsLength = anthropicPayload.tools.length\n addToolSystemPromptCount = !anthropicPayload.tools.some(\n (tool) =>\n tool.name.startsWith(\"mcp__\")\n || (tool.name === \"Skill\" && toolsLength === 1),\n )\n }\n if (addToolSystemPromptCount) {\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview#pricing\n tokenCount.input = tokenCount.input + 346\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n tokenCount.input = tokenCount.input + 120\n }\n }\n }\n\n let finalTokenCount = tokenCount.input + tokenCount.output\n if (anthropicPayload.model.startsWith(\"claude\")) {\n finalTokenCount = Math.round(finalTokenCount * 1.15)\n }\n\n consola.info(\"Token count:\", finalTokenCount)\n\n return c.json({\n input_tokens: finalTokenCount,\n })\n } catch (error) {\n consola.error(\"Error counting tokens:\", error)\n return c.json({\n input_tokens: 1,\n })\n }\n}\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport interface ResponsesPayload {\n model: string\n instructions?: string | null\n input?: string | Array<ResponseInputItem>\n tools?: Array<Tool> | null\n tool_choice?: ToolChoiceOptions | ToolChoiceFunction\n temperature?: number | null\n top_p?: number | null\n max_output_tokens?: number | null\n metadata?: Metadata | null\n stream?: boolean | null\n safety_identifier?: string | null\n prompt_cache_key?: string | null\n parallel_tool_calls?: boolean | null\n store?: boolean | null\n reasoning?: Reasoning | null\n include?: Array<ResponseIncludable>\n service_tier?: string | null // NOTE: Unsupported by GitHub Copilot\n [key: string]: unknown\n}\n\nexport type ToolChoiceOptions = \"none\" | \"auto\" | \"required\"\n\nexport interface ToolChoiceFunction {\n name: string\n type: \"function\"\n}\n\nexport type Tool = FunctionTool | Record<string, unknown>\n\nexport interface FunctionTool {\n name: string\n parameters: { [key: string]: unknown } | null\n strict: boolean | null\n type: \"function\"\n description?: string | null\n}\n\nexport type ResponseIncludable =\n | \"file_search_call.results\"\n | \"message.input_image.image_url\"\n | \"computer_call_output.output.image_url\"\n | \"reasoning.encrypted_content\"\n | \"code_interpreter_call.outputs\"\n\nexport interface Reasoning {\n effort?: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\" | null\n summary?: \"auto\" | \"concise\" | \"detailed\" | null\n}\n\nexport interface ResponseInputMessage {\n type?: \"message\"\n role: \"user\" | \"assistant\" | \"system\" | \"developer\"\n content?: string | Array<ResponseInputContent>\n status?: string\n}\n\nexport interface ResponseFunctionToolCallItem {\n type: \"function_call\"\n call_id: string\n name: string\n arguments: string\n status?: \"in_progress\" | \"completed\" | \"incomplete\"\n}\n\nexport interface 
ResponseFunctionCallOutputItem {\n type: \"function_call_output\"\n call_id: string\n output: string | Array<ResponseInputContent>\n status?: \"in_progress\" | \"completed\" | \"incomplete\"\n}\n\nexport interface ResponseInputReasoning {\n id?: string\n type: \"reasoning\"\n summary: Array<{\n type: \"summary_text\"\n text: string\n }>\n encrypted_content: string\n}\n\nexport type ResponseInputItem =\n | ResponseInputMessage\n | ResponseFunctionToolCallItem\n | ResponseFunctionCallOutputItem\n | ResponseInputReasoning\n | Record<string, unknown>\n\nexport type ResponseInputContent =\n | ResponseInputText\n | ResponseInputImage\n | Record<string, unknown>\n\nexport interface ResponseInputText {\n type: \"input_text\" | \"output_text\"\n text: string\n}\n\nexport interface ResponseInputImage {\n type: \"input_image\"\n image_url?: string | null\n file_id?: string | null\n detail: \"low\" | \"high\" | \"auto\"\n}\n\nexport interface ResponsesResult {\n id: string\n object: \"response\"\n created_at: number\n model: string\n output: Array<ResponseOutputItem>\n output_text: string\n status: string\n usage?: ResponseUsage | null\n error: ResponseError | null\n incomplete_details: IncompleteDetails | null\n instructions: string | null\n metadata: Metadata | null\n parallel_tool_calls: boolean\n temperature: number | null\n tool_choice: unknown\n tools: Array<Tool>\n top_p: number | null\n}\n\nexport type Metadata = { [key: string]: string }\n\nexport interface IncompleteDetails {\n reason?: \"max_output_tokens\" | \"content_filter\"\n}\n\nexport interface ResponseError {\n message: string\n}\n\nexport type ResponseOutputItem =\n | ResponseOutputMessage\n | ResponseOutputReasoning\n | ResponseOutputFunctionCall\n\nexport interface ResponseOutputMessage {\n id: string\n type: \"message\"\n role: \"assistant\"\n status: \"completed\" | \"in_progress\" | \"incomplete\"\n content?: Array<ResponseOutputContentBlock>\n}\n\nexport interface ResponseOutputReasoning {\n id: string\n type: \"reasoning\"\n summary?: Array<ResponseReasoningBlock>\n encrypted_content?: string\n status?: \"completed\" | \"in_progress\" | \"incomplete\"\n}\n\nexport interface ResponseReasoningBlock {\n type: string\n text?: string\n}\n\nexport interface ResponseOutputFunctionCall {\n id?: string\n type: \"function_call\"\n call_id: string\n name: string\n arguments: string\n status?: \"in_progress\" | \"completed\" | \"incomplete\"\n}\n\nexport type ResponseOutputContentBlock =\n | ResponseOutputText\n | ResponseOutputRefusal\n | Record<string, unknown>\n\nexport interface ResponseOutputText {\n type: \"output_text\"\n text: string\n annotations: Array<unknown>\n}\n\nexport interface ResponseOutputRefusal {\n type: \"refusal\"\n refusal: string\n}\n\nexport interface ResponseUsage {\n input_tokens: number\n output_tokens?: number\n total_tokens: number\n input_tokens_details?: {\n cached_tokens: number\n }\n output_tokens_details?: {\n reasoning_tokens: number\n }\n}\n\nexport type ResponseStreamEvent =\n | ResponseCompletedEvent\n | ResponseIncompleteEvent\n | ResponseCreatedEvent\n | ResponseErrorEvent\n | ResponseFunctionCallArgumentsDeltaEvent\n | ResponseFunctionCallArgumentsDoneEvent\n | ResponseFailedEvent\n | ResponseOutputItemAddedEvent\n | ResponseOutputItemDoneEvent\n | ResponseReasoningSummaryTextDeltaEvent\n | ResponseReasoningSummaryTextDoneEvent\n | ResponseTextDeltaEvent\n | ResponseTextDoneEvent\n\nexport interface ResponseCompletedEvent {\n response: ResponsesResult\n sequence_number: number\n type: 
\"response.completed\"\n}\n\nexport interface ResponseIncompleteEvent {\n response: ResponsesResult\n sequence_number: number\n type: \"response.incomplete\"\n}\n\nexport interface ResponseCreatedEvent {\n response: ResponsesResult\n sequence_number: number\n type: \"response.created\"\n}\n\nexport interface ResponseErrorEvent {\n code: string | null\n message: string\n param: string | null\n sequence_number: number\n type: \"error\"\n}\n\nexport interface ResponseFunctionCallArgumentsDeltaEvent {\n delta: string\n item_id: string\n output_index: number\n sequence_number: number\n type: \"response.function_call_arguments.delta\"\n}\n\nexport interface ResponseFunctionCallArgumentsDoneEvent {\n arguments: string\n item_id: string\n name: string\n output_index: number\n sequence_number: number\n type: \"response.function_call_arguments.done\"\n}\n\nexport interface ResponseFailedEvent {\n response: ResponsesResult\n sequence_number: number\n type: \"response.failed\"\n}\n\nexport interface ResponseOutputItemAddedEvent {\n item: ResponseOutputItem\n output_index: number\n sequence_number: number\n type: \"response.output_item.added\"\n}\n\nexport interface ResponseOutputItemDoneEvent {\n item: ResponseOutputItem\n output_index: number\n sequence_number: number\n type: \"response.output_item.done\"\n}\n\nexport interface ResponseReasoningSummaryTextDeltaEvent {\n delta: string\n item_id: string\n output_index: number\n sequence_number: number\n summary_index: number\n type: \"response.reasoning_summary_text.delta\"\n}\n\nexport interface ResponseReasoningSummaryTextDoneEvent {\n item_id: string\n output_index: number\n sequence_number: number\n summary_index: number\n text: string\n type: \"response.reasoning_summary_text.done\"\n}\n\nexport interface ResponseTextDeltaEvent {\n content_index: number\n delta: string\n item_id: string\n output_index: number\n sequence_number: number\n type: \"response.output_text.delta\"\n}\n\nexport interface ResponseTextDoneEvent {\n content_index: number\n item_id: string\n output_index: number\n sequence_number: number\n text: string\n type: \"response.output_text.done\"\n}\n\nexport type ResponsesStream = ReturnType<typeof events>\nexport type CreateResponsesReturn = ResponsesResult | ResponsesStream\n\ninterface ResponsesRequestOptions {\n vision: boolean\n initiator: \"agent\" | \"user\"\n}\n\nexport const createResponses = async (\n payload: ResponsesPayload,\n { vision, initiator }: ResponsesRequestOptions,\n): Promise<CreateResponsesReturn> => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const headers: Record<string, string> = {\n ...copilotHeaders(state, vision),\n \"X-Initiator\": initiator,\n }\n\n // service_tier is not supported by github copilot\n payload.service_tier = null\n\n const response = await fetch(`${copilotBaseUrl(state)}/responses`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create responses\", response)\n throw new HTTPError(\"Failed to create responses\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ResponsesResult\n}\n","import consola from \"consola\"\n\nimport {\n getExtraPromptForModel,\n getReasoningEffortForModel,\n} from \"~/lib/config\"\nimport {\n type ResponsesPayload,\n type ResponseInputContent,\n type ResponseInputImage,\n type ResponseInputItem,\n type ResponseInputMessage,\n type ResponseInputReasoning,\n type ResponseInputText,\n type 
ResponsesResult,\n type ResponseOutputContentBlock,\n type ResponseOutputFunctionCall,\n type ResponseOutputItem,\n type ResponseOutputReasoning,\n type ResponseReasoningBlock,\n type ResponseOutputRefusal,\n type ResponseOutputText,\n type ResponseFunctionToolCallItem,\n type ResponseFunctionCallOutputItem,\n type Tool,\n type ToolChoiceFunction,\n type ToolChoiceOptions,\n} from \"~/services/copilot/create-responses\"\n\nimport {\n type AnthropicAssistantContentBlock,\n type AnthropicAssistantMessage,\n type AnthropicResponse,\n type AnthropicImageBlock,\n type AnthropicMessage,\n type AnthropicMessagesPayload,\n type AnthropicTextBlock,\n type AnthropicThinkingBlock,\n type AnthropicTool,\n type AnthropicToolResultBlock,\n type AnthropicToolUseBlock,\n type AnthropicUserContentBlock,\n type AnthropicUserMessage,\n} from \"./anthropic-types\"\n\nconst MESSAGE_TYPE = \"message\"\n\nexport const THINKING_TEXT = \"Thinking...\"\n\nexport const translateAnthropicMessagesToResponsesPayload = (\n payload: AnthropicMessagesPayload,\n): ResponsesPayload => {\n const input: Array<ResponseInputItem> = []\n\n for (const message of payload.messages) {\n input.push(...translateMessage(message))\n }\n\n const translatedTools = convertAnthropicTools(payload.tools)\n const toolChoice = convertAnthropicToolChoice(payload.tool_choice)\n\n const { safetyIdentifier, promptCacheKey } = parseUserId(\n payload.metadata?.user_id,\n )\n\n const responsesPayload: ResponsesPayload = {\n model: payload.model,\n input,\n instructions: translateSystemPrompt(payload.system, payload.model),\n temperature: 1, // reasoning high temperature fixed to 1\n top_p: payload.top_p ?? null,\n max_output_tokens: Math.max(payload.max_tokens, 12800),\n tools: translatedTools,\n tool_choice: toolChoice,\n metadata: payload.metadata ? { ...payload.metadata } : null,\n safety_identifier: safetyIdentifier,\n prompt_cache_key: promptCacheKey,\n stream: payload.stream ?? 
null,\n store: false,\n parallel_tool_calls: true,\n reasoning: {\n effort: getReasoningEffortForModel(payload.model),\n summary: \"detailed\",\n },\n include: [\"reasoning.encrypted_content\"],\n }\n\n return responsesPayload\n}\n\nconst translateMessage = (\n message: AnthropicMessage,\n): Array<ResponseInputItem> => {\n if (message.role === \"user\") {\n return translateUserMessage(message)\n }\n\n return translateAssistantMessage(message)\n}\n\nconst translateUserMessage = (\n message: AnthropicUserMessage,\n): Array<ResponseInputItem> => {\n if (typeof message.content === \"string\") {\n return [createMessage(\"user\", message.content)]\n }\n\n if (!Array.isArray(message.content)) {\n return []\n }\n\n const items: Array<ResponseInputItem> = []\n const pendingContent: Array<ResponseInputContent> = []\n\n for (const block of message.content) {\n if (block.type === \"tool_result\") {\n flushPendingContent(\"user\", pendingContent, items)\n items.push(createFunctionCallOutput(block))\n continue\n }\n\n const converted = translateUserContentBlock(block)\n if (converted) {\n pendingContent.push(converted)\n }\n }\n\n flushPendingContent(\"user\", pendingContent, items)\n\n return items\n}\n\nconst translateAssistantMessage = (\n message: AnthropicAssistantMessage,\n): Array<ResponseInputItem> => {\n if (typeof message.content === \"string\") {\n return [createMessage(\"assistant\", message.content)]\n }\n\n if (!Array.isArray(message.content)) {\n return []\n }\n\n const items: Array<ResponseInputItem> = []\n const pendingContent: Array<ResponseInputContent> = []\n\n for (const block of message.content) {\n if (block.type === \"tool_use\") {\n flushPendingContent(\"assistant\", pendingContent, items)\n items.push(createFunctionToolCall(block))\n continue\n }\n\n if (\n block.type === \"thinking\"\n && block.signature\n && block.signature.includes(\"@\")\n ) {\n flushPendingContent(\"assistant\", pendingContent, items)\n items.push(createReasoningContent(block))\n continue\n }\n\n const converted = translateAssistantContentBlock(block)\n if (converted) {\n pendingContent.push(converted)\n }\n }\n\n flushPendingContent(\"assistant\", pendingContent, items)\n\n return items\n}\n\nconst translateUserContentBlock = (\n block: AnthropicUserContentBlock,\n): ResponseInputContent | undefined => {\n switch (block.type) {\n case \"text\": {\n return createTextContent(block.text)\n }\n case \"image\": {\n return createImageContent(block)\n }\n default: {\n return undefined\n }\n }\n}\n\nconst translateAssistantContentBlock = (\n block: AnthropicAssistantContentBlock,\n): ResponseInputContent | undefined => {\n switch (block.type) {\n case \"text\": {\n return createOutPutTextContent(block.text)\n }\n default: {\n return undefined\n }\n }\n}\n\nconst flushPendingContent = (\n role: ResponseInputMessage[\"role\"],\n pendingContent: Array<ResponseInputContent>,\n target: Array<ResponseInputItem>,\n) => {\n if (pendingContent.length === 0) {\n return\n }\n\n const messageContent = [...pendingContent]\n\n target.push(createMessage(role, messageContent))\n pendingContent.length = 0\n}\n\nconst createMessage = (\n role: ResponseInputMessage[\"role\"],\n content: string | Array<ResponseInputContent>,\n): ResponseInputMessage => ({\n type: MESSAGE_TYPE,\n role,\n content,\n})\n\nconst createTextContent = (text: string): ResponseInputText => ({\n type: \"input_text\",\n text,\n})\n\nconst createOutPutTextContent = (text: string): ResponseInputText => ({\n type: \"output_text\",\n text,\n})\n\nconst 
createImageContent = (\n block: AnthropicImageBlock,\n): ResponseInputImage => ({\n type: \"input_image\",\n image_url: `data:${block.source.media_type};base64,${block.source.data}`,\n detail: \"auto\",\n})\n\nconst createReasoningContent = (\n block: AnthropicThinkingBlock,\n): ResponseInputReasoning => {\n // align with vscode-copilot-chat extractThinkingData, should add id, otherwise it will cause miss cache occasionally —— the usage input cached tokens to be 0\n // https://github.com/microsoft/vscode-copilot-chat/blob/main/src/platform/endpoint/node/responsesApi.ts#L162\n // when use in codex cli, reasoning id is empty, so it will cause miss cache occasionally\n const array = block.signature.split(\"@\")\n const signature = array[0]\n const id = array[1]\n const thinking = block.thinking === THINKING_TEXT ? \"\" : block.thinking\n return {\n id,\n type: \"reasoning\",\n summary: thinking ? [{ type: \"summary_text\", text: thinking }] : [],\n encrypted_content: signature,\n }\n}\n\nconst createFunctionToolCall = (\n block: AnthropicToolUseBlock,\n): ResponseFunctionToolCallItem => ({\n type: \"function_call\",\n call_id: block.id,\n name: block.name,\n arguments: JSON.stringify(block.input),\n status: \"completed\",\n})\n\nconst createFunctionCallOutput = (\n block: AnthropicToolResultBlock,\n): ResponseFunctionCallOutputItem => ({\n type: \"function_call_output\",\n call_id: block.tool_use_id,\n output: convertToolResultContent(block.content),\n status: block.is_error ? \"incomplete\" : \"completed\",\n})\n\nconst translateSystemPrompt = (\n system: string | Array<AnthropicTextBlock> | undefined,\n model: string,\n): string | null => {\n if (!system) {\n return null\n }\n\n const extraPrompt = getExtraPromptForModel(model)\n\n if (typeof system === \"string\") {\n return system + extraPrompt\n }\n\n const text = system\n .map((block, index) => {\n if (index === 0) {\n return block.text + extraPrompt\n }\n return block.text\n })\n .join(\" \")\n return text.length > 0 ? text : null\n}\n\nconst convertAnthropicTools = (\n tools: Array<AnthropicTool> | undefined,\n): Array<Tool> | null => {\n if (!tools || tools.length === 0) {\n return null\n }\n\n return tools.map((tool) => ({\n type: \"function\",\n name: tool.name,\n parameters: tool.input_schema,\n strict: false,\n ...(tool.description ? { description: tool.description } : {}),\n }))\n}\n\nconst convertAnthropicToolChoice = (\n choice: AnthropicMessagesPayload[\"tool_choice\"],\n): ToolChoiceOptions | ToolChoiceFunction => {\n if (!choice) {\n return \"auto\"\n }\n\n switch (choice.type) {\n case \"auto\": {\n return \"auto\"\n }\n case \"any\": {\n return \"required\"\n }\n case \"tool\": {\n return choice.name ? 
{ type: \"function\", name: choice.name } : \"auto\"\n }\n case \"none\": {\n return \"none\"\n }\n default: {\n return \"auto\"\n }\n }\n}\n\nexport const translateResponsesResultToAnthropic = (\n response: ResponsesResult,\n): AnthropicResponse => {\n const contentBlocks = mapOutputToAnthropicContent(response.output)\n const usage = mapResponsesUsage(response)\n let anthropicContent = fallbackContentBlocks(response.output_text)\n if (contentBlocks.length > 0) {\n anthropicContent = contentBlocks\n }\n\n const stopReason = mapResponsesStopReason(response)\n\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n content: anthropicContent,\n model: response.model,\n stop_reason: stopReason,\n stop_sequence: null,\n usage,\n }\n}\n\nconst mapOutputToAnthropicContent = (\n output: Array<ResponseOutputItem>,\n): Array<AnthropicAssistantContentBlock> => {\n const contentBlocks: Array<AnthropicAssistantContentBlock> = []\n\n for (const item of output) {\n switch (item.type) {\n case \"reasoning\": {\n const thinkingText = extractReasoningText(item)\n if (thinkingText.length > 0) {\n contentBlocks.push({\n type: \"thinking\",\n thinking: thinkingText,\n signature: (item.encrypted_content ?? \"\") + \"@\" + item.id,\n })\n }\n break\n }\n case \"function_call\": {\n const toolUseBlock = createToolUseContentBlock(item)\n if (toolUseBlock) {\n contentBlocks.push(toolUseBlock)\n }\n break\n }\n case \"message\": {\n const combinedText = combineMessageTextContent(item.content)\n if (combinedText.length > 0) {\n contentBlocks.push({ type: \"text\", text: combinedText })\n }\n break\n }\n default: {\n // Future compatibility for unrecognized output item types.\n const combinedText = combineMessageTextContent(\n (item as { content?: Array<ResponseOutputContentBlock> }).content,\n )\n if (combinedText.length > 0) {\n contentBlocks.push({ type: \"text\", text: combinedText })\n }\n }\n }\n }\n\n return contentBlocks\n}\n\nconst combineMessageTextContent = (\n content: Array<ResponseOutputContentBlock> | undefined,\n): string => {\n if (!Array.isArray(content)) {\n return \"\"\n }\n\n let aggregated = \"\"\n\n for (const block of content) {\n if (isResponseOutputText(block)) {\n aggregated += block.text\n continue\n }\n\n if (isResponseOutputRefusal(block)) {\n aggregated += block.refusal\n continue\n }\n\n if (typeof (block as { text?: unknown }).text === \"string\") {\n aggregated += (block as { text: string }).text\n continue\n }\n\n if (typeof (block as { reasoning?: unknown }).reasoning === \"string\") {\n aggregated += (block as { reasoning: string }).reasoning\n continue\n }\n }\n\n return aggregated\n}\n\nconst extractReasoningText = (item: ResponseOutputReasoning): string => {\n const segments: Array<string> = []\n\n const collectFromBlocks = (blocks?: Array<ResponseReasoningBlock>) => {\n if (!Array.isArray(blocks)) {\n return\n }\n\n for (const block of blocks) {\n if (typeof block.text === \"string\") {\n segments.push(block.text)\n continue\n }\n }\n }\n\n // Compatible with opencode, it will filter out blocks where the thinking text is empty, so we add a default thinking text here\n if (!item.summary || item.summary.length === 0) {\n return THINKING_TEXT\n }\n\n collectFromBlocks(item.summary)\n\n return segments.join(\"\").trim()\n}\n\nconst createToolUseContentBlock = (\n call: ResponseOutputFunctionCall,\n): AnthropicToolUseBlock | null => {\n const toolId = call.call_id\n if (!call.name || !toolId) {\n return null\n }\n\n const input = 
parseFunctionCallArguments(call.arguments)\n\n return {\n type: \"tool_use\",\n id: toolId,\n name: call.name,\n input,\n }\n}\n\nconst parseFunctionCallArguments = (\n rawArguments: string,\n): Record<string, unknown> => {\n if (typeof rawArguments !== \"string\" || rawArguments.trim().length === 0) {\n return {}\n }\n\n try {\n const parsed: unknown = JSON.parse(rawArguments)\n\n if (Array.isArray(parsed)) {\n return { arguments: parsed }\n }\n\n if (parsed && typeof parsed === \"object\") {\n return parsed as Record<string, unknown>\n }\n } catch (error) {\n consola.warn(\"Failed to parse function call arguments\", {\n error,\n rawArguments,\n })\n }\n\n return { raw_arguments: rawArguments }\n}\n\nconst fallbackContentBlocks = (\n outputText: string,\n): Array<AnthropicAssistantContentBlock> => {\n if (!outputText) {\n return []\n }\n\n return [\n {\n type: \"text\",\n text: outputText,\n },\n ]\n}\n\nconst mapResponsesStopReason = (\n response: ResponsesResult,\n): AnthropicResponse[\"stop_reason\"] => {\n const { status, incomplete_details: incompleteDetails } = response\n\n if (status === \"completed\") {\n if (response.output.some((item) => item.type === \"function_call\")) {\n return \"tool_use\"\n }\n return \"end_turn\"\n }\n\n if (status === \"incomplete\") {\n if (incompleteDetails?.reason === \"max_output_tokens\") {\n return \"max_tokens\"\n }\n if (incompleteDetails?.reason === \"content_filter\") {\n return \"end_turn\"\n }\n }\n\n return null\n}\n\nconst mapResponsesUsage = (\n response: ResponsesResult,\n): AnthropicResponse[\"usage\"] => {\n const inputTokens = response.usage?.input_tokens ?? 0\n const outputTokens = response.usage?.output_tokens ?? 0\n const inputCachedTokens = response.usage?.input_tokens_details?.cached_tokens\n\n return {\n input_tokens: inputTokens - (inputCachedTokens ?? 0),\n output_tokens: outputTokens,\n ...(response.usage?.input_tokens_details?.cached_tokens !== undefined && {\n cache_read_input_tokens:\n response.usage.input_tokens_details.cached_tokens,\n }),\n }\n}\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === \"object\" && value !== null\n\nconst isResponseOutputText = (\n block: ResponseOutputContentBlock,\n): block is ResponseOutputText =>\n isRecord(block)\n && \"type\" in block\n && (block as { type?: unknown }).type === \"output_text\"\n\nconst isResponseOutputRefusal = (\n block: ResponseOutputContentBlock,\n): block is ResponseOutputRefusal =>\n isRecord(block)\n && \"type\" in block\n && (block as { type?: unknown }).type === \"refusal\"\n\nconst parseUserId = (\n userId: string | undefined,\n): { safetyIdentifier: string | null; promptCacheKey: string | null } => {\n if (!userId || typeof userId !== \"string\") {\n return { safetyIdentifier: null, promptCacheKey: null }\n }\n\n // Parse safety_identifier: content between \"user_\" and \"_account\"\n const userMatch = userId.match(/user_([^_]+)_account/)\n const safetyIdentifier = userMatch ? userMatch[1] : null\n\n // Parse prompt_cache_key: content after \"_session_\"\n const sessionMatch = userId.match(/_session_(.+)$/)\n const promptCacheKey = sessionMatch ? 
sessionMatch[1] : null\n\n return { safetyIdentifier, promptCacheKey }\n}\n\nconst convertToolResultContent = (\n content: string | Array<AnthropicTextBlock | AnthropicImageBlock>,\n): string | Array<ResponseInputContent> => {\n if (typeof content === \"string\") {\n return content\n }\n\n if (Array.isArray(content)) {\n const result: Array<ResponseInputContent> = []\n for (const block of content) {\n switch (block.type) {\n case \"text\": {\n result.push(createTextContent(block.text))\n break\n }\n case \"image\": {\n result.push(createImageContent(block))\n break\n }\n default: {\n break\n }\n }\n }\n return result\n }\n\n return \"\"\n}\n","import {\n type ResponseCompletedEvent,\n type ResponseCreatedEvent,\n type ResponseErrorEvent,\n type ResponseFailedEvent,\n type ResponseFunctionCallArgumentsDeltaEvent,\n type ResponseFunctionCallArgumentsDoneEvent,\n type ResponseIncompleteEvent,\n type ResponseOutputItemAddedEvent,\n type ResponseOutputItemDoneEvent,\n type ResponseReasoningSummaryTextDeltaEvent,\n type ResponseReasoningSummaryTextDoneEvent,\n type ResponsesResult,\n type ResponseStreamEvent,\n type ResponseTextDeltaEvent,\n type ResponseTextDoneEvent,\n} from \"~/services/copilot/create-responses\"\n\nimport { type AnthropicStreamEventData } from \"./anthropic-types\"\nimport {\n THINKING_TEXT,\n translateResponsesResultToAnthropic,\n} from \"./responses-translation\"\n\nconst MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE = 20\n\nclass FunctionCallArgumentsValidationError extends Error {\n constructor(message: string) {\n super(message)\n this.name = \"FunctionCallArgumentsValidationError\"\n }\n}\n\nconst updateWhitespaceRunState = (\n previousCount: number,\n chunk: string,\n): {\n nextCount: number\n exceeded: boolean\n} => {\n let count = previousCount\n\n for (const char of chunk) {\n if (char === \"\\r\" || char === \"\\n\" || char === \"\\t\") {\n count += 1\n if (count > MAX_CONSECUTIVE_FUNCTION_CALL_WHITESPACE) {\n return { nextCount: count, exceeded: true }\n }\n continue\n }\n\n if (char !== \" \") {\n count = 0\n }\n }\n\n return { nextCount: count, exceeded: false }\n}\n\nexport interface ResponsesStreamState {\n messageStartSent: boolean\n messageCompleted: boolean\n nextContentBlockIndex: number\n blockIndexByKey: Map<string, number>\n openBlocks: Set<number>\n blockHasDelta: Set<number>\n functionCallStateByOutputIndex: Map<number, FunctionCallStreamState>\n}\n\ntype FunctionCallStreamState = {\n blockIndex: number\n toolCallId: string\n name: string\n consecutiveWhitespaceCount: number\n}\n\nexport const createResponsesStreamState = (): ResponsesStreamState => ({\n messageStartSent: false,\n messageCompleted: false,\n nextContentBlockIndex: 0,\n blockIndexByKey: new Map(),\n openBlocks: new Set(),\n blockHasDelta: new Set(),\n functionCallStateByOutputIndex: new Map(),\n})\n\nexport const translateResponsesStreamEvent = (\n rawEvent: ResponseStreamEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const eventType = rawEvent.type\n switch (eventType) {\n case \"response.created\": {\n return handleResponseCreated(rawEvent, state)\n }\n\n case \"response.output_item.added\": {\n return handleOutputItemAdded(rawEvent, state)\n }\n\n case \"response.reasoning_summary_text.delta\": {\n return handleReasoningSummaryTextDelta(rawEvent, state)\n }\n\n case \"response.output_text.delta\": {\n return handleOutputTextDelta(rawEvent, state)\n }\n\n case \"response.reasoning_summary_text.done\": {\n return handleReasoningSummaryTextDone(rawEvent, 
state)\n }\n\n case \"response.output_text.done\": {\n return handleOutputTextDone(rawEvent, state)\n }\n case \"response.output_item.done\": {\n return handleOutputItemDone(rawEvent, state)\n }\n\n case \"response.function_call_arguments.delta\": {\n return handleFunctionCallArgumentsDelta(rawEvent, state)\n }\n\n case \"response.function_call_arguments.done\": {\n return handleFunctionCallArgumentsDone(rawEvent, state)\n }\n\n case \"response.completed\":\n case \"response.incomplete\": {\n return handleResponseCompleted(rawEvent, state)\n }\n\n case \"response.failed\": {\n return handleResponseFailed(rawEvent, state)\n }\n\n case \"error\": {\n return handleErrorEvent(rawEvent, state)\n }\n\n default: {\n return []\n }\n }\n}\n\n// Helper handlers to keep translateResponsesStreamEvent concise\nconst handleResponseCreated = (\n rawEvent: ResponseCreatedEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n return messageStart(state, rawEvent.response)\n}\n\nconst handleOutputItemAdded = (\n rawEvent: ResponseOutputItemAddedEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const functionCallDetails = extractFunctionCallDetails(rawEvent)\n if (!functionCallDetails) {\n return events\n }\n\n const { outputIndex, toolCallId, name, initialArguments } =\n functionCallDetails\n const blockIndex = openFunctionCallBlock(state, {\n outputIndex,\n toolCallId,\n name,\n events,\n })\n\n if (initialArguments !== undefined && initialArguments.length > 0) {\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: initialArguments,\n },\n })\n state.blockHasDelta.add(blockIndex)\n }\n\n return events\n}\n\nconst handleOutputItemDone = (\n rawEvent: ResponseOutputItemDoneEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const item = rawEvent.item\n const itemType = item.type\n if (itemType !== \"reasoning\") {\n return events\n }\n\n const outputIndex = rawEvent.output_index\n const blockIndex = openThinkingBlockIfNeeded(state, outputIndex, events)\n const signature = (item.encrypted_content ?? 
\"\") + \"@\" + item.id\n if (signature) {\n // Compatible with opencode, it will filter out blocks where the thinking text is empty, so we add a default thinking text here\n if (!item.summary || item.summary.length === 0) {\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"thinking_delta\",\n thinking: THINKING_TEXT,\n },\n })\n }\n\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"signature_delta\",\n signature,\n },\n })\n state.blockHasDelta.add(blockIndex)\n }\n\n return events\n}\n\nconst handleFunctionCallArgumentsDelta = (\n rawEvent: ResponseFunctionCallArgumentsDeltaEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const outputIndex = rawEvent.output_index\n const deltaText = rawEvent.delta\n\n if (!deltaText) {\n return events\n }\n\n const blockIndex = openFunctionCallBlock(state, {\n outputIndex,\n events,\n })\n\n const functionCallState =\n state.functionCallStateByOutputIndex.get(outputIndex)\n if (!functionCallState) {\n return handleFunctionCallArgumentsValidationError(\n new FunctionCallArgumentsValidationError(\n \"Received function call arguments delta without an open tool call block.\",\n ),\n state,\n events,\n )\n }\n\n // fix: copolit function call returning infinite line breaks until max_tokens limit\n // \"arguments\": \"{\\\"path\\\":\\\"xxx\\\",\\\"pattern\\\":\\\"**/*.ts\\\",\\\"} }? Wait extra braces. Need correct. I should run? Wait overcame. Need proper JSON with pattern \\\"\\n\\n\\n\\n\\n\\n\\n\\n...\n const { nextCount, exceeded } = updateWhitespaceRunState(\n functionCallState.consecutiveWhitespaceCount,\n deltaText,\n )\n if (exceeded) {\n return handleFunctionCallArgumentsValidationError(\n new FunctionCallArgumentsValidationError(\n \"Received function call arguments delta containing more than 20 consecutive whitespace characters.\",\n ),\n state,\n events,\n )\n }\n functionCallState.consecutiveWhitespaceCount = nextCount\n\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: deltaText,\n },\n })\n state.blockHasDelta.add(blockIndex)\n\n return events\n}\n\nconst handleFunctionCallArgumentsDone = (\n rawEvent: ResponseFunctionCallArgumentsDoneEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const outputIndex = rawEvent.output_index\n const blockIndex = openFunctionCallBlock(state, {\n outputIndex,\n events,\n })\n\n const finalArguments =\n typeof rawEvent.arguments === \"string\" ? 
rawEvent.arguments : undefined\n\n if (!state.blockHasDelta.has(blockIndex) && finalArguments) {\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: finalArguments,\n },\n })\n state.blockHasDelta.add(blockIndex)\n }\n\n state.functionCallStateByOutputIndex.delete(outputIndex)\n return events\n}\n\nconst handleOutputTextDelta = (\n rawEvent: ResponseTextDeltaEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const outputIndex = rawEvent.output_index\n const contentIndex = rawEvent.content_index\n const deltaText = rawEvent.delta\n\n if (!deltaText) {\n return events\n }\n\n const blockIndex = openTextBlockIfNeeded(state, {\n outputIndex,\n contentIndex,\n events,\n })\n\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"text_delta\",\n text: deltaText,\n },\n })\n state.blockHasDelta.add(blockIndex)\n\n return events\n}\n\nconst handleReasoningSummaryTextDelta = (\n rawEvent: ResponseReasoningSummaryTextDeltaEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const outputIndex = rawEvent.output_index\n const deltaText = rawEvent.delta\n const events = new Array<AnthropicStreamEventData>()\n const blockIndex = openThinkingBlockIfNeeded(state, outputIndex, events)\n\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"thinking_delta\",\n thinking: deltaText,\n },\n })\n state.blockHasDelta.add(blockIndex)\n\n return events\n}\n\nconst handleReasoningSummaryTextDone = (\n rawEvent: ResponseReasoningSummaryTextDoneEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const outputIndex = rawEvent.output_index\n const text = rawEvent.text\n const events = new Array<AnthropicStreamEventData>()\n const blockIndex = openThinkingBlockIfNeeded(state, outputIndex, events)\n\n if (text && !state.blockHasDelta.has(blockIndex)) {\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"thinking_delta\",\n thinking: text,\n },\n })\n }\n\n return events\n}\n\nconst handleOutputTextDone = (\n rawEvent: ResponseTextDoneEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const events = new Array<AnthropicStreamEventData>()\n const outputIndex = rawEvent.output_index\n const contentIndex = rawEvent.content_index\n const text = rawEvent.text\n\n const blockIndex = openTextBlockIfNeeded(state, {\n outputIndex,\n contentIndex,\n events,\n })\n\n if (text && !state.blockHasDelta.has(blockIndex)) {\n events.push({\n type: \"content_block_delta\",\n index: blockIndex,\n delta: {\n type: \"text_delta\",\n text,\n },\n })\n }\n\n return events\n}\n\nconst handleResponseCompleted = (\n rawEvent: ResponseCompletedEvent | ResponseIncompleteEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const response = rawEvent.response\n const events = new Array<AnthropicStreamEventData>()\n\n closeAllOpenBlocks(state, events)\n const anthropic = translateResponsesResultToAnthropic(response)\n events.push(\n {\n type: \"message_delta\",\n delta: {\n stop_reason: anthropic.stop_reason,\n stop_sequence: anthropic.stop_sequence,\n },\n usage: anthropic.usage,\n },\n { type: \"message_stop\" },\n )\n state.messageCompleted = true\n return events\n}\n\nconst handleResponseFailed = (\n rawEvent: ResponseFailedEvent,\n state: ResponsesStreamState,\n): 
Array<AnthropicStreamEventData> => {\n const response = rawEvent.response\n const events = new Array<AnthropicStreamEventData>()\n closeAllOpenBlocks(state, events)\n\n const message =\n response.error?.message ?? \"The response failed due to an unknown error.\"\n\n events.push(buildErrorEvent(message))\n state.messageCompleted = true\n\n return events\n}\n\nconst handleErrorEvent = (\n rawEvent: ResponseErrorEvent,\n state: ResponsesStreamState,\n): Array<AnthropicStreamEventData> => {\n const message =\n typeof rawEvent.message === \"string\" ?\n rawEvent.message\n : \"An unexpected error occurred during streaming.\"\n\n state.messageCompleted = true\n return [buildErrorEvent(message)]\n}\n\nconst handleFunctionCallArgumentsValidationError = (\n error: FunctionCallArgumentsValidationError,\n state: ResponsesStreamState,\n events: Array<AnthropicStreamEventData> = [],\n): Array<AnthropicStreamEventData> => {\n const reason = error.message\n\n closeAllOpenBlocks(state, events)\n state.messageCompleted = true\n\n events.push(buildErrorEvent(reason))\n\n return events\n}\n\nconst messageStart = (\n state: ResponsesStreamState,\n response: ResponsesResult,\n): Array<AnthropicStreamEventData> => {\n state.messageStartSent = true\n const inputCachedTokens = response.usage?.input_tokens_details?.cached_tokens\n const inputTokens =\n (response.usage?.input_tokens ?? 0) - (inputCachedTokens ?? 0)\n return [\n {\n type: \"message_start\",\n message: {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n content: [],\n model: response.model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens: inputTokens,\n output_tokens: 0,\n cache_read_input_tokens: inputCachedTokens ?? 0,\n },\n },\n },\n ]\n}\n\nconst openTextBlockIfNeeded = (\n state: ResponsesStreamState,\n params: {\n outputIndex: number\n contentIndex: number\n events: Array<AnthropicStreamEventData>\n },\n): number => {\n const { outputIndex, contentIndex, events } = params\n const key = getBlockKey(outputIndex, contentIndex)\n let blockIndex = state.blockIndexByKey.get(key)\n\n if (blockIndex === undefined) {\n blockIndex = state.nextContentBlockIndex\n state.nextContentBlockIndex += 1\n state.blockIndexByKey.set(key, blockIndex)\n }\n\n if (!state.openBlocks.has(blockIndex)) {\n closeOpenBlocks(state, events)\n events.push({\n type: \"content_block_start\",\n index: blockIndex,\n content_block: {\n type: \"text\",\n text: \"\",\n },\n })\n state.openBlocks.add(blockIndex)\n }\n\n return blockIndex\n}\n\nconst openThinkingBlockIfNeeded = (\n state: ResponsesStreamState,\n outputIndex: number,\n events: Array<AnthropicStreamEventData>,\n): number => {\n //thinking blocks has multiple summary_index, should combine into one block\n const summaryIndex = 0\n const key = getBlockKey(outputIndex, summaryIndex)\n let blockIndex = state.blockIndexByKey.get(key)\n\n if (blockIndex === undefined) {\n blockIndex = state.nextContentBlockIndex\n state.nextContentBlockIndex += 1\n state.blockIndexByKey.set(key, blockIndex)\n }\n\n if (!state.openBlocks.has(blockIndex)) {\n closeOpenBlocks(state, events)\n events.push({\n type: \"content_block_start\",\n index: blockIndex,\n content_block: {\n type: \"thinking\",\n thinking: \"\",\n },\n })\n state.openBlocks.add(blockIndex)\n }\n\n return blockIndex\n}\n\nconst closeBlockIfOpen = (\n state: ResponsesStreamState,\n blockIndex: number,\n events: Array<AnthropicStreamEventData>,\n) => {\n if (!state.openBlocks.has(blockIndex)) {\n return\n }\n\n events.push({ type: 
\"content_block_stop\", index: blockIndex })\n state.openBlocks.delete(blockIndex)\n state.blockHasDelta.delete(blockIndex)\n}\n\nconst closeOpenBlocks = (\n state: ResponsesStreamState,\n events: Array<AnthropicStreamEventData>,\n) => {\n for (const blockIndex of state.openBlocks) {\n closeBlockIfOpen(state, blockIndex, events)\n }\n}\n\nconst closeAllOpenBlocks = (\n state: ResponsesStreamState,\n events: Array<AnthropicStreamEventData>,\n) => {\n closeOpenBlocks(state, events)\n\n state.functionCallStateByOutputIndex.clear()\n}\n\nexport const buildErrorEvent = (message: string): AnthropicStreamEventData => ({\n type: \"error\",\n error: {\n type: \"api_error\",\n message,\n },\n})\n\nconst getBlockKey = (outputIndex: number, contentIndex: number): string =>\n `${outputIndex}:${contentIndex}`\n\nconst openFunctionCallBlock = (\n state: ResponsesStreamState,\n params: {\n outputIndex: number\n toolCallId?: string\n name?: string\n events: Array<AnthropicStreamEventData>\n },\n): number => {\n const { outputIndex, toolCallId, name, events } = params\n\n let functionCallState = state.functionCallStateByOutputIndex.get(outputIndex)\n\n if (!functionCallState) {\n const blockIndex = state.nextContentBlockIndex\n state.nextContentBlockIndex += 1\n\n const resolvedToolCallId = toolCallId ?? `tool_call_${blockIndex}`\n const resolvedName = name ?? \"function\"\n\n functionCallState = {\n blockIndex,\n toolCallId: resolvedToolCallId,\n name: resolvedName,\n consecutiveWhitespaceCount: 0,\n }\n\n state.functionCallStateByOutputIndex.set(outputIndex, functionCallState)\n }\n\n const { blockIndex } = functionCallState\n\n if (!state.openBlocks.has(blockIndex)) {\n closeOpenBlocks(state, events)\n events.push({\n type: \"content_block_start\",\n index: blockIndex,\n content_block: {\n type: \"tool_use\",\n id: functionCallState.toolCallId,\n name: functionCallState.name,\n input: {},\n },\n })\n state.openBlocks.add(blockIndex)\n }\n\n return blockIndex\n}\n\ntype FunctionCallDetails = {\n outputIndex: number\n toolCallId: string\n name: string\n initialArguments?: string\n}\n\nconst extractFunctionCallDetails = (\n rawEvent: ResponseOutputItemAddedEvent,\n): FunctionCallDetails | undefined => {\n const item = rawEvent.item\n const itemType = item.type\n if (itemType !== \"function_call\") {\n return undefined\n }\n\n const outputIndex = rawEvent.output_index\n const toolCallId = item.call_id\n const name = item.name\n const initialArguments = item.arguments\n return {\n outputIndex,\n toolCallId,\n name,\n initialArguments,\n }\n}\n","import type {\n ResponseInputItem,\n ResponsesPayload,\n} from \"~/services/copilot/create-responses\"\n\nexport const getResponsesRequestOptions = (\n payload: ResponsesPayload,\n): { vision: boolean; initiator: \"agent\" | \"user\" } => {\n const vision = hasVisionInput(payload)\n const initiator = hasAgentInitiator(payload) ? \"agent\" : \"user\"\n\n return { vision, initiator }\n}\n\nexport const hasAgentInitiator = (payload: ResponsesPayload): boolean => {\n // Refactor `isAgentCall` logic to check only the last message in the history rather than any message. This prevents valid user messages from being incorrectly flagged as agent calls due to previous assistant history, ensuring proper credit consumption for multi-turn conversations.\n const lastItem = getPayloadItems(payload).at(-1)\n if (!lastItem) {\n return false\n }\n if (!(\"role\" in lastItem) || !lastItem.role) {\n return true\n }\n const role =\n typeof lastItem.role === \"string\" ? 
lastItem.role.toLowerCase() : \"\"\n return role === \"assistant\"\n}\n\nexport const hasVisionInput = (payload: ResponsesPayload): boolean => {\n const values = getPayloadItems(payload)\n return values.some((item) => containsVisionContent(item))\n}\n\nconst getPayloadItems = (\n payload: ResponsesPayload,\n): Array<ResponseInputItem> => {\n const result: Array<ResponseInputItem> = []\n\n const { input } = payload\n\n if (Array.isArray(input)) {\n result.push(...input)\n }\n\n return result\n}\n\nconst containsVisionContent = (value: unknown): boolean => {\n if (!value) return false\n\n if (Array.isArray(value)) {\n return value.some((entry) => containsVisionContent(entry))\n }\n\n if (typeof value !== \"object\") {\n return false\n }\n\n const record = value as Record<string, unknown>\n const type =\n typeof record.type === \"string\" ? record.type.toLowerCase() : undefined\n\n if (type === \"input_image\") {\n return true\n }\n\n if (Array.isArray(record.content)) {\n return record.content.some((entry) => containsVisionContent(entry))\n }\n\n return false\n}\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport type {\n AnthropicMessagesPayload,\n AnthropicResponse,\n} from \"~/routes/messages/anthropic-types\"\n\nimport { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport type MessagesStream = ReturnType<typeof events>\nexport type CreateMessagesReturn = AnthropicResponse | MessagesStream\n\nexport const createMessages = async (\n payload: AnthropicMessagesPayload,\n anthropicBetaHeader?: string,\n): Promise<CreateMessagesReturn> => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = payload.messages.some(\n (message) =>\n Array.isArray(message.content)\n && message.content.some((block) => block.type === \"image\"),\n )\n\n let isInitiateRequest = false\n const lastMessage = payload.messages.at(-1)\n if (lastMessage?.role === \"user\") {\n isInitiateRequest =\n Array.isArray(lastMessage.content) ?\n lastMessage.content.some((block) => block.type !== \"tool_result\")\n : true\n }\n\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isInitiateRequest ? 
\"user\" : \"agent\",\n }\n\n if (anthropicBetaHeader) {\n headers[\"anthropic-beta\"] = anthropicBetaHeader\n } else if (payload.thinking?.budget_tokens) {\n headers[\"anthropic-beta\"] = \"interleaved-thinking-2025-05-14\"\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/v1/messages`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create messages\", response)\n throw new HTTPError(\"Failed to create messages\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as AnthropicResponse\n}\n","import {\n type ChatCompletionChunk,\n type Choice,\n type Delta,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicStreamEventData,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport { THINKING_TEXT } from \"./non-stream-translation\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\nfunction isToolBlockOpen(state: AnthropicStreamState): boolean {\n if (!state.contentBlockOpen) {\n return false\n }\n // Check if the current block index corresponds to any known tool call\n return Object.values(state.toolCalls).some(\n (tc) => tc.anthropicBlockIndex === state.contentBlockIndex,\n )\n}\n\nexport function translateChunkToAnthropicEvents(\n chunk: ChatCompletionChunk,\n state: AnthropicStreamState,\n): Array<AnthropicStreamEventData> {\n const events: Array<AnthropicStreamEventData> = []\n\n if (chunk.choices.length === 0) {\n return events\n }\n\n const choice = chunk.choices[0]\n const { delta } = choice\n\n handleMessageStart(state, events, chunk)\n\n handleThinkingText(delta, state, events)\n\n handleContent(delta, state, events)\n\n handleToolCalls(delta, state, events)\n\n handleFinish(choice, state, { events, chunk })\n\n return events\n}\n\nfunction handleFinish(\n choice: Choice,\n state: AnthropicStreamState,\n context: {\n events: Array<AnthropicStreamEventData>\n chunk: ChatCompletionChunk\n },\n) {\n const { events, chunk } = context\n if (choice.finish_reason && choice.finish_reason.length > 0) {\n if (state.contentBlockOpen) {\n const toolBlockOpen = isToolBlockOpen(state)\n context.events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockOpen = false\n state.contentBlockIndex++\n if (!toolBlockOpen) {\n handleReasoningOpaque(choice.delta, events, state)\n }\n }\n\n events.push(\n {\n type: \"message_delta\",\n delta: {\n stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),\n stop_sequence: null,\n },\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: chunk.usage?.completion_tokens ?? 
0,\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n {\n type: \"message_stop\",\n },\n )\n }\n}\n\nfunction handleToolCalls(\n delta: Delta,\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n) {\n if (delta.tool_calls && delta.tool_calls.length > 0) {\n closeThinkingBlockIfOpen(state, events)\n\n handleReasoningOpaqueInToolCalls(state, events, delta)\n\n for (const toolCall of delta.tool_calls) {\n if (toolCall.id && toolCall.function?.name) {\n // New tool call starting.\n if (state.contentBlockOpen) {\n // Close any previously open block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n const anthropicBlockIndex = state.contentBlockIndex\n state.toolCalls[toolCall.index] = {\n id: toolCall.id,\n name: toolCall.function.name,\n anthropicBlockIndex,\n }\n\n events.push({\n type: \"content_block_start\",\n index: anthropicBlockIndex,\n content_block: {\n type: \"tool_use\",\n id: toolCall.id,\n name: toolCall.function.name,\n input: {},\n },\n })\n state.contentBlockOpen = true\n }\n\n if (toolCall.function?.arguments) {\n const toolCallInfo = state.toolCalls[toolCall.index]\n // Tool call can still be empty\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (toolCallInfo) {\n events.push({\n type: \"content_block_delta\",\n index: toolCallInfo.anthropicBlockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: toolCall.function.arguments,\n },\n })\n }\n }\n }\n }\n}\n\nfunction handleReasoningOpaqueInToolCalls(\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n delta: Delta,\n) {\n if (state.contentBlockOpen && !isToolBlockOpen(state)) {\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n handleReasoningOpaque(delta, events, state)\n}\n\nfunction handleContent(\n delta: Delta,\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n) {\n if (delta.content && delta.content.length > 0) {\n closeThinkingBlockIfOpen(state, events)\n\n if (isToolBlockOpen(state)) {\n // A tool block was open, so close it before starting a text block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n if (!state.contentBlockOpen) {\n events.push({\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"text\",\n text: \"\",\n },\n })\n state.contentBlockOpen = true\n }\n\n events.push({\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"text_delta\",\n text: delta.content,\n },\n })\n }\n\n // handle for claude model\n if (\n delta.content === \"\"\n && delta.reasoning_opaque\n && delta.reasoning_opaque.length > 0\n && state.thinkingBlockOpen\n ) {\n events.push(\n {\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"signature_delta\",\n signature: delta.reasoning_opaque,\n },\n },\n {\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n },\n )\n state.contentBlockIndex++\n state.thinkingBlockOpen = false\n }\n}\n\nfunction handleMessageStart(\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n chunk: ChatCompletionChunk,\n) 
{\n if (!state.messageStartSent) {\n events.push({\n type: \"message_start\",\n message: {\n id: chunk.id,\n type: \"message\",\n role: \"assistant\",\n content: [],\n model: chunk.model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: 0, // Will be updated in message_delta when finished\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n })\n state.messageStartSent = true\n }\n}\n\nfunction handleReasoningOpaque(\n delta: Delta,\n events: Array<AnthropicStreamEventData>,\n state: AnthropicStreamState,\n) {\n if (delta.reasoning_opaque && delta.reasoning_opaque.length > 0) {\n events.push(\n {\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"thinking\",\n thinking: \"\",\n },\n },\n {\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"thinking_delta\",\n thinking: THINKING_TEXT, // Compatible with opencode, it will filter out blocks where the thinking text is empty, so we add a default thinking text here\n },\n },\n {\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"signature_delta\",\n signature: delta.reasoning_opaque,\n },\n },\n {\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n },\n )\n state.contentBlockIndex++\n }\n}\n\nfunction handleThinkingText(\n delta: Delta,\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n) {\n if (delta.reasoning_text && delta.reasoning_text.length > 0) {\n // compatible with copilot API returning content->reasoning_text->reasoning_opaque in different deltas\n // this is an extremely abnormal situation, probably a server-side bug\n // only occurs in the claude model, with a very low probability of occurrence\n if (state.contentBlockOpen) {\n delta.content = delta.reasoning_text\n delta.reasoning_text = undefined\n return\n }\n\n if (!state.thinkingBlockOpen) {\n events.push({\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"thinking\",\n thinking: \"\",\n },\n })\n state.thinkingBlockOpen = true\n }\n\n events.push({\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"thinking_delta\",\n thinking: delta.reasoning_text,\n },\n })\n }\n}\n\nfunction closeThinkingBlockIfOpen(\n state: AnthropicStreamState,\n events: Array<AnthropicStreamEventData>,\n): void {\n if (state.thinkingBlockOpen) {\n events.push(\n {\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"signature_delta\",\n signature: \"\",\n },\n },\n {\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n },\n )\n state.contentBlockIndex++\n state.thinkingBlockOpen = false\n }\n}\n\nexport function translateErrorToAnthropicErrorEvent(): AnthropicStreamEventData {\n return {\n type: \"error\",\n error: {\n type: \"api_error\",\n message: \"An unexpected error occurred during streaming.\",\n },\n }\n}\n","import type { Context } from \"hono\"\n\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { getSmallModel, shouldCompactUseSmallModel } from \"~/lib/config\"\nimport { createHandlerLogger } from \"~/lib/logger\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from 
\"~/lib/state\"\nimport {\n buildErrorEvent,\n createResponsesStreamState,\n translateResponsesStreamEvent,\n} from \"~/routes/messages/responses-stream-translation\"\nimport {\n translateAnthropicMessagesToResponsesPayload,\n translateResponsesResultToAnthropic,\n} from \"~/routes/messages/responses-translation\"\nimport { getResponsesRequestOptions } from \"~/routes/responses/utils\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n} from \"~/services/copilot/create-chat-completions\"\nimport { createMessages } from \"~/services/copilot/create-messages\"\nimport {\n createResponses,\n type ResponsesResult,\n type ResponseStreamEvent,\n} from \"~/services/copilot/create-responses\"\n\nimport {\n type AnthropicMessagesPayload,\n type AnthropicStreamState,\n type AnthropicTextBlock,\n type AnthropicToolResultBlock,\n} from \"./anthropic-types\"\nimport {\n translateToAnthropic,\n translateToOpenAI,\n} from \"./non-stream-translation\"\nimport { translateChunkToAnthropicEvents } from \"./stream-translation\"\n\nconst logger = createHandlerLogger(\"messages-handler\")\n\nconst compactSystemPromptStart =\n \"You are a helpful AI assistant tasked with summarizing conversations\"\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n logger.debug(\"Anthropic request payload:\", JSON.stringify(anthropicPayload))\n\n // claude code and opencode compact request detection\n const isCompact = isCompactRequest(anthropicPayload)\n\n // fix claude code 2.0.28+ warmup request consume premium request, forcing small model if no tools are used\n // set \"CLAUDE_CODE_SUBAGENT_MODEL\": \"you small model\" also can avoid this\n const anthropicBeta = c.req.header(\"anthropic-beta\")\n logger.debug(\"Anthropic Beta header:\", anthropicBeta)\n const noTools = !anthropicPayload.tools || anthropicPayload.tools.length === 0\n if (anthropicBeta && noTools && !isCompact) {\n anthropicPayload.model = getSmallModel()\n }\n\n if (isCompact) {\n logger.debug(\"Is compact request:\", isCompact)\n if (shouldCompactUseSmallModel()) {\n anthropicPayload.model = getSmallModel()\n }\n } else {\n // Merge tool_result and text blocks into tool_result to avoid consuming premium requests\n // (caused by skill invocations, edit hooks, plan or to do reminders)\n // e.g. 
{\"role\":\"user\",\"content\":[{\"type\":\"tool_result\",\"content\":\"Launching skill: xxx\"},{\"type\":\"text\",\"text\":\"xxx\"}]}\n // not only for claude, but also for opencode\n // compact requests are excluded from this processing\n mergeToolResultForClaude(anthropicPayload)\n }\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n if (shouldUseMessagesApi(anthropicPayload.model)) {\n return await handleWithMessagesApi(c, anthropicPayload, anthropicBeta)\n }\n\n if (shouldUseResponsesApi(anthropicPayload.model)) {\n return await handleWithResponsesApi(c, anthropicPayload)\n }\n\n return await handleWithChatCompletions(c, anthropicPayload)\n}\n\nconst RESPONSES_ENDPOINT = \"/responses\"\nconst MESSAGES_ENDPOINT = \"/v1/messages\"\n\nconst handleWithChatCompletions = async (\n c: Context,\n anthropicPayload: AnthropicMessagesPayload,\n) => {\n const openAIPayload = translateToOpenAI(anthropicPayload)\n logger.debug(\n \"Translated OpenAI request payload:\",\n JSON.stringify(openAIPayload),\n )\n\n const response = await createChatCompletions(openAIPayload)\n\n if (isNonStreaming(response)) {\n logger.debug(\n \"Non-streaming response from Copilot:\",\n JSON.stringify(response),\n )\n const anthropicResponse = translateToAnthropic(response)\n logger.debug(\n \"Translated Anthropic response:\",\n JSON.stringify(anthropicResponse),\n )\n return c.json(anthropicResponse)\n }\n\n logger.debug(\"Streaming response from Copilot\")\n return streamSSE(c, async (stream) => {\n const streamState: AnthropicStreamState = {\n messageStartSent: false,\n contentBlockIndex: 0,\n contentBlockOpen: false,\n toolCalls: {},\n thinkingBlockOpen: false,\n }\n\n for await (const rawEvent of response) {\n logger.debug(\"Copilot raw stream event:\", JSON.stringify(rawEvent))\n if (rawEvent.data === \"[DONE]\") {\n break\n }\n\n if (!rawEvent.data) {\n continue\n }\n\n const chunk = JSON.parse(rawEvent.data) as ChatCompletionChunk\n const events = translateChunkToAnthropicEvents(chunk, streamState)\n\n for (const event of events) {\n logger.debug(\"Translated Anthropic event:\", JSON.stringify(event))\n await stream.writeSSE({\n event: event.type,\n data: JSON.stringify(event),\n })\n }\n }\n })\n}\n\nconst handleWithResponsesApi = async (\n c: Context,\n anthropicPayload: AnthropicMessagesPayload,\n) => {\n const responsesPayload =\n translateAnthropicMessagesToResponsesPayload(anthropicPayload)\n logger.debug(\n \"Translated Responses payload:\",\n JSON.stringify(responsesPayload),\n )\n\n const { vision, initiator } = getResponsesRequestOptions(responsesPayload)\n const response = await createResponses(responsesPayload, {\n vision,\n initiator,\n })\n\n if (responsesPayload.stream && isAsyncIterable(response)) {\n logger.debug(\"Streaming response from Copilot (Responses API)\")\n return streamSSE(c, async (stream) => {\n const streamState = createResponsesStreamState()\n\n for await (const chunk of response) {\n const eventName = chunk.event\n if (eventName === \"ping\") {\n await stream.writeSSE({ event: \"ping\", data: \"\" })\n continue\n }\n\n const data = chunk.data\n if (!data) {\n continue\n }\n\n logger.debug(\"Responses raw stream event:\", data)\n\n const events = translateResponsesStreamEvent(\n JSON.parse(data) as ResponseStreamEvent,\n streamState,\n )\n for (const event of events) {\n const eventData = JSON.stringify(event)\n logger.debug(\"Translated Anthropic event:\", eventData)\n await stream.writeSSE({\n event: event.type,\n data: eventData,\n })\n }\n\n if 
(streamState.messageCompleted) {\n logger.debug(\"Message completed, ending stream\")\n break\n }\n }\n\n if (!streamState.messageCompleted) {\n logger.warn(\n \"Responses stream ended without completion; sending error event\",\n )\n const errorEvent = buildErrorEvent(\n \"Responses stream ended without completion\",\n )\n await stream.writeSSE({\n event: errorEvent.type,\n data: JSON.stringify(errorEvent),\n })\n }\n })\n }\n\n logger.debug(\n \"Non-streaming Responses result:\",\n JSON.stringify(response).slice(-400),\n )\n const anthropicResponse = translateResponsesResultToAnthropic(\n response as ResponsesResult,\n )\n logger.debug(\n \"Translated Anthropic response:\",\n JSON.stringify(anthropicResponse),\n )\n return c.json(anthropicResponse)\n}\n\nconst handleWithMessagesApi = async (\n c: Context,\n anthropicPayload: AnthropicMessagesPayload,\n anthropicBetaHeader?: string,\n) => {\n // Pre-request processing: filter thinking blocks for Claude models so only\n // valid thinking blocks are sent to the Copilot Messages API.\n for (const msg of anthropicPayload.messages) {\n if (msg.role === \"assistant\" && Array.isArray(msg.content)) {\n msg.content = msg.content.filter((block) => {\n if (block.type !== \"thinking\") return true\n return (\n block.thinking\n && block.thinking !== \"Thinking...\"\n && block.signature\n && !block.signature.includes(\"@\")\n )\n })\n }\n }\n\n const response = await createMessages(anthropicPayload, anthropicBetaHeader)\n\n if (isAsyncIterable(response)) {\n logger.debug(\"Streaming response from Copilot (Messages API)\")\n return streamSSE(c, async (stream) => {\n for await (const event of response) {\n const eventName = event.event\n const data = event.data ?? \"\"\n logger.debug(\"Messages raw stream event:\", data)\n await stream.writeSSE({\n event: eventName,\n data,\n })\n }\n })\n }\n\n logger.debug(\n \"Non-streaming Messages result:\",\n JSON.stringify(response).slice(-400),\n )\n return c.json(response)\n}\n\nconst shouldUseResponsesApi = (modelId: string): boolean => {\n const selectedModel = state.models?.data.find((model) => model.id === modelId)\n return (\n selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false\n )\n}\n\nconst shouldUseMessagesApi = (modelId: string): boolean => {\n const selectedModel = state.models?.data.find((model) => model.id === modelId)\n return (\n selectedModel?.supported_endpoints?.includes(MESSAGES_ENDPOINT) ?? 
false\n )\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n\nconst isAsyncIterable = <T>(value: unknown): value is AsyncIterable<T> =>\n Boolean(value)\n && typeof (value as AsyncIterable<T>)[Symbol.asyncIterator] === \"function\"\n\nconst isCompactRequest = (\n anthropicPayload: AnthropicMessagesPayload,\n): boolean => {\n const system = anthropicPayload.system\n if (typeof system === \"string\") {\n return system.startsWith(compactSystemPromptStart)\n }\n if (!Array.isArray(system)) return false\n\n return system.some(\n (msg) =>\n typeof msg.text === \"string\"\n && msg.text.startsWith(compactSystemPromptStart),\n )\n}\n\nconst mergeContentWithText = (\n tr: AnthropicToolResultBlock,\n textBlock: AnthropicTextBlock,\n): AnthropicToolResultBlock => {\n if (typeof tr.content === \"string\") {\n return { ...tr, content: `${tr.content}\\n\\n${textBlock.text}` }\n }\n return {\n ...tr,\n content: [...tr.content, textBlock],\n }\n}\n\nconst mergeContentWithTexts = (\n tr: AnthropicToolResultBlock,\n textBlocks: Array<AnthropicTextBlock>,\n): AnthropicToolResultBlock => {\n if (typeof tr.content === \"string\") {\n const appendedTexts = textBlocks.map((tb) => tb.text).join(\"\\n\\n\")\n return { ...tr, content: `${tr.content}\\n\\n${appendedTexts}` }\n }\n return { ...tr, content: [...tr.content, ...textBlocks] }\n}\n\nconst mergeToolResultForClaude = (\n anthropicPayload: AnthropicMessagesPayload,\n): void => {\n for (const msg of anthropicPayload.messages) {\n if (msg.role !== \"user\" || !Array.isArray(msg.content)) continue\n\n const toolResults: Array<AnthropicToolResultBlock> = []\n const textBlocks: Array<AnthropicTextBlock> = []\n let valid = true\n\n for (const block of msg.content) {\n if (block.type === \"tool_result\") {\n toolResults.push(block)\n } else if (block.type === \"text\") {\n textBlocks.push(block)\n } else {\n valid = false\n break\n }\n }\n\n if (!valid || toolResults.length === 0 || textBlocks.length === 0) continue\n\n msg.content = mergeToolResult(toolResults, textBlocks)\n }\n}\n\nconst mergeToolResult = (\n toolResults: Array<AnthropicToolResultBlock>,\n textBlocks: Array<AnthropicTextBlock>,\n): Array<AnthropicToolResultBlock> => {\n // equal lengths -> pairwise merge\n if (toolResults.length === textBlocks.length) {\n return toolResults.map((tr, i) => mergeContentWithText(tr, textBlocks[i]))\n }\n\n // lengths differ -> append all textBlocks to the last tool_result\n const lastIndex = toolResults.length - 1\n return toolResults.map((tr, i) =>\n i === lastIndex ? 
mergeContentWithTexts(tr, textBlocks) : tr,\n )\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCountTokens } from \"./count-tokens-handler\"\nimport { handleCompletion } from \"./handler\"\n\nexport const messageRoutes = new Hono()\n\nmessageRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n\nmessageRoutes.post(\"/count_tokens\", async (c) => {\n try {\n return await handleCountTokens(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\nimport { cacheModels } from \"~/lib/utils\"\n\nexport const modelRoutes = new Hono()\n\nmodelRoutes.get(\"/\", async (c) => {\n try {\n if (!state.models) {\n // This should be handled by startup logic, but as a fallback.\n await cacheModels()\n }\n\n const models = state.models?.data.map((model) => ({\n id: model.id,\n object: \"model\",\n type: \"model\",\n created: 0, // No date available from source\n created_at: new Date(0).toISOString(), // No date available from source\n owned_by: model.vendor,\n display_name: model.name,\n }))\n\n return c.json({\n object: \"list\",\n data: models,\n has_more: false,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","/**\n * Stream ID Synchronization for @ai-sdk/openai compatibility\n *\n * Problem: GitHub Copilot's Responses API returns different IDs for the same\n * item in 'added' vs 'done' events. This breaks @ai-sdk/openai which expects\n * consistent IDs across the stream lifecycle.\n *\n * Errors without this fix:\n * - \"activeReasoningPart.summaryParts\" undefined\n * - \"text part not found\"\n *\n * Use case: OpenCode (AI coding assistant) using Codex models (gpt-5.2-codex)\n * via @ai-sdk/openai provider requires the Responses API endpoint.\n */\n\nimport type {\n ResponseOutputItemAddedEvent,\n ResponseOutputItemDoneEvent,\n ResponseStreamEvent,\n} from \"~/services/copilot/create-responses\"\n\ninterface StreamIdTracker {\n outputItems: Map<number, string>\n}\n\nexport const createStreamIdTracker = (): StreamIdTracker => ({\n outputItems: new Map(),\n})\n\nexport const fixStreamIds = (\n data: string,\n event: string | undefined,\n tracker: StreamIdTracker,\n): string => {\n if (!data) return data\n const parsed = JSON.parse(data) as ResponseStreamEvent\n switch (event) {\n case \"response.output_item.added\": {\n return handleOutputItemAdded(\n parsed as ResponseOutputItemAddedEvent,\n tracker,\n )\n }\n case \"response.output_item.done\": {\n return handleOutputItemDone(\n parsed as ResponseOutputItemDoneEvent,\n tracker,\n )\n }\n default: {\n return handleItemId(parsed, tracker)\n }\n }\n}\n\nconst handleOutputItemAdded = (\n parsed: ResponseOutputItemAddedEvent,\n tracker: StreamIdTracker,\n): string => {\n if (!parsed.item.id) {\n let randomSuffix = \"\"\n while (randomSuffix.length < 16) {\n randomSuffix += Math.random().toString(36).slice(2)\n }\n parsed.item.id = `oi_${parsed.output_index}_${randomSuffix.slice(0, 16)}`\n }\n\n const outputIndex = parsed.output_index\n tracker.outputItems.set(outputIndex, parsed.item.id)\n return JSON.stringify(parsed)\n}\n\nconst handleOutputItemDone = (\n parsed: ResponseOutputItemDoneEvent,\n tracker: StreamIdTracker,\n): string => {\n const outputIndex = parsed.output_index\n const originalId = tracker.outputItems.get(outputIndex)\n if (originalId) 
{\n parsed.item.id = originalId\n }\n return JSON.stringify(parsed)\n}\n\nconst handleItemId = (\n parsed: ResponseStreamEvent & { output_index?: number; item_id?: string },\n tracker: StreamIdTracker,\n): string => {\n const outputIndex = parsed.output_index\n if (outputIndex !== undefined) {\n const itemId = tracker.outputItems.get(outputIndex)\n if (itemId) {\n parsed.item_id = itemId\n }\n }\n return JSON.stringify(parsed)\n}\n","import type { Context } from \"hono\"\n\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { getConfig } from \"~/lib/config\"\nimport { createHandlerLogger } from \"~/lib/logger\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from \"~/lib/state\"\nimport {\n createResponses,\n type ResponsesPayload,\n type ResponsesResult,\n} from \"~/services/copilot/create-responses\"\n\nimport { createStreamIdTracker, fixStreamIds } from \"./stream-id-sync\"\nimport { getResponsesRequestOptions } from \"./utils\"\n\nconst logger = createHandlerLogger(\"responses-handler\")\n\nconst RESPONSES_ENDPOINT = \"/responses\"\n\nexport const handleResponses = async (c: Context) => {\n await checkRateLimit(state)\n\n const payload = await c.req.json<ResponsesPayload>()\n logger.debug(\"Responses request payload:\", JSON.stringify(payload))\n\n useFunctionApplyPatch(payload)\n\n // Remove web_search tool as it's not supported by GitHub Copilot\n removeWebSearchTool(payload)\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === payload.model,\n )\n const supportsResponses =\n selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false\n\n if (!supportsResponses) {\n return c.json(\n {\n error: {\n message:\n \"This model does not support the responses endpoint. Please choose a different model.\",\n type: \"invalid_request_error\",\n },\n },\n 400,\n )\n }\n\n const { vision, initiator } = getResponsesRequestOptions(payload)\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n const response = await createResponses(payload, { vision, initiator })\n\n if (isStreamingRequested(payload) && isAsyncIterable(response)) {\n logger.debug(\"Forwarding native Responses stream\")\n return streamSSE(c, async (stream) => {\n const idTracker = createStreamIdTracker()\n\n for await (const chunk of response) {\n logger.debug(\"Responses stream chunk:\", JSON.stringify(chunk))\n\n const processedData = fixStreamIds(\n (chunk as { data?: string }).data ?? \"\",\n (chunk as { event?: string }).event,\n idTracker,\n )\n\n await stream.writeSSE({\n id: (chunk as { id?: string }).id,\n event: (chunk as { event?: string }).event,\n data: processedData,\n })\n }\n })\n }\n\n logger.debug(\n \"Forwarding native Responses result:\",\n JSON.stringify(response).slice(-400),\n )\n return c.json(response as ResponsesResult)\n}\n\nconst isAsyncIterable = <T>(value: unknown): value is AsyncIterable<T> =>\n Boolean(value)\n && typeof (value as AsyncIterable<T>)[Symbol.asyncIterator] === \"function\"\n\nconst isStreamingRequested = (payload: ResponsesPayload): boolean =>\n Boolean(payload.stream)\n\nconst useFunctionApplyPatch = (payload: ResponsesPayload): void => {\n const config = getConfig()\n const useFunctionApplyPatch = config.useFunctionApplyPatch ?? 
true\n if (useFunctionApplyPatch) {\n logger.debug(\"Using function tool apply_patch for responses\")\n if (Array.isArray(payload.tools)) {\n const toolsArr = payload.tools\n for (let i = 0; i < toolsArr.length; i++) {\n const t = toolsArr[i]\n if (t.type === \"custom\" && t.name === \"apply_patch\") {\n toolsArr[i] = {\n type: \"function\",\n name: t.name,\n description: \"Use the `apply_patch` tool to edit files\",\n parameters: {\n type: \"object\",\n properties: {\n input: {\n type: \"string\",\n description: \"The entire contents of the apply_patch command\",\n },\n },\n required: [\"input\"],\n },\n strict: false,\n }\n }\n }\n }\n }\n}\n\nconst removeWebSearchTool = (payload: ResponsesPayload): void => {\n if (!Array.isArray(payload.tools) || payload.tools.length === 0) return\n\n payload.tools = payload.tools.filter((t) => {\n return t.type !== \"web_search\"\n })\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleResponses } from \"./handler\"\n\nexport const responsesRoutes = new Hono()\n\nresponsesRoutes.post(\"/\", async (c) => {\n try {\n return await handleResponses(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { state } from \"~/lib/state\"\n\nexport const tokenRoute = new Hono()\n\ntokenRoute.get(\"/\", (c) => {\n try {\n return c.json({\n token: state.copilotToken,\n })\n } catch (error) {\n console.error(\"Error fetching token:\", error)\n return c.json({ error: \"Failed to fetch token\", token: null }, 500)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { getCopilotUsage } from \"~/services/github/get-copilot-usage\"\n\nexport const usageRoute = new Hono()\n\nusageRoute.get(\"/\", async (c) => {\n try {\n const usage = await getCopilotUsage()\n return c.json(usage)\n } catch (error) {\n console.error(\"Error fetching Copilot usage:\", error)\n return c.json({ error: \"Failed to fetch Copilot usage\" }, 500)\n }\n})\n","import { Hono } from \"hono\"\nimport { cors } from \"hono/cors\"\nimport { logger } from \"hono/logger\"\n\nimport { completionRoutes } from \"./routes/chat-completions/route\"\nimport { embeddingRoutes } from \"./routes/embeddings/route\"\nimport { messageRoutes } from \"./routes/messages/route\"\nimport { modelRoutes } from \"./routes/models/route\"\nimport { responsesRoutes } from \"./routes/responses/route\"\nimport { tokenRoute } from \"./routes/token/route\"\nimport { usageRoute } from \"./routes/usage/route\"\n\nexport const server = new Hono()\n\nserver.use(logger())\nserver.use(cors())\n\nserver.get(\"/\", (c) => c.text(\"Server running\"))\n\nserver.route(\"/chat/completions\", completionRoutes)\nserver.route(\"/models\", modelRoutes)\nserver.route(\"/embeddings\", embeddingRoutes)\nserver.route(\"/usage\", usageRoute)\nserver.route(\"/token\", tokenRoute)\nserver.route(\"/responses\", responsesRoutes)\n\n// Compatibility with tools that expect v1/ prefix\nserver.route(\"/v1/chat/completions\", completionRoutes)\nserver.route(\"/v1/models\", modelRoutes)\nserver.route(\"/v1/embeddings\", embeddingRoutes)\nserver.route(\"/v1/responses\", responsesRoutes)\n\n// Anthropic compatible endpoints\nserver.route(\"/v1/messages\", 
messageRoutes)\n"],"mappings":";;;;;;;;;;;;AAIA,MAAa,gBAAgB,YAAY;AAKvC,KAAI,CAJa,MAAM,QAAQ,OAAO,4BAA4B,EAChE,MAAM,WACP,CAAC,CAGA,OAAM,IAAI,UACR,oBACA,SAAS,KAAK,EAAE,SAAS,oBAAoB,EAAE,EAAE,QAAQ,KAAK,CAAC,CAChE;;;;;ACJL,MAAM,mBAAmB,QAA+B,KAAK;AAC7D,MAAM,sBAAsB,OAAU,KAAK;AAC3C,MAAM,UAAU,KAAK,KAAK,MAAM,SAAS,OAAO;AAChD,MAAM,oBAAoB;AAC1B,MAAM,kBAAkB;AAExB,MAAM,6BAAa,IAAI,KAA6B;AACpD,MAAM,6BAAa,IAAI,KAA4B;AAEnD,MAAM,2BAA2B;AAC/B,KAAI,CAAC,GAAG,WAAW,QAAQ,CACzB,IAAG,UAAU,SAAS,EAAE,WAAW,MAAM,CAAC;;AAI9C,MAAM,uBAAuB;AAC3B,KAAI,CAAC,GAAG,WAAW,QAAQ,CACzB;CAGF,MAAM,MAAM,KAAK,KAAK;AAEtB,MAAK,MAAM,SAAS,GAAG,YAAY,QAAQ,EAAE;EAC3C,MAAM,WAAW,KAAK,KAAK,SAAS,MAAM;EAE1C,IAAIA;AACJ,MAAI;AACF,WAAQ,GAAG,SAAS,SAAS;UACvB;AACN;;AAGF,MAAI,CAAC,MAAM,QAAQ,CACjB;AAGF,MAAI,MAAM,MAAM,UAAU,iBACxB,KAAI;AACF,MAAG,OAAO,SAAS;UACb;AACN;;;;AAMR,MAAM,cAAc,SAClB,KACG,KAAK,QACJ,OAAO,QAAQ,WAAW,MACxB,KAAK,QAAQ,KAAK;CAAE,OAAO;CAAM,QAAQ;CAAO,CAAC,CAEpD,CACA,KAAK,IAAI;AAEd,MAAM,gBAAgB,SAAiB;CACrC,MAAM,aAAa,KAChB,aAAa,CACb,WAAW,eAAe,IAAI,CAC9B,WAAW,YAAY,GAAG;AAE7B,QAAO,eAAe,KAAK,YAAY;;AAGzC,MAAM,gBAAgB,aAAqC;CACzD,IAAI,SAAS,WAAW,IAAI,SAAS;AACrC,KAAI,CAAC,UAAU,OAAO,WAAW;AAC/B,WAAS,GAAG,kBAAkB,UAAU,EAAE,OAAO,KAAK,CAAC;AACvD,aAAW,IAAI,UAAU,OAAO;AAEhC,SAAO,GAAG,UAAU,UAAmB;AACrC,WAAQ,KAAK,oBAAoB,MAAM;AACvC,cAAW,OAAO,SAAS;IAC3B;;AAEJ,QAAO;;AAGT,MAAM,eAAe,aAAqB;CACxC,MAAM,SAAS,WAAW,IAAI,SAAS;AACvC,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B;CAGF,MAAM,SAAS,aAAa,SAAS;CACrC,MAAM,UAAU,OAAO,KAAK,KAAK,GAAG;AACpC,QAAO,MAAM,UAAU,UAAU;AAC/B,MAAI,MACF,SAAQ,KAAK,+BAA+B,MAAM;GAEpD;AAEF,YAAW,IAAI,UAAU,EAAE,CAAC;;AAG9B,MAAM,wBAAwB;AAC5B,MAAK,MAAM,YAAY,WAAW,MAAM,CACtC,aAAY,SAAS;;AAIzB,MAAM,cAAc,UAAkB,SAAiB;CACrD,IAAI,SAAS,WAAW,IAAI,SAAS;AACrC,KAAI,CAAC,QAAQ;AACX,WAAS,EAAE;AACX,aAAW,IAAI,UAAU,OAAO;;AAGlC,QAAO,KAAK,KAAK;AAEjB,KAAI,OAAO,UAAU,gBACnB,aAAY,SAAS;;AAIzB,YAAY,iBAAiB,kBAAkB;AAE/C,MAAM,gBAAgB;AACpB,kBAAiB;AACjB,MAAK,MAAM,UAAU,WAAW,QAAQ,CACtC,QAAO,KAAK;AAEd,YAAW,OAAO;AAClB,YAAW,OAAO;;AAGpB,QAAQ,GAAG,QAAQ,QAAQ;AAC3B,QAAQ,GAAG,gBAAgB;AACzB,UAAS;AACT,SAAQ,KAAK,EAAE;EACf;AACF,QAAQ,GAAG,iBAAiB;AAC1B,UAAS;AACT,SAAQ,KAAK,EAAE;EACf;AAEF,IAAI,cAAc;AAElB,MAAa,uBAAuB,SAAkC;AACpE,qBAAoB;CAEpB,MAAM,gBAAgB,aAAa,KAAK;CACxC,MAAM,WAAW,QAAQ,QAAQ,KAAK;AAEtC,KAAI,MAAM,QACR,UAAS,QAAQ;AAEnB,UAAS,aAAa,EAAE,CAAC;AAEzB,UAAS,YAAY,EACnB,IAAI,QAAQ;AACV,sBAAoB;AAEpB,MAAI,KAAK,KAAK,GAAG,cAAc,qBAAqB;AAClD,mBAAgB;AAChB,iBAAc,KAAK,KAAK;;EAG1B,MAAM,OAAO,OAAO;EACpB,MAAM,UAAU,KAAK,mBAAmB,QAAQ;EAChD,MAAM,YAAY,KAAK,eAAe,SAAS,EAAE,QAAQ,OAAO,CAAC;EACjE,MAAM,WAAW,KAAK,KAAK,SAAS,GAAG,cAAc,GAAG,QAAQ,MAAM;EACtE,MAAM,UAAU,WAAW,OAAO,KAAuB;EACzD,MAAM,OAAO,IAAI,UAAU,KAAK,OAAO,KAAK,KAAK,OAAO,OAAO,KAAK,GAClE,UAAU,IAAI,YAAY;AAG5B,aAAW,UAAU,KAAK;IAE7B,CAAC;AAEF,QAAO;;;;;AC7KT,eAAsB,eAAe,SAAc;AACjD,KAAIC,QAAM,qBAAqB,OAAW;CAE1C,MAAM,MAAM,KAAK,KAAK;AAEtB,KAAI,CAACA,QAAM,sBAAsB;AAC/B,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,MAAMA,QAAM,wBAAwB;AAE5D,KAAI,iBAAiBA,QAAM,kBAAkB;AAC3C,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,KAAK,KAAKA,QAAM,mBAAmB,eAAe;AAE1E,KAAI,CAACA,QAAM,eAAe;AACxB,UAAQ,KACN,qCAAqC,gBAAgB,gBACtD;AACD,QAAM,IAAI,UACR,uBACA,SAAS,KAAK,EAAE,SAAS,uBAAuB,EAAE,EAAE,QAAQ,KAAK,CAAC,CACnE;;CAGH,MAAM,aAAa,kBAAkB;AACrC,SAAQ,KACN,+BAA+B,gBAAgB,+BAChD;AACD,OAAM,MAAM,WAAW;AAEvB,SAAM,uBAAuB;AAC7B,SAAQ,KAAK,qDAAqD;;;;;ACjCpE,MAAM,eAAe;CACnB,kBAAkB,OAAO;CACzB,mBAAmB,OAAO;CAC1B,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACzB;AAUD,MAAM,gCAAgB,IAAI,KAAsB;;;;AAKhD,MAAM,4BACJ,WACA,SACA,cACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,YAAY,WAAW;AAChC,YAAU,UAAU;AACpB,YAAU,QAAQ,OAAO,SAAS,GAAG,CAAC;AACtC,YAAU,QAAQ,OAAO,SAAS,SAAS,KAAK,CAAC;AACjD,YAAU,QAAQ,OAA
O,SAAS,SAAS,UAAU,CAAC;;AAExD,WAAU,UAAU;AACpB,QAAO;;;;;AAMT,MAAM,+BACJ,cACA,YACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,SAAS,YAChB,WAAU,QAAQ,OAAO,KAAK,UAAU,IAAI,CAAC,SAAS;UAC7C,KAAK,KACd,WAAU,QAAQ,OAAO,KAAK,KAAK,CAAC;AAGxC,QAAO;;;;;AAMT,MAAM,0BACJ,SACA,SACA,cACW;CACX,MAAM,mBAAmB;CACzB,MAAM,gBAAgB;CACtB,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,EAAE;AAClD,MAAI,QAAQ,mBACV;AAEF,MAAI,OAAO,UAAU,SACnB,WAAU,QAAQ,OAAO,MAAM,CAAC;AAElC,MAAI,QAAQ,OACV,WAAU;AAEZ,MAAI,QAAQ,aACV,WAAU,yBACR,OACA,SACA,UACD;AAEH,MAAI,QAAQ,aAAa,MAAM,QAAQ,MAAM,CAC3C,WAAU,4BACR,OACA,QACD;;AAGL,QAAO;;;;;AAMT,MAAM,mBACJ,UACA,SACA,cACW;AACX,KAAI,SAAS,WAAW,EACtB,QAAO;CAET,IAAI,YAAY;AAChB,MAAK,MAAM,WAAW,SACpB,cAAa,uBAAuB,SAAS,SAAS,UAAU;AAGlE,cAAa;AACb,QAAO;;;;;AAMT,MAAM,wBAAwB,OAAO,aAAuC;AAC1E,KAAI,cAAc,IAAI,SAAS,EAAE;EAC/B,MAAM,SAAS,cAAc,IAAI,SAAS;AAC1C,MAAI,OACF,QAAO;;CAIX,MAAM,oBAAoB;AAC1B,KAAI,EAAE,qBAAqB,eAAe;EACxC,MAAM,iBAAkB,MAAM,aAAa,YAAY;AACvD,gBAAc,IAAI,UAAU,eAAe;AAC3C,SAAO;;CAGT,MAAM,iBAAkB,MAAM,aAAa,oBAAoB;AAC/D,eAAc,IAAI,UAAU,eAAe;AAC3C,QAAO;;;;;AAMT,MAAa,yBAAyB,UAAyB;AAC7D,QAAO,MAAM,aAAa,aAAa;;;;;AAMzC,MAAM,qBAAqB,UAAiB;AAC1C,QAAO,MAAM,OAAO,mBAAmB,MAAM,OAAO,UAChD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACT,OAAO;EACR,GACD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACT,OAAO,MAAM,GAAG,WAAW,OAAO;EACnC;;;;;AAMP,MAAM,4BACJ,KACA,MACA,YAIW;CACX,MAAM,EAAE,SAAS,cAAc;CAC/B,IAAI,SAAS,UAAU;AAGvB,KAAI,OAAO,SAAS,YAAY,SAAS,KACvC,QAAO;CAIT,MAAM,QAAQ;CAOd,MAAM,YAAY;CAClB,MAAM,YAAY,MAAM,QAAQ;CAChC,IAAI,YAAY,MAAM,eAAe;AAGrC,KAAI,MAAM,QAAQ,MAAM,QAAQ,MAAM,KAAK,EAAE;AAC3C,YAAU,UAAU;AACpB,OAAK,MAAM,QAAQ,MAAM,MAAM;AAC7B,aAAU,UAAU;AACpB,aAAU,QAAQ,OAAO,OAAO,KAAK,CAAC,CAAC;;;AAK3C,KAAI,UAAU,SAAS,IAAI,CACzB,aAAY,UAAU,MAAM,GAAG,GAAG;CAIpC,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG;AAC1C,WAAU,QAAQ,OAAO,KAAK,CAAC;AAE/B,KAAI,MAAM,SAAS,WAAW,MAAM,SAClC,WAAU,0BAA0B,MAAM,UAAU,SAAS,UAAU;CAIzE,MAAM,eAAe,IAAI,IAAI;EAAC;EAAQ;EAAe;EAAQ;EAAQ,CAAC;AACtE,MAAK,MAAM,gBAAgB,OAAO,KAAK,MAAM,CAC3C,KAAI,CAAC,aAAa,IAAI,aAAa,EAAE;EACnC,MAAM,gBAAgB,MAAM;EAC5B,MAAM,eACJ,OAAO,kBAAkB,WAAW,gBAClC,KAAK,UAAU,cAAc;AAEjC,YAAU,QAAQ,OAAO,GAAG,aAAa,GAAG,eAAe,CAAC;;AAIhE,QAAO;;;;;AAMT,MAAM,6BACJ,YACA,SACA,cACW;CACX,IAAI,SAAS;AACb,KAAI,OAAO,KAAK,WAAW,CAAC,SAAS,GAAG;AACtC,YAAU,UAAU;AACpB,OAAK,MAAM,WAAW,OAAO,KAAK,WAAW,CAC3C,WAAU,yBAAyB,SAAS,WAAW,UAAU;GAC/D;GACA;GACD,CAAC;;AAGN,QAAO;;;;;AAMT,MAAM,6BACJ,YACA,SACA,cACW;AACX,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;CAGT,MAAM,SAAS;CACf,IAAI,SAAS;CAEb,MAAM,eAAe,IAAI,IAAI,CAAC,WAAW,uBAAuB,CAAC;AACjE,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,EAAE;AACjD,MAAI,aAAa,IAAI,IAAI,CACvB;AAEF,MAAI,QAAQ,aACV,WAAU,0BACR,OACA,SACA,UACD;OACI;GACL,MAAM,YACJ,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,MAAM;AAC3D,aAAU,QAAQ,OAAO,GAAG,IAAI,GAAG,YAAY,CAAC;;;AAIpD,QAAO;;;;;AAMT,MAAM,uBACJ,MACA,SACA,cACW;CACX,IAAI,SAAS,UAAU;CACvB,MAAM,OAAO,KAAK;CAClB,MAAM,QAAQ,KAAK;CACnB,IAAI,QAAQ,KAAK,eAAe;AAChC,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,MAAM,GAAG,GAAG;CAE5B,MAAM,OAAO,QAAQ,MAAM;AAC3B,WAAU,QAAQ,OAAO,KAAK,CAAC;AAC/B,KACE,OAAO,KAAK,eAAe,YACxB,KAAK,eAAe,KAEvB,WAAU,0BAA0B,KAAK,YAAY,SAAS,UAAU;AAE1E,QAAO;;;;;AAMT,MAAa,qBACX,OACA,SACA,cACW;CACX,IAAI,iBAAiB;AACrB,KAAI,UAAU,OAAO;AACnB,OAAK,MAAM,QAAQ,MACjB,mBAAkB,oBAAoB,MAAM,SAAS,UAAU;AAEjE,oBAAkB,UAAU;OAE5B,MAAK,MAAM,QAAQ,MACjB,mBAAkB,QAAQ,OAAO,KAAK,UAAU,KAAK,CAAC,CAAC;AAG3D,QAAO;;;;;AAMT,MAAa,gBAAgB,OAC3B,SACA,UAC+C;CAE/C,MAAM,YAAY,sBAAsB,MAAM;CAG9C,MAAM,UAAU,MAAM,sBAAsB,UAAU;CAEtD,MAAM,qBAAqB,QAAQ;CACnC,MAAM,gBAAgB,mBAAmB,QACtC,QAAQ,IAAI,SAAS,YACvB;CACD,MAAM,iBAAiB,mBAAmB,QACvC,QAAQ,IAAI,SAAS,YAC
vB;CAED,MAAM,YAAY,kBAAkB,MAAM;CAE1C,IAAI,cAAc,gBAAgB,eAAe,SAAS,UAAU;AACpE,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,EAC1C,gBAAe,kBAAkB,QAAQ,OAAO,SAAS,UAAU;CAErE,MAAM,eAAe,gBAAgB,gBAAgB,SAAS,UAAU;AAExE,QAAO;EACL,OAAO;EACP,QAAQ;EACT;;;;;ACzXH,MAAa,wBAAwB,OACnC,YACG;AACH,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,QAAQ,SAAS,MACnC,MACC,OAAO,EAAE,YAAY,YAClB,EAAE,SAAS,MAAM,QAAMC,IAAE,SAAS,YAAY,CACpD;CAKD,IAAI,cAAc;AAClB,KAAI,QAAQ,SAAS,SAAS,GAAG;EAC/B,MAAM,cAAc,QAAQ,SAAS,GAAG,GAAG;AAC3C,MAAI,YACF,eAAc,CAAC,aAAa,OAAO,CAAC,SAAS,YAAY,KAAK;;CAKlE,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,oBAAoB;EACxE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,qCAAqC,SAAS;AAC5D,QAAM,IAAI,UAAU,qCAAqC,SAAS;;AAGpE,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;AClC/B,MAAMC,WAAS,oBAAoB,2BAA2B;AAE9D,eAAsBC,mBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,IAAI,UAAU,MAAM,EAAE,IAAI,MAA8B;AACxD,UAAO,MAAM,oBAAoB,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CAAC;CAGrE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,QAAQ,MACjC;AAGD,KAAI;AACF,MAAI,eAAe;GACjB,MAAM,aAAa,MAAM,cAAc,SAAS,cAAc;AAC9D,YAAO,KAAK,wBAAwB,WAAW;QAE/C,UAAO,KAAK,sDAAsD;UAE7D,OAAO;AACd,WAAO,KAAK,oCAAoC,MAAM;;AAGxD,KAAI,MAAM,cAAe,OAAM,eAAe;AAE9C,KAAI,UAAU,QAAQ,WAAW,EAAE;AACjC,YAAU;GACR,GAAG;GACH,YAAY,eAAe,aAAa,OAAO;GAChD;AACD,WAAO,MAAM,sBAAsB,KAAK,UAAU,QAAQ,WAAW,CAAC;;CAGxE,MAAM,WAAW,MAAM,sBAAsB,QAAQ;AAErD,KAAIC,iBAAe,SAAS,EAAE;AAC5B,WAAO,MAAM,2BAA2B,KAAK,UAAU,SAAS,CAAC;AACjE,SAAO,EAAE,KAAK,SAAS;;AAGzB,UAAO,MAAM,qBAAqB;AAClC,QAAO,UAAU,GAAG,OAAO,WAAW;AACpC,aAAW,MAAM,SAAS,UAAU;AAClC,YAAO,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AACvD,SAAM,OAAO,SAAS,MAAoB;;GAE5C;;AAGJ,MAAMA,oBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;;;;AC/D3E,MAAa,mBAAmB,IAAI,MAAM;AAE1C,iBAAiB,KAAK,KAAK,OAAO,MAAM;AACtC,KAAI;AACF,SAAO,MAAMC,mBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,mBAAmB,OAAO,YAA8B;AACnE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,cAAc;EAClE,QAAQ;EACR,SAAS,eAAe,MAAM;EAC9B,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAE9E,QAAQ,MAAM,SAAS,MAAM;;;;;ACP/B,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;EACF,MAAM,SAAS,MAAM,EAAE,IAAI,MAAwB;EACnD,MAAM,WAAW,MAAM,iBAAiB,OAAO;AAE/C,SAAO,EAAE,KAAK,SAAS;UAChB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,SAAgB,+BACd,cACkC;AAClC,KAAI,iBAAiB,KACnB,QAAO;AAQT,QANsB;EACpB,MAAM;EACN,QAAQ;EACR,YAAY;EACZ,gBAAgB;EACjB,CACoB;;;;;ACevB,MAAa,gBAAgB;AAG7B,SAAgB,kBACd,SACwB;CACxB,MAAM,UAAU,mBAAmB,QAAQ,MAAM;CACjD,MAAM,QAAQ,MAAM,QAAQ,KAAK,MAAM,MAAM,EAAE,OAAO,QAAQ;CAC9D,MAAM,iBAAiB,kBAAkB,SAAS,MAAM;AACxD,QAAO;EACL,OAAO;EACP,UAAU,mCACR,SACA,SACA,eACD;EACD,YAAY,QAAQ;EACpB,MAAM,QAAQ;EACd,QAAQ,QAAQ;EAChB,aAAa,QAAQ;EACrB,OAAO,QAAQ;EACf,MAAM,QAAQ,UAAU;EACxB,OAAO,gCAAgC,QAAQ,MAAM;EACrD,aAAa,qCAAqC,QAAQ,YAAY;EACtE,iBAAiB;EAClB;;AAGH,SAAS,kBACP,SACA,OACoB;CACpB,MAAM,WAAW,QAAQ;AACzB,KAAI,SAAS,UAAU;EACrB,MAAM,oBAAoB,KAAK,IAC7B,MAAM,aAAa,SAAS,uBAAuB,IAClD,MAAM,aAAa,OAAO,qBAAqB,KAAK,EACtD;AACD,MAAI,oBAAoB,KAAK,SAAS,kBAAkB,QAAW;GACjE,MAAM,eAAe,KAAK,IAAI,SAAS,eAAe,kBAAkB;AACxE,UAAO,KAAK,IACV,cACA,MAAM,aAAa,SAAS,uBAAuB,KACpD;;;;AAMP,SAAS,mBAAmB,OAAuB;AAEjD,KAAI,MAAM,WAAW,mBAAmB,CACtC,QAAO,MAAM,QAAQ,uBAAuB,kBAAkB;UACrD,MAAM,WAAW,iBAAiB,CAC3C,QAAO,MAAM,QAAQ,qBAAqB,gBAAgB;AAE5D,QAAO;;AAGT,SAAS,mCACP,SACA,SACA,gBACgB;CAChB,MAAM,iBAAiB,mBACrB,QAAQ,QACR,SACA,eACD;CACD,MAAM,gBAAgB,QAAQ,SAAS,SAAS,YAC9C,QAAQ,SAAS,SACf,kBAAkB,QAAQ,GAC1B,uBAAuB,SAAS,QAAQ,CAC3C;AACD,KAAI,QAAQ,
WAAW,SAAS,IAAI,gBAAgB;EAClD,MAAM,WACJ;EACF,MAAM,iBAAiB,cAAc,WAAW,MAAM,EAAE,SAAS,OAAO;AACxE,MAAI,mBAAmB,IAAI;GACzB,MAAM,cAAc,cAAc;AAClC,OAAI,OAAO,YAAY,YAAY,SACjC,aAAY,UAAU,WAAW,SAAS,YAAY;YAC7C,MAAM,QAAQ,YAAY,QAAQ,CAC3C,aAAY,UAAU,CACpB;IAAE,MAAM;IAAQ,MAAM;IAAU,EAChC,GAAG,YAAY,QAChB;;;AAIP,QAAO,CAAC,GAAG,gBAAgB,GAAG,cAAc;;AAG9C,SAAS,mBACP,QACA,SACA,gBACgB;AAChB,KAAI,CAAC,OACH,QAAO,EAAE;CAGX,IAAI,cAAc;AAClB,KAAI,QAAQ,WAAW,SAAS,IAAI,eAClC,eAAc;;;;;;;;;;;AAahB,KAAI,OAAO,WAAW,SACpB,QAAO,CAAC;EAAE,MAAM;EAAU,SAAS,SAAS;EAAa,CAAC;KAU1D,QAAO,CAAC;EAAE,MAAM;EAAU,SARP,OAChB,KAAK,OAAO,UAAU;AACrB,OAAI,UAAU,EACZ,QAAO,MAAM,OAAO;AAEtB,UAAO,MAAM;IACb,CACD,KAAK,OAAO;EACgC,CAAC;;AAIpD,SAAS,kBAAkB,SAA+C;CACxE,MAAMC,cAA8B,EAAE;AAEtC,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAAE;EAClC,MAAM,mBAAmB,QAAQ,QAAQ,QACtC,UACC,MAAM,SAAS,cAClB;EACD,MAAM,cAAc,QAAQ,QAAQ,QACjC,UAAU,MAAM,SAAS,cAC3B;AAGD,OAAK,MAAM,SAAS,iBAClB,aAAY,KAAK;GACf,MAAM;GACN,cAAc,MAAM;GACpB,SAAS,WAAW,MAAM,QAAQ;GACnC,CAAC;AAGJ,MAAI,YAAY,SAAS,EACvB,aAAY,KAAK;GACf,MAAM;GACN,SAAS,WAAW,YAAY;GACjC,CAAC;OAGJ,aAAY,KAAK;EACf,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CAAC;AAGJ,QAAO;;AAGT,SAAS,uBACP,SACA,SACgB;AAChB,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,CACL;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;CAGH,MAAM,gBAAgB,QAAQ,QAAQ,QACnC,UAA0C,MAAM,SAAS,WAC3D;CAED,IAAI,iBAAiB,QAAQ,QAAQ,QAClC,UAA2C,MAAM,SAAS,WAC5D;AAED,KAAI,QAAQ,WAAW,SAAS,CAC9B,kBAAiB,eAAe,QAC7B,MACC,EAAE,YACC,EAAE,aAAa,iBACf,EAAE,aAEF,CAAC,EAAE,UAAU,SAAS,IAAI,CAChC;CAGH,MAAM,mBAAmB,eACtB,QAAQ,MAAM,EAAE,YAAY,EAAE,aAAa,cAAc,CACzD,KAAK,MAAM,EAAE,SAAS;CAEzB,MAAM,qBACJ,iBAAiB,SAAS,IAAI,iBAAiB,KAAK,OAAO,GAAG;CAEhE,MAAM,YAAY,eAAe,MAAM,MAAM,EAAE,UAAU,EAAE;AAE3D,QAAO,cAAc,SAAS,IAC1B,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACpC,gBAAgB;EAChB,kBAAkB;EAClB,YAAY,cAAc,KAAK,aAAa;GAC1C,IAAI,QAAQ;GACZ,MAAM;GACN,UAAU;IACR,MAAM,QAAQ;IACd,WAAW,KAAK,UAAU,QAAQ,MAAM;IACzC;GACF,EAAE;EACJ,CACF,GACD,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACpC,gBAAgB;EAChB,kBAAkB;EACnB,CACF;;AAGP,SAAS,WACP,SAGoC;AACpC,KAAI,OAAO,YAAY,SACrB,QAAO;AAET,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;AAIT,KAAI,CADa,QAAQ,MAAM,UAAU,MAAM,SAAS,QAAQ,CAE9D,QAAO,QACJ,QAAQ,UAAuC,MAAM,SAAS,OAAO,CACrE,KAAK,UAAU,MAAM,KAAK,CAC1B,KAAK,OAAO;CAGjB,MAAMC,eAAmC,EAAE;AAC3C,MAAK,MAAM,SAAS,QAClB,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM,CAAC;AACrD;EAEF,KAAK;AACH,gBAAa,KAAK;IAChB,MAAM;IACN,WAAW,EACT,KAAK,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO,QAC7D;IACF,CAAC;AACF;;AAKN,QAAO;;AAGT,SAAS,gCACP,gBACyB;AACzB,KAAI,CAAC,eACH;AAEF,QAAO,eAAe,KAAK,UAAU;EACnC,MAAM;EACN,UAAU;GACR,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB;EACF,EAAE;;AAGL,SAAS,qCACP,qBACuC;AACvC,KAAI,CAAC,oBACH;AAGF,SAAQ,oBAAoB,MAA5B;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK;AACH,OAAI,oBAAoB,KACtB,QAAO;IACL,MAAM;IACN,UAAU,EAAE,MAAM,oBAAoB,MAAM;IAC7C;AAEH;EAEF,KAAK,OACH,QAAO;EAET,QACE;;;AAON,SAAgB,qBACd,UACmB;CAEnB,MAAMC,yBAAgE,EAAE;CACxE,IAAI,aAAa,SAAS,QAAQ,IAAI,iBAAiB;AAGvD,MAAK,MAAM,UAAU,SAAS,SAAS;EACrC,MAAM,aAAa,uBAAuB,OAAO,QAAQ,QAAQ;EACjE,MAAM,cAAc,wBAClB,OAAO,QAAQ,gBACf,OAAO,QAAQ,iBAChB;EACD,MAAM,gBAAgB,0BAA0B,OAAO,QAAQ,WAAW;AAE1E,yBAAuB,KAAK,GAAG,aAAa,GAAG,YAAY,GAAG,cAAc;AAG5E,MAAI,OAAO,kBAAkB,gBAAgB,eAAe,OAC1D,cAAa,OAAO;;AAIxB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS;EACT,aAAa,+BAA+B,WAAW;EACvD,eAAe;EACf,OAAO;GACL,eACG,SAAS,OAAO,iBAAiB,MAC/B,SAAS,OAAO,uBAAuB,iBAAiB;GAC7D,eAAe,SAAS,OAAO,qBAAqB;GACpD,GAAI,SAAS,OAAO,uBAAuB,kBACrC,UAAa,EACjB,yBACE,SAAS,MAAM,sBAAsB,eACxC;GACF;EACF;;AAGH,SAAS,uBACP,gBAC2B;AAC3B,KAAI,OAAO,mBAAmB,YAAY,eAAe,SAAS,EAChE,QAAO,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAg
B,CAAC;AAGjD,KAAI,MAAM,QAAQ,eAAe,CAC/B,QAAO,eACJ,QAAQ,SAA2B,KAAK,SAAS,OAAO,CACxD,KAAK,UAAU;EAAE,MAAM;EAAQ,MAAM,KAAK;EAAM,EAAE;AAGvD,QAAO,EAAE;;AAGX,SAAS,wBACP,eACA,iBAC+B;AAC/B,KAAI,iBAAiB,cAAc,SAAS,EAC1C,QAAO,CACL;EACE,MAAM;EACN,UAAU;EACV,WAAW,mBAAmB;EAC/B,CACF;AAEH,KAAI,mBAAmB,gBAAgB,SAAS,EAC9C,QAAO,CACL;EACE,MAAM;EACN,UAAU;EACV,WAAW;EACZ,CACF;AAEH,QAAO,EAAE;;AAGX,SAAS,0BACP,WAC8B;AAC9B,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,UAAU,KAAK,cAAc;EAClC,MAAM;EACN,IAAI,SAAS;EACb,MAAM,SAAS,SAAS;EACxB,OAAO,KAAK,MAAM,SAAS,SAAS,UAAU;EAC/C,EAAE;;;;;;;;AC3bL,eAAsB,kBAAkB,GAAY;AAClD,KAAI;EACF,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;EAEpD,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;EAErE,MAAM,gBAAgB,kBAAkB,iBAAiB;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,iBAAiB,MAC1C;AAED,MAAI,CAAC,eAAe;AAClB,WAAQ,KAAK,iDAAiD;AAC9D,UAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;EAGJ,MAAM,aAAa,MAAM,cAAc,eAAe,cAAc;AAEpE,MAAI,iBAAiB,SAAS,iBAAiB,MAAM,SAAS,GAAG;GAC/D,IAAI,2BAA2B;AAC/B,OAAI,eAAe;IACjB,MAAM,cAAc,iBAAiB,MAAM;AAC3C,+BAA2B,CAAC,iBAAiB,MAAM,MAChD,SACC,KAAK,KAAK,WAAW,QAAQ,IACzB,KAAK,SAAS,WAAW,gBAAgB,EAChD;;AAEH,OAAI,0BACF;QAAI,iBAAiB,MAAM,WAAW,SAAS,CAE7C,YAAW,QAAQ,WAAW,QAAQ;aAC7B,iBAAiB,MAAM,WAAW,OAAO,CAClD,YAAW,QAAQ,WAAW,QAAQ;;;EAK5C,IAAI,kBAAkB,WAAW,QAAQ,WAAW;AACpD,MAAI,iBAAiB,MAAM,WAAW,SAAS,CAC7C,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;AAGtD,UAAQ,KAAK,gBAAgB,gBAAgB;AAE7C,SAAO,EAAE,KAAK,EACZ,cAAc,iBACf,CAAC;UACK,OAAO;AACd,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,SAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;;;;;ACoQN,MAAa,kBAAkB,OAC7B,SACA,EAAE,QAAQ,gBACyB;AACnC,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,OAAO;EAChC,eAAe;EAChB;AAGD,SAAQ,eAAe;CAEvB,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,aAAa;EACjE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,8BAA8B,SAAS;AACrD,QAAM,IAAI,UAAU,8BAA8B,SAAS;;AAG7D,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;ACxT/B,MAAM,eAAe;AAErB,MAAaC,kBAAgB;AAE7B,MAAa,gDACX,YACqB;CACrB,MAAMC,QAAkC,EAAE;AAE1C,MAAK,MAAM,WAAW,QAAQ,SAC5B,OAAM,KAAK,GAAG,iBAAiB,QAAQ,CAAC;CAG1C,MAAM,kBAAkB,sBAAsB,QAAQ,MAAM;CAC5D,MAAM,aAAa,2BAA2B,QAAQ,YAAY;CAElE,MAAM,EAAE,kBAAkB,mBAAmB,YAC3C,QAAQ,UAAU,QACnB;AAwBD,QAtB2C;EACzC,OAAO,QAAQ;EACf;EACA,cAAc,sBAAsB,QAAQ,QAAQ,QAAQ,MAAM;EAClE,aAAa;EACb,OAAO,QAAQ,SAAS;EACxB,mBAAmB,KAAK,IAAI,QAAQ,YAAY,MAAM;EACtD,OAAO;EACP,aAAa;EACb,UAAU,QAAQ,WAAW,EAAE,GAAG,QAAQ,UAAU,GAAG;EACvD,mBAAmB;EACnB,kBAAkB;EAClB,QAAQ,QAAQ,UAAU;EAC1B,OAAO;EACP,qBAAqB;EACrB,WAAW;GACT,QAAQ,2BAA2B,QAAQ,MAAM;GACjD,SAAS;GACV;EACD,SAAS,CAAC,8BAA8B;EACzC;;AAKH,MAAM,oBACJ,YAC6B;AAC7B,KAAI,QAAQ,SAAS,OACnB,QAAO,qBAAqB,QAAQ;AAGtC,QAAO,0BAA0B,QAAQ;;AAG3C,MAAM,wBACJ,YAC6B;AAC7B,KAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,CAAC,cAAc,QAAQ,QAAQ,QAAQ,CAAC;AAGjD,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,EAAE;CAGX,MAAMC,QAAkC,EAAE;CAC1C,MAAMC,iBAA8C,EAAE;AAEtD,MAAK,MAAM,SAAS,QAAQ,SAAS;AACnC,MAAI,MAAM,SAAS,eAAe;AAChC,uBAAoB,QAAQ,gBAAgB,MAAM;AAClD,SAAM,KAAK,yBAAyB,MAAM,CAAC;AAC3C;;EAGF,MAAM,YAAY,0BAA0B,MAAM;AAClD,MAAI,UACF,gBAAe,KAAK,UAAU;;AAIlC,qBAAoB,QAAQ,gBAAgB,MAAM;AAElD,QAAO;;AAGT,MAAM,6BACJ,YAC6B;AAC7B,KAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,CAAC,cAAc,aAAa,QAAQ,QAAQ,CAAC;AAGtD,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,EAAE;CAGX,MAAMD,QAAkC,EAAE;CAC1C,MAAMC,iBAA8C,EAAE;AAEtD,MAAK,MAAM,SAAS,QAAQ,SAAS;AACnC,MAAI,MAAM,SAAS,YAAY;AAC7B,uBAAoB,aAAa,gBAAgB,MAAM;AACvD,SAAM,KAAK,uBAAuB,MAAM,CAAC;AACzC;;AAGF,MACE,MAAM,SAAS,cACZ,MAAM,aACN,MAAM,UAAU,SAAS,IAAI,EAChC;AACA,uBAAoB,aAAa,gBAAgB,MAAM;AACvD,SAAM,KAAK,uBAAuB,MAAM,CAAC;AACzC;;EAGF,MAAM,YAAY,+BAA+B,MAAM;AACvD,MAAI,UACF,gBAAe,KAAK,UAAU;;AAIlC,qBAAoB,aAAa,gBAAgB,MAAM;AAEvD
,QAAO;;AAGT,MAAM,6BACJ,UACqC;AACrC,SAAQ,MAAM,MAAd;EACE,KAAK,OACH,QAAO,kBAAkB,MAAM,KAAK;EAEtC,KAAK,QACH,QAAO,mBAAmB,MAAM;EAElC,QACE;;;AAKN,MAAM,kCACJ,UACqC;AACrC,SAAQ,MAAM,MAAd;EACE,KAAK,OACH,QAAO,wBAAwB,MAAM,KAAK;EAE5C,QACE;;;AAKN,MAAM,uBACJ,MACA,gBACA,WACG;AACH,KAAI,eAAe,WAAW,EAC5B;CAGF,MAAM,iBAAiB,CAAC,GAAG,eAAe;AAE1C,QAAO,KAAK,cAAc,MAAM,eAAe,CAAC;AAChD,gBAAe,SAAS;;AAG1B,MAAM,iBACJ,MACA,aAC0B;CAC1B,MAAM;CACN;CACA;CACD;AAED,MAAM,qBAAqB,UAAqC;CAC9D,MAAM;CACN;CACD;AAED,MAAM,2BAA2B,UAAqC;CACpE,MAAM;CACN;CACD;AAED,MAAM,sBACJ,WACwB;CACxB,MAAM;CACN,WAAW,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO;CAClE,QAAQ;CACT;AAED,MAAM,0BACJ,UAC2B;CAI3B,MAAM,QAAQ,MAAM,UAAU,MAAM,IAAI;CACxC,MAAM,YAAY,MAAM;CACxB,MAAM,KAAK,MAAM;CACjB,MAAM,WAAW,MAAM,aAAaH,kBAAgB,KAAK,MAAM;AAC/D,QAAO;EACL;EACA,MAAM;EACN,SAAS,WAAW,CAAC;GAAE,MAAM;GAAgB,MAAM;GAAU,CAAC,GAAG,EAAE;EACnE,mBAAmB;EACpB;;AAGH,MAAM,0BACJ,WACkC;CAClC,MAAM;CACN,SAAS,MAAM;CACf,MAAM,MAAM;CACZ,WAAW,KAAK,UAAU,MAAM,MAAM;CACtC,QAAQ;CACT;AAED,MAAM,4BACJ,WACoC;CACpC,MAAM;CACN,SAAS,MAAM;CACf,QAAQ,yBAAyB,MAAM,QAAQ;CAC/C,QAAQ,MAAM,WAAW,eAAe;CACzC;AAED,MAAM,yBACJ,QACA,UACkB;AAClB,KAAI,CAAC,OACH,QAAO;CAGT,MAAM,cAAc,uBAAuB,MAAM;AAEjD,KAAI,OAAO,WAAW,SACpB,QAAO,SAAS;CAGlB,MAAM,OAAO,OACV,KAAK,OAAO,UAAU;AACrB,MAAI,UAAU,EACZ,QAAO,MAAM,OAAO;AAEtB,SAAO,MAAM;GACb,CACD,KAAK,IAAI;AACZ,QAAO,KAAK,SAAS,IAAI,OAAO;;AAGlC,MAAM,yBACJ,UACuB;AACvB,KAAI,CAAC,SAAS,MAAM,WAAW,EAC7B,QAAO;AAGT,QAAO,MAAM,KAAK,UAAU;EAC1B,MAAM;EACN,MAAM,KAAK;EACX,YAAY,KAAK;EACjB,QAAQ;EACR,GAAI,KAAK,cAAc,EAAE,aAAa,KAAK,aAAa,GAAG,EAAE;EAC9D,EAAE;;AAGL,MAAM,8BACJ,WAC2C;AAC3C,KAAI,CAAC,OACH,QAAO;AAGT,SAAQ,OAAO,MAAf;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK,OACH,QAAO,OAAO,OAAO;GAAE,MAAM;GAAY,MAAM,OAAO;GAAM,GAAG;EAEjE,KAAK,OACH,QAAO;EAET,QACE,QAAO;;;AAKb,MAAa,uCACX,aACsB;CACtB,MAAM,gBAAgB,4BAA4B,SAAS,OAAO;CAClE,MAAM,QAAQ,kBAAkB,SAAS;CACzC,IAAI,mBAAmB,sBAAsB,SAAS,YAAY;AAClE,KAAI,cAAc,SAAS,EACzB,oBAAmB;CAGrB,MAAM,aAAa,uBAAuB,SAAS;AAEnD,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,SAAS;EACT,OAAO,SAAS;EAChB,aAAa;EACb,eAAe;EACf;EACD;;AAGH,MAAM,+BACJ,WAC0C;CAC1C,MAAMI,gBAAuD,EAAE;AAE/D,MAAK,MAAM,QAAQ,OACjB,SAAQ,KAAK,MAAb;EACE,KAAK,aAAa;GAChB,MAAM,eAAe,qBAAqB,KAAK;AAC/C,OAAI,aAAa,SAAS,EACxB,eAAc,KAAK;IACjB,MAAM;IACN,UAAU;IACV,YAAY,KAAK,qBAAqB,MAAM,MAAM,KAAK;IACxD,CAAC;AAEJ;;EAEF,KAAK,iBAAiB;GACpB,MAAM,eAAe,0BAA0B,KAAK;AACpD,OAAI,aACF,eAAc,KAAK,aAAa;AAElC;;EAEF,KAAK,WAAW;GACd,MAAM,eAAe,0BAA0B,KAAK,QAAQ;AAC5D,OAAI,aAAa,SAAS,EACxB,eAAc,KAAK;IAAE,MAAM;IAAQ,MAAM;IAAc,CAAC;AAE1D;;EAEF,SAAS;GAEP,MAAM,eAAe,0BAClB,KAAyD,QAC3D;AACD,OAAI,aAAa,SAAS,EACxB,eAAc,KAAK;IAAE,MAAM;IAAQ,MAAM;IAAc,CAAC;;;AAMhE,QAAO;;AAGT,MAAM,6BACJ,YACW;AACX,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;CAGT,IAAI,aAAa;AAEjB,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,qBAAqB,MAAM,EAAE;AAC/B,iBAAc,MAAM;AACpB;;AAGF,MAAI,wBAAwB,MAAM,EAAE;AAClC,iBAAc,MAAM;AACpB;;AAGF,MAAI,OAAQ,MAA6B,SAAS,UAAU;AAC1D,iBAAe,MAA2B;AAC1C;;AAGF,MAAI,OAAQ,MAAkC,cAAc,UAAU;AACpE,iBAAe,MAAgC;AAC/C;;;AAIJ,QAAO;;AAGT,MAAM,wBAAwB,SAA0C;CACtE,MAAMC,WAA0B,EAAE;CAElC,MAAM,qBAAqB,WAA2C;AACpE,MAAI,CAAC,MAAM,QAAQ,OAAO,CACxB;AAGF,OAAK,MAAM,SAAS,OAClB,KAAI,OAAO,MAAM,SAAS,UAAU;AAClC,YAAS,KAAK,MAAM,KAAK;AACzB;;;AAMN,KAAI,CAAC,KAAK,WAAW,KAAK,QAAQ,WAAW,EAC3C,QAAOL;AAGT,mBAAkB,KAAK,QAAQ;AAE/B,QAAO,SAAS,KAAK,GAAG,CAAC,MAAM;;AAGjC,MAAM,6BACJ,SACiC;CACjC,MAAM,SAAS,KAAK;AACpB,KAAI,CAAC,KAAK,QAAQ,CAAC,OACjB,QAAO;CAGT,MAAM,QAAQ,2BAA2B,KAAK,UAAU;AAExD,QAAO;EACL,MAAM;EACN,IAAI;EACJ,MAAM,KAAK;EACX;EACD;;AAGH,MAAM,8BACJ,iBAC4B;AAC5B,KAAI,OAAO,iBAAiB,YAAY,aAAa,MAAM,CAAC,WAAW,EACrE,QAAO,EAAE;AAGX,KAAI;EACF,MAAMM,SAAkB,KAAK,MAAM,aAAa;AAEhD,MAAI,MAAM,QA
AQ,OAAO,CACvB,QAAO,EAAE,WAAW,QAAQ;AAG9B,MAAI,UAAU,OAAO,WAAW,SAC9B,QAAO;UAEF,OAAO;AACd,UAAQ,KAAK,2CAA2C;GACtD;GACA;GACD,CAAC;;AAGJ,QAAO,EAAE,eAAe,cAAc;;AAGxC,MAAM,yBACJ,eAC0C;AAC1C,KAAI,CAAC,WACH,QAAO,EAAE;AAGX,QAAO,CACL;EACE,MAAM;EACN,MAAM;EACP,CACF;;AAGH,MAAM,0BACJ,aACqC;CACrC,MAAM,EAAE,QAAQ,oBAAoB,sBAAsB;AAE1D,KAAI,WAAW,aAAa;AAC1B,MAAI,SAAS,OAAO,MAAM,SAAS,KAAK,SAAS,gBAAgB,CAC/D,QAAO;AAET,SAAO;;AAGT,KAAI,WAAW,cAAc;AAC3B,MAAI,mBAAmB,WAAW,oBAChC,QAAO;AAET,MAAI,mBAAmB,WAAW,iBAChC,QAAO;;AAIX,QAAO;;AAGT,MAAM,qBACJ,aAC+B;CAC/B,MAAM,cAAc,SAAS,OAAO,gBAAgB;CACpD,MAAM,eAAe,SAAS,OAAO,iBAAiB;CACtD,MAAM,oBAAoB,SAAS,OAAO,sBAAsB;AAEhE,QAAO;EACL,cAAc,eAAe,qBAAqB;EAClD,eAAe;EACf,GAAI,SAAS,OAAO,sBAAsB,kBAAkB,UAAa,EACvE,yBACE,SAAS,MAAM,qBAAqB,eACvC;EACF;;AAGH,MAAM,YAAY,UAChB,OAAO,UAAU,YAAY,UAAU;AAEzC,MAAM,wBACJ,UAEA,SAAS,MAAM,IACZ,UAAU,SACT,MAA6B,SAAS;AAE5C,MAAM,2BACJ,UAEA,SAAS,MAAM,IACZ,UAAU,SACT,MAA6B,SAAS;AAE5C,MAAM,eACJ,WACuE;AACvE,KAAI,CAAC,UAAU,OAAO,WAAW,SAC/B,QAAO;EAAE,kBAAkB;EAAM,gBAAgB;EAAM;CAIzD,MAAM,YAAY,OAAO,MAAM,uBAAuB;CACtD,MAAM,mBAAmB,YAAY,UAAU,KAAK;CAGpD,MAAM,eAAe,OAAO,MAAM,iBAAiB;CACnD,MAAM,iBAAiB,eAAe,aAAa,KAAK;AAExD,QAAO;EAAE;EAAkB;EAAgB;;AAG7C,MAAM,4BACJ,YACyC;AACzC,KAAI,OAAO,YAAY,SACrB,QAAO;AAGT,KAAI,MAAM,QAAQ,QAAQ,EAAE;EAC1B,MAAMC,SAAsC,EAAE;AAC9C,OAAK,MAAM,SAAS,QAClB,SAAQ,MAAM,MAAd;GACE,KAAK;AACH,WAAO,KAAK,kBAAkB,MAAM,KAAK,CAAC;AAC1C;GAEF,KAAK;AACH,WAAO,KAAK,mBAAmB,MAAM,CAAC;AACtC;GAEF,QACE;;AAIN,SAAO;;AAGT,QAAO;;;;;AC3mBT,MAAM,2CAA2C;AAEjD,IAAM,uCAAN,cAAmD,MAAM;CACvD,YAAY,SAAiB;AAC3B,QAAM,QAAQ;AACd,OAAK,OAAO;;;AAIhB,MAAM,4BACJ,eACA,UAIG;CACH,IAAI,QAAQ;AAEZ,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,SAAS,QAAQ,SAAS,QAAQ,SAAS,KAAM;AACnD,YAAS;AACT,OAAI,QAAQ,yCACV,QAAO;IAAE,WAAW;IAAO,UAAU;IAAM;AAE7C;;AAGF,MAAI,SAAS,IACX,SAAQ;;AAIZ,QAAO;EAAE,WAAW;EAAO,UAAU;EAAO;;AAoB9C,MAAa,oCAA0D;CACrE,kBAAkB;CAClB,kBAAkB;CAClB,uBAAuB;CACvB,iCAAiB,IAAI,KAAK;CAC1B,4BAAY,IAAI,KAAK;CACrB,+BAAe,IAAI,KAAK;CACxB,gDAAgC,IAAI,KAAK;CAC1C;AAED,MAAa,iCACX,UACA,YACoC;AAEpC,SADkB,SAAS,MAC3B;EACE,KAAK,mBACH,QAAO,sBAAsB,UAAUC,QAAM;EAG/C,KAAK,6BACH,QAAOC,wBAAsB,UAAUD,QAAM;EAG/C,KAAK,wCACH,QAAO,gCAAgC,UAAUA,QAAM;EAGzD,KAAK,6BACH,QAAO,sBAAsB,UAAUA,QAAM;EAG/C,KAAK,uCACH,QAAO,+BAA+B,UAAUA,QAAM;EAGxD,KAAK,4BACH,QAAO,qBAAqB,UAAUA,QAAM;EAE9C,KAAK,4BACH,QAAOE,uBAAqB,UAAUF,QAAM;EAG9C,KAAK,yCACH,QAAO,iCAAiC,UAAUA,QAAM;EAG1D,KAAK,wCACH,QAAO,gCAAgC,UAAUA,QAAM;EAGzD,KAAK;EACL,KAAK,sBACH,QAAO,wBAAwB,UAAUA,QAAM;EAGjD,KAAK,kBACH,QAAO,qBAAqB,UAAUA,QAAM;EAG9C,KAAK,QACH,QAAO,iBAAiB,UAAUA,QAAM;EAG1C,QACE,QAAO,EAAE;;;AAMf,MAAM,yBACJ,UACA,YACoC;AACpC,QAAO,aAAaA,SAAO,SAAS,SAAS;;AAG/C,MAAMC,2BACJ,UACA,YACoC;CACpC,MAAME,WAAS,IAAI,OAAiC;CACpD,MAAM,sBAAsB,2BAA2B,SAAS;AAChE,KAAI,CAAC,oBACH,QAAOA;CAGT,MAAM,EAAE,aAAa,YAAY,MAAM,qBACrC;CACF,MAAM,aAAa,sBAAsBH,SAAO;EAC9C;EACA;EACA;EACA;EACD,CAAC;AAEF,KAAI,qBAAqB,UAAa,iBAAiB,SAAS,GAAG;AACjE,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,OAAO;IACL,MAAM;IACN,cAAc;IACf;GACF,CAAC;AACF,UAAM,cAAc,IAAI,WAAW;;AAGrC,QAAOG;;AAGT,MAAMD,0BACJ,UACA,YACoC;CACpC,MAAMC,WAAS,IAAI,OAAiC;CACpD,MAAM,OAAO,SAAS;AAEtB,KADiB,KAAK,SACL,YACf,QAAOA;CAGT,MAAM,cAAc,SAAS;CAC7B,MAAM,aAAa,0BAA0BH,SAAO,aAAaG,SAAO;CACxE,MAAM,aAAa,KAAK,qBAAqB,MAAM,MAAM,KAAK;AAC9D,KAAI,WAAW;AAEb,MAAI,CAAC,KAAK,WAAW,KAAK,QAAQ,WAAW,EAC3C,UAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,OAAO;IACL,MAAM;IACN,UAAUC;IACX;GACF,CAAC;AAGJ,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,OAAO;IACL,MAAM;IACN;IACD;GACF,CAAC;AACF,UAAM,cAAc,IAAI,WAAW;;AAGrC,QAAOD;;AAGT,MAAM,oCACJ,UACA,YACoC;CACpC,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,cAAc,SAAS;CAC7B,MAAM,YAAY,SAAS;AAE3B,KAAI,CAAC,UACH,QAAOA;CAGT,MAAM,aAAa,sBAAsBH,SAAO;EAC9C;EACA
;EACD,CAAC;CAEF,MAAM,oBACJA,QAAM,+BAA+B,IAAI,YAAY;AACvD,KAAI,CAAC,kBACH,QAAO,2CACL,IAAI,qCACF,0EACD,EACDA,SACAG,SACD;CAKH,MAAM,EAAE,WAAW,aAAa,yBAC9B,kBAAkB,4BAClB,UACD;AACD,KAAI,SACF,QAAO,2CACL,IAAI,qCACF,oGACD,EACDH,SACAG,SACD;AAEH,mBAAkB,6BAA6B;AAE/C,UAAO,KAAK;EACV,MAAM;EACN,OAAO;EACP,OAAO;GACL,MAAM;GACN,cAAc;GACf;EACF,CAAC;AACF,SAAM,cAAc,IAAI,WAAW;AAEnC,QAAOA;;AAGT,MAAM,mCACJ,UACA,YACoC;CACpC,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,cAAc,SAAS;CAC7B,MAAM,aAAa,sBAAsBH,SAAO;EAC9C;EACA;EACD,CAAC;CAEF,MAAM,iBACJ,OAAO,SAAS,cAAc,WAAW,SAAS,YAAY;AAEhE,KAAI,CAACA,QAAM,cAAc,IAAI,WAAW,IAAI,gBAAgB;AAC1D,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,OAAO;IACL,MAAM;IACN,cAAc;IACf;GACF,CAAC;AACF,UAAM,cAAc,IAAI,WAAW;;AAGrC,SAAM,+BAA+B,OAAO,YAAY;AACxD,QAAOG;;AAGT,MAAM,yBACJ,UACA,YACoC;CACpC,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,cAAc,SAAS;CAC7B,MAAM,eAAe,SAAS;CAC9B,MAAM,YAAY,SAAS;AAE3B,KAAI,CAAC,UACH,QAAOA;CAGT,MAAM,aAAa,sBAAsBH,SAAO;EAC9C;EACA;EACA;EACD,CAAC;AAEF,UAAO,KAAK;EACV,MAAM;EACN,OAAO;EACP,OAAO;GACL,MAAM;GACN,MAAM;GACP;EACF,CAAC;AACF,SAAM,cAAc,IAAI,WAAW;AAEnC,QAAOG;;AAGT,MAAM,mCACJ,UACA,YACoC;CACpC,MAAM,cAAc,SAAS;CAC7B,MAAM,YAAY,SAAS;CAC3B,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,aAAa,0BAA0BH,SAAO,aAAaG,SAAO;AAExE,UAAO,KAAK;EACV,MAAM;EACN,OAAO;EACP,OAAO;GACL,MAAM;GACN,UAAU;GACX;EACF,CAAC;AACF,SAAM,cAAc,IAAI,WAAW;AAEnC,QAAOA;;AAGT,MAAM,kCACJ,UACA,YACoC;CACpC,MAAM,cAAc,SAAS;CAC7B,MAAM,OAAO,SAAS;CACtB,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,aAAa,0BAA0BH,SAAO,aAAaG,SAAO;AAExE,KAAI,QAAQ,CAACH,QAAM,cAAc,IAAI,WAAW,CAC9C,UAAO,KAAK;EACV,MAAM;EACN,OAAO;EACP,OAAO;GACL,MAAM;GACN,UAAU;GACX;EACF,CAAC;AAGJ,QAAOG;;AAGT,MAAM,wBACJ,UACA,YACoC;CACpC,MAAMA,WAAS,IAAI,OAAiC;CACpD,MAAM,cAAc,SAAS;CAC7B,MAAM,eAAe,SAAS;CAC9B,MAAM,OAAO,SAAS;CAEtB,MAAM,aAAa,sBAAsBH,SAAO;EAC9C;EACA;EACA;EACD,CAAC;AAEF,KAAI,QAAQ,CAACA,QAAM,cAAc,IAAI,WAAW,CAC9C,UAAO,KAAK;EACV,MAAM;EACN,OAAO;EACP,OAAO;GACL,MAAM;GACN;GACD;EACF,CAAC;AAGJ,QAAOG;;AAGT,MAAM,2BACJ,UACA,YACoC;CACpC,MAAM,WAAW,SAAS;CAC1B,MAAMA,WAAS,IAAI,OAAiC;AAEpD,oBAAmBH,SAAOG,SAAO;CACjC,MAAM,YAAY,oCAAoC,SAAS;AAC/D,UAAO,KACL;EACE,MAAM;EACN,OAAO;GACL,aAAa,UAAU;GACvB,eAAe,UAAU;GAC1B;EACD,OAAO,UAAU;EAClB,EACD,EAAE,MAAM,gBAAgB,CACzB;AACD,SAAM,mBAAmB;AACzB,QAAOA;;AAGT,MAAM,wBACJ,UACA,YACoC;CACpC,MAAM,WAAW,SAAS;CAC1B,MAAMA,WAAS,IAAI,OAAiC;AACpD,oBAAmBH,SAAOG,SAAO;CAEjC,MAAM,UACJ,SAAS,OAAO,WAAW;AAE7B,UAAO,KAAK,gBAAgB,QAAQ,CAAC;AACrC,SAAM,mBAAmB;AAEzB,QAAOA;;AAGT,MAAM,oBACJ,UACA,YACoC;CACpC,MAAM,UACJ,OAAO,SAAS,YAAY,WAC1B,SAAS,UACT;AAEJ,SAAM,mBAAmB;AACzB,QAAO,CAAC,gBAAgB,QAAQ,CAAC;;AAGnC,MAAM,8CACJ,OACA,SACA,WAA0C,EAAE,KACR;CACpC,MAAM,SAAS,MAAM;AAErB,oBAAmBH,SAAOG,SAAO;AACjC,SAAM,mBAAmB;AAEzB,UAAO,KAAK,gBAAgB,OAAO,CAAC;AAEpC,QAAOA;;AAGT,MAAM,gBACJ,SACA,aACoC;AACpC,SAAM,mBAAmB;CACzB,MAAM,oBAAoB,SAAS,OAAO,sBAAsB;CAChE,MAAM,eACH,SAAS,OAAO,gBAAgB,MAAM,qBAAqB;AAC9D,QAAO,CACL;EACE,MAAM;EACN,SAAS;GACP,IAAI,SAAS;GACb,MAAM;GACN,MAAM;GACN,SAAS,EAAE;GACX,OAAO,SAAS;GAChB,aAAa;GACb,eAAe;GACf,OAAO;IACL,cAAc;IACd,eAAe;IACf,yBAAyB,qBAAqB;IAC/C;GACF;EACF,CACF;;AAGH,MAAM,yBACJ,SACA,WAKW;CACX,MAAM,EAAE,aAAa,cAAc,qBAAW;CAC9C,MAAM,MAAM,YAAY,aAAa,aAAa;CAClD,IAAI,aAAaH,QAAM,gBAAgB,IAAI,IAAI;AAE/C,KAAI,eAAe,QAAW;AAC5B,eAAaA,QAAM;AACnB,UAAM,yBAAyB;AAC/B,UAAM,gBAAgB,IAAI,KAAK,WAAW;;AAG5C,KAAI,CAACA,QAAM,WAAW,IAAI,WAAW,EAAE;AACrC,kBAAgBA,SAAOG,SAAO;AAC9B,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,eAAe;IACb,MAAM;IACN,MAAM;IACP;GACF,CAAC;AACF,UAAM,WAAW,IAAI,WAAW;;AAGlC,QAAO;;AAGT,MAAM,6BACJ,SACA,aACA,aACW;CAGX,MAAM,MAAM,YAAY,aADH,EAC6B;CAClD,IAAI,aAAaH,QAAM,gBAAgB,IAAI,IAAI;AAE/C,KAAI,eAAe,QAAW;AAC5B,eAAaA,QAAM;AACnB,UAAM,yBAAyB;AAC/B,UAAM,gBAAgB,IAAI,KAAK,WAAW;;AAG5C,
KAAI,CAACA,QAAM,WAAW,IAAI,WAAW,EAAE;AACrC,kBAAgBA,SAAOG,SAAO;AAC9B,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,eAAe;IACb,MAAM;IACN,UAAU;IACX;GACF,CAAC;AACF,UAAM,WAAW,IAAI,WAAW;;AAGlC,QAAO;;AAGT,MAAM,oBACJ,SACA,YACA,aACG;AACH,KAAI,CAACH,QAAM,WAAW,IAAI,WAAW,CACnC;AAGF,UAAO,KAAK;EAAE,MAAM;EAAsB,OAAO;EAAY,CAAC;AAC9D,SAAM,WAAW,OAAO,WAAW;AACnC,SAAM,cAAc,OAAO,WAAW;;AAGxC,MAAM,mBACJ,SACA,aACG;AACH,MAAK,MAAM,cAAcA,QAAM,WAC7B,kBAAiBA,SAAO,YAAYG,SAAO;;AAI/C,MAAM,sBACJ,SACA,aACG;AACH,iBAAgBH,SAAOG,SAAO;AAE9B,SAAM,+BAA+B,OAAO;;AAG9C,MAAa,mBAAmB,aAA+C;CAC7E,MAAM;CACN,OAAO;EACL,MAAM;EACN;EACD;CACF;AAED,MAAM,eAAe,aAAqB,iBACxC,GAAG,YAAY,GAAG;AAEpB,MAAM,yBACJ,SACA,WAMW;CACX,MAAM,EAAE,aAAa,YAAY,MAAM,qBAAW;CAElD,IAAI,oBAAoBH,QAAM,+BAA+B,IAAI,YAAY;AAE7E,KAAI,CAAC,mBAAmB;EACtB,MAAMK,eAAaL,QAAM;AACzB,UAAM,yBAAyB;EAE/B,MAAM,qBAAqB,cAAc,aAAaK;AAGtD,sBAAoB;GAClB;GACA,YAAY;GACZ,MALmB,QAAQ;GAM3B,4BAA4B;GAC7B;AAED,UAAM,+BAA+B,IAAI,aAAa,kBAAkB;;CAG1E,MAAM,EAAE,eAAe;AAEvB,KAAI,CAACL,QAAM,WAAW,IAAI,WAAW,EAAE;AACrC,kBAAgBA,SAAOG,SAAO;AAC9B,WAAO,KAAK;GACV,MAAM;GACN,OAAO;GACP,eAAe;IACb,MAAM;IACN,IAAI,kBAAkB;IACtB,MAAM,kBAAkB;IACxB,OAAO,EAAE;IACV;GACF,CAAC;AACF,UAAM,WAAW,IAAI,WAAW;;AAGlC,QAAO;;AAUT,MAAM,8BACJ,aACoC;CACpC,MAAM,OAAO,SAAS;AAEtB,KADiB,KAAK,SACL,gBACf;CAGF,MAAM,cAAc,SAAS;CAC7B,MAAM,aAAa,KAAK;CACxB,MAAM,OAAO,KAAK;CAClB,MAAM,mBAAmB,KAAK;AAC9B,QAAO;EACL;EACA;EACA;EACA;EACD;;;;;AC7rBH,MAAa,8BACX,YACqD;CACrD,MAAM,SAAS,eAAe,QAAQ;CACtC,MAAM,YAAY,kBAAkB,QAAQ,GAAG,UAAU;AAEzD,QAAO;EAAE;EAAQ;EAAW;;AAG9B,MAAa,qBAAqB,YAAuC;CAEvE,MAAM,WAAW,gBAAgB,QAAQ,CAAC,GAAG,GAAG;AAChD,KAAI,CAAC,SACH,QAAO;AAET,KAAI,EAAE,UAAU,aAAa,CAAC,SAAS,KACrC,QAAO;AAIT,SADE,OAAO,SAAS,SAAS,WAAW,SAAS,KAAK,aAAa,GAAG,QACpD;;AAGlB,MAAa,kBAAkB,YAAuC;AAEpE,QADe,gBAAgB,QAAQ,CACzB,MAAM,SAAS,sBAAsB,KAAK,CAAC;;AAG3D,MAAM,mBACJ,YAC6B;CAC7B,MAAMG,SAAmC,EAAE;CAE3C,MAAM,EAAE,UAAU;AAElB,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,KAAK,GAAG,MAAM;AAGvB,QAAO;;AAGT,MAAM,yBAAyB,UAA4B;AACzD,KAAI,CAAC,MAAO,QAAO;AAEnB,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,MAAM,MAAM,UAAU,sBAAsB,MAAM,CAAC;AAG5D,KAAI,OAAO,UAAU,SACnB,QAAO;CAGT,MAAM,SAAS;AAIf,MAFE,OAAO,OAAO,SAAS,WAAW,OAAO,KAAK,aAAa,GAAG,YAEnD,cACX,QAAO;AAGT,KAAI,MAAM,QAAQ,OAAO,QAAQ,CAC/B,QAAO,OAAO,QAAQ,MAAM,UAAU,sBAAsB,MAAM,CAAC;AAGrE,QAAO;;;;;ACvDT,MAAa,iBAAiB,OAC5B,SACA,wBACkC;AAClC,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,QAAQ,SAAS,MACnC,YACC,MAAM,QAAQ,QAAQ,QAAQ,IAC3B,QAAQ,QAAQ,MAAM,UAAU,MAAM,SAAS,QAAQ,CAC7D;CAED,IAAI,oBAAoB;CACxB,MAAM,cAAc,QAAQ,SAAS,GAAG,GAAG;AAC3C,KAAI,aAAa,SAAS,OACxB,qBACE,MAAM,QAAQ,YAAY,QAAQ,GAChC,YAAY,QAAQ,MAAM,UAAU,MAAM,SAAS,cAAc,GACjE;CAGN,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,oBAAoB,SAAS;EAC7C;AAED,KAAI,oBACF,SAAQ,oBAAoB;UACnB,QAAQ,UAAU,cAC3B,SAAQ,oBAAoB;CAG9B,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,eAAe;EACnE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,6BAA6B,SAAS;AACpD,QAAM,IAAI,UAAU,6BAA6B,SAAS;;AAG5D,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;ACjD/B,SAAS,gBAAgB,SAAsC;AAC7D,KAAI,CAACC,QAAM,iBACT,QAAO;AAGT,QAAO,OAAO,OAAOA,QAAM,UAAU,CAAC,MACnC,OAAO,GAAG,wBAAwBA,QAAM,kBAC1C;;AAGH,SAAgB,gCACd,OACA,SACiC;CACjC,MAAMC,WAA0C,EAAE;AAElD,KAAI,MAAM,QAAQ,WAAW,EAC3B,QAAOC;CAGT,MAAM,SAAS,MAAM,QAAQ;CAC7B,MAAM,EAAE,UAAU;AAElB,oBAAmBF,SAAOE,UAAQ,MAAM;AAExC,oBAAmB,OAAOF,SAAOE,SAAO;AAExC,eAAc,OAAOF,SAAOE,SAAO;AAEnC,iBAAgB,OAAOF,SAAOE,SAAO;AAErC,cAAa,QAAQF,SAAO;EAAE;EAAQ;EAAO,CAAC;AAE9C,QAAOE;;AAGT,SAAS,aACP,QACA,SACA,SAIA;CACA,MAAM,EAAE,kBAAQ,UAAU;AAC1B,KAAI,OAAO,iBAAiB,OAAO,cAAc,SAAS,GAAG;AAC3D,MAAIF,QAAM,kBAAkB;GAC1B,MAAM,gBAAgB,gBAAgBA,QAAM;AAC5C,WAAQ,OAAO,KAAK
;IAClB,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM,mBAAmB;AACzB,WAAM;AACN,OAAI,CAAC,cACH,uBAAsB,OAAO,OAAOE,UAAQF,QAAM;;AAItD,WAAO,KACL;GACE,MAAM;GACN,OAAO;IACL,aAAa,+BAA+B,OAAO,cAAc;IACjE,eAAe;IAChB;GACD,OAAO;IACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;IAC1D,eAAe,MAAM,OAAO,qBAAqB;IACjD,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;IACF;GACF,EACD,EACE,MAAM,gBACP,CACF;;;AAIL,SAAS,gBACP,OACA,SACA,UACA;AACA,KAAI,MAAM,cAAc,MAAM,WAAW,SAAS,GAAG;AACnD,2BAAyBA,SAAOE,SAAO;AAEvC,mCAAiCF,SAAOE,UAAQ,MAAM;AAEtD,OAAK,MAAM,YAAY,MAAM,YAAY;AACvC,OAAI,SAAS,MAAM,SAAS,UAAU,MAAM;AAE1C,QAAIF,QAAM,kBAAkB;AAE1B,cAAO,KAAK;MACV,MAAM;MACN,OAAOA,QAAM;MACd,CAAC;AACF,aAAM;AACN,aAAM,mBAAmB;;IAG3B,MAAM,sBAAsBA,QAAM;AAClC,YAAM,UAAU,SAAS,SAAS;KAChC,IAAI,SAAS;KACb,MAAM,SAAS,SAAS;KACxB;KACD;AAED,aAAO,KAAK;KACV,MAAM;KACN,OAAO;KACP,eAAe;MACb,MAAM;MACN,IAAI,SAAS;MACb,MAAM,SAAS,SAAS;MACxB,OAAO,EAAE;MACV;KACF,CAAC;AACF,YAAM,mBAAmB;;AAG3B,OAAI,SAAS,UAAU,WAAW;IAChC,MAAM,eAAeA,QAAM,UAAU,SAAS;AAG9C,QAAI,aACF,UAAO,KAAK;KACV,MAAM;KACN,OAAO,aAAa;KACpB,OAAO;MACL,MAAM;MACN,cAAc,SAAS,SAAS;MACjC;KACF,CAAC;;;;;AAOZ,SAAS,iCACP,SACA,UACA,OACA;AACA,KAAIA,QAAM,oBAAoB,CAAC,gBAAgBA,QAAM,EAAE;AACrD,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACd,CAAC;AACF,UAAM;AACN,UAAM,mBAAmB;;AAE3B,uBAAsB,OAAOE,UAAQF,QAAM;;AAG7C,SAAS,cACP,OACA,SACA,UACA;AACA,KAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,2BAAyBA,SAAOE,SAAO;AAEvC,MAAI,gBAAgBF,QAAM,EAAE;AAE1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM;AACN,WAAM,mBAAmB;;AAG3B,MAAI,CAACA,QAAM,kBAAkB;AAC3B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,MAAM;KACP;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,MAAM,MAAM;IACb;GACF,CAAC;;AAIJ,KACE,MAAM,YAAY,MACf,MAAM,oBACN,MAAM,iBAAiB,SAAS,KAChCA,QAAM,mBACT;AACA,WAAO,KACL;GACE,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,WAAW,MAAM;IAClB;GACF,EACD;GACE,MAAM;GACN,OAAOA,QAAM;GACd,CACF;AACD,UAAM;AACN,UAAM,oBAAoB;;;AAI9B,SAAS,mBACP,SACA,UACA,OACA;AACA,KAAI,CAACA,QAAM,kBAAkB;AAC3B,WAAO,KAAK;GACV,MAAM;GACN,SAAS;IACP,IAAI,MAAM;IACV,MAAM;IACN,MAAM;IACN,SAAS,EAAE;IACX,OAAO,MAAM;IACb,aAAa;IACb,eAAe;IACf,OAAO;KACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;KAC1D,eAAe;KACf,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;KACF;IACF;GACF,CAAC;AACF,UAAM,mBAAmB;;;AAI7B,SAAS,sBACP,OACA,UACA,SACA;AACA,KAAI,MAAM,oBAAoB,MAAM,iBAAiB,SAAS,GAAG;AAC/D,WAAO,KACL;GACE,MAAM;GACN,OAAOA,QAAM;GACb,eAAe;IACb,MAAM;IACN,UAAU;IACX;GACF,EACD;GACE,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,UAAU;IACX;GACF,EACD;GACE,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,WAAW,MAAM;IAClB;GACF,EACD;GACE,MAAM;GACN,OAAOA,QAAM;GACd,CACF;AACD,UAAM;;;AAIV,SAAS,mBACP,OACA,SACA,UACA;AACA,KAAI,MAAM,kBAAkB,MAAM,eAAe,SAAS,GAAG;AAI3D,MAAIA,QAAM,kBAAkB;AAC1B,SAAM,UAAU,MAAM;AACtB,SAAM,iBAAiB;AACvB;;AAGF,MAAI,CAACA,QAAM,mBAAmB;AAC5B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,UAAU;KACX;IACF,CAAC;AACF,WAAM,oBAAoB;;AAG5B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,UAAU,MAAM;IACjB;GACF,CAAC;;;AAIN,SAAS,yBACP,SACA,UACM;AACN,KAAIA,QAAM,mBAAmB;AAC3B,WAAO,KACL;GACE,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,WAAW;IACZ;GACF,EACD;GACE,MAAM;GACN,OAAOA,QAAM;GACd,CACF;AACD,UAAM;AACN,UAAM,oBAAoB;;;;;;AC1U9B,MAAMG,WAAS,oBAAoB,mBAAmB;AAEtD,MAAM,2BACJ;AAEF,eAAsB,iBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;AACrE,UAAO,MAAM,8BAA8B,KAAK,UAAU,iBAAiB,CAAC;CAG5E,MAAM,YAAY,iBAAiB,iBAAiB;CAIpD,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;AACpD,UAAO,MAAM,0BAA0B,cAAc;CACrD,MAAM,UAAU,CAA
C,iBAAiB,SAAS,iBAAiB,MAAM,WAAW;AAC7E,KAAI,iBAAiB,WAAW,CAAC,UAC/B,kBAAiB,QAAQ,eAAe;AAG1C,KAAI,WAAW;AACb,WAAO,MAAM,uBAAuB,UAAU;AAC9C,MAAI,4BAA4B,CAC9B,kBAAiB,QAAQ,eAAe;OAQ1C,0BAAyB,iBAAiB;AAG5C,KAAI,MAAM,cACR,OAAM,eAAe;AAGvB,KAAI,qBAAqB,iBAAiB,MAAM,CAC9C,QAAO,MAAM,sBAAsB,GAAG,kBAAkB,cAAc;AAGxE,KAAI,sBAAsB,iBAAiB,MAAM,CAC/C,QAAO,MAAM,uBAAuB,GAAG,iBAAiB;AAG1D,QAAO,MAAM,0BAA0B,GAAG,iBAAiB;;AAG7D,MAAMC,uBAAqB;AAC3B,MAAM,oBAAoB;AAE1B,MAAM,4BAA4B,OAChC,GACA,qBACG;CACH,MAAM,gBAAgB,kBAAkB,iBAAiB;AACzD,UAAO,MACL,sCACA,KAAK,UAAU,cAAc,CAC9B;CAED,MAAM,WAAW,MAAM,sBAAsB,cAAc;AAE3D,KAAI,eAAe,SAAS,EAAE;AAC5B,WAAO,MACL,wCACA,KAAK,UAAU,SAAS,CACzB;EACD,MAAM,oBAAoB,qBAAqB,SAAS;AACxD,WAAO,MACL,kCACA,KAAK,UAAU,kBAAkB,CAClC;AACD,SAAO,EAAE,KAAK,kBAAkB;;AAGlC,UAAO,MAAM,kCAAkC;AAC/C,QAAO,UAAU,GAAG,OAAO,WAAW;EACpC,MAAMC,cAAoC;GACxC,kBAAkB;GAClB,mBAAmB;GACnB,kBAAkB;GAClB,WAAW,EAAE;GACb,mBAAmB;GACpB;AAED,aAAW,MAAM,YAAY,UAAU;AACrC,YAAO,MAAM,6BAA6B,KAAK,UAAU,SAAS,CAAC;AACnE,OAAI,SAAS,SAAS,SACpB;AAGF,OAAI,CAAC,SAAS,KACZ;GAGF,MAAM,QAAQ,KAAK,MAAM,SAAS,KAAK;GACvC,MAAMC,WAAS,gCAAgC,OAAO,YAAY;AAElE,QAAK,MAAM,SAASA,UAAQ;AAC1B,aAAO,MAAM,+BAA+B,KAAK,UAAU,MAAM,CAAC;AAClE,UAAM,OAAO,SAAS;KACpB,OAAO,MAAM;KACb,MAAM,KAAK,UAAU,MAAM;KAC5B,CAAC;;;GAGN;;AAGJ,MAAM,yBAAyB,OAC7B,GACA,qBACG;CACH,MAAM,mBACJ,6CAA6C,iBAAiB;AAChE,UAAO,MACL,iCACA,KAAK,UAAU,iBAAiB,CACjC;CAED,MAAM,EAAE,QAAQ,cAAc,2BAA2B,iBAAiB;CAC1E,MAAM,WAAW,MAAM,gBAAgB,kBAAkB;EACvD;EACA;EACD,CAAC;AAEF,KAAI,iBAAiB,UAAUC,kBAAgB,SAAS,EAAE;AACxD,WAAO,MAAM,kDAAkD;AAC/D,SAAO,UAAU,GAAG,OAAO,WAAW;GACpC,MAAM,cAAc,4BAA4B;AAEhD,cAAW,MAAM,SAAS,UAAU;AAElC,QADkB,MAAM,UACN,QAAQ;AACxB,WAAM,OAAO,SAAS;MAAE,OAAO;MAAQ,MAAM;MAAI,CAAC;AAClD;;IAGF,MAAM,OAAO,MAAM;AACnB,QAAI,CAAC,KACH;AAGF,aAAO,MAAM,+BAA+B,KAAK;IAEjD,MAAMD,WAAS,8BACb,KAAK,MAAM,KAAK,EAChB,YACD;AACD,SAAK,MAAM,SAASA,UAAQ;KAC1B,MAAM,YAAY,KAAK,UAAU,MAAM;AACvC,cAAO,MAAM,+BAA+B,UAAU;AACtD,WAAM,OAAO,SAAS;MACpB,OAAO,MAAM;MACb,MAAM;MACP,CAAC;;AAGJ,QAAI,YAAY,kBAAkB;AAChC,cAAO,MAAM,mCAAmC;AAChD;;;AAIJ,OAAI,CAAC,YAAY,kBAAkB;AACjC,aAAO,KACL,iEACD;IACD,MAAM,aAAa,gBACjB,4CACD;AACD,UAAM,OAAO,SAAS;KACpB,OAAO,WAAW;KAClB,MAAM,KAAK,UAAU,WAAW;KACjC,CAAC;;IAEJ;;AAGJ,UAAO,MACL,mCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;CACD,MAAM,oBAAoB,oCACxB,SACD;AACD,UAAO,MACL,kCACA,KAAK,UAAU,kBAAkB,CAClC;AACD,QAAO,EAAE,KAAK,kBAAkB;;AAGlC,MAAM,wBAAwB,OAC5B,GACA,kBACA,wBACG;AAGH,MAAK,MAAM,OAAO,iBAAiB,SACjC,KAAI,IAAI,SAAS,eAAe,MAAM,QAAQ,IAAI,QAAQ,CACxD,KAAI,UAAU,IAAI,QAAQ,QAAQ,UAAU;AAC1C,MAAI,MAAM,SAAS,WAAY,QAAO;AACtC,SACE,MAAM,YACH,MAAM,aAAa,iBACnB,MAAM,aACN,CAAC,MAAM,UAAU,SAAS,IAAI;GAEnC;CAIN,MAAM,WAAW,MAAM,eAAe,kBAAkB,oBAAoB;AAE5E,KAAIC,kBAAgB,SAAS,EAAE;AAC7B,WAAO,MAAM,iDAAiD;AAC9D,SAAO,UAAU,GAAG,OAAO,WAAW;AACpC,cAAW,MAAM,SAAS,UAAU;IAClC,MAAM,YAAY,MAAM;IACxB,MAAM,OAAO,MAAM,QAAQ;AAC3B,aAAO,MAAM,8BAA8B,KAAK;AAChD,UAAM,OAAO,SAAS;KACpB,OAAO;KACP;KACD,CAAC;;IAEJ;;AAGJ,UAAO,MACL,kCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;AACD,QAAO,EAAE,KAAK,SAAS;;AAGzB,MAAM,yBAAyB,YAA6B;AAE1D,SADsB,MAAM,QAAQ,KAAK,MAAM,UAAU,MAAM,OAAO,QAAQ,GAE7D,qBAAqB,SAASH,qBAAmB,IAAI;;AAIxE,MAAM,wBAAwB,YAA6B;AAEzD,SADsB,MAAM,QAAQ,KAAK,MAAM,UAAU,MAAM,OAAO,QAAQ,GAE7D,qBAAqB,SAAS,kBAAkB,IAAI;;AAIvE,MAAM,kBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;AAE3E,MAAMG,qBAAsB,UAC1B,QAAQ,MAAM,IACX,OAAQ,MAA2B,OAAO,mBAAmB;AAElE,MAAM,oBACJ,qBACY;CACZ,MAAM,SAAS,iBAAiB;AAChC,KAAI,OAAO,WAAW,SACpB,QAAO,OAAO,WAAW,yBAAyB;AAEpD,KAAI,CAAC,MAAM,QAAQ,OAAO,CAAE,QAAO;AAEnC,QAAO,OAAO,MACX,QACC,OAAO,IAAI,SAAS,YACjB,IAAI,KAAK,WAAW,yBAAyB,CACnD;;AAGH,MAAM,wBACJ,IACA,cAC6B;AAC7B,KAAI,OAAO,GAAG,YAAY,SACxB,QAAO;EAAE,GAAG;EAAI,SAAS,GAAG,GAAG,QAAQ,MAAM,UAAU;EAAQ;AAEjE,Q
AAO;EACL,GAAG;EACH,SAAS,CAAC,GAAG,GAAG,SAAS,UAAU;EACpC;;AAGH,MAAM,yBACJ,IACA,eAC6B;AAC7B,KAAI,OAAO,GAAG,YAAY,UAAU;EAClC,MAAM,gBAAgB,WAAW,KAAK,OAAO,GAAG,KAAK,CAAC,KAAK,OAAO;AAClE,SAAO;GAAE,GAAG;GAAI,SAAS,GAAG,GAAG,QAAQ,MAAM;GAAiB;;AAEhE,QAAO;EAAE,GAAG;EAAI,SAAS,CAAC,GAAG,GAAG,SAAS,GAAG,WAAW;EAAE;;AAG3D,MAAM,4BACJ,qBACS;AACT,MAAK,MAAM,OAAO,iBAAiB,UAAU;AAC3C,MAAI,IAAI,SAAS,UAAU,CAAC,MAAM,QAAQ,IAAI,QAAQ,CAAE;EAExD,MAAMC,cAA+C,EAAE;EACvD,MAAMC,aAAwC,EAAE;EAChD,IAAI,QAAQ;AAEZ,OAAK,MAAM,SAAS,IAAI,QACtB,KAAI,MAAM,SAAS,cACjB,aAAY,KAAK,MAAM;WACd,MAAM,SAAS,OACxB,YAAW,KAAK,MAAM;OACjB;AACL,WAAQ;AACR;;AAIJ,MAAI,CAAC,SAAS,YAAY,WAAW,KAAK,WAAW,WAAW,EAAG;AAEnE,MAAI,UAAU,gBAAgB,aAAa,WAAW;;;AAI1D,MAAM,mBACJ,aACA,eACoC;AAEpC,KAAI,YAAY,WAAW,WAAW,OACpC,QAAO,YAAY,KAAK,IAAI,MAAM,qBAAqB,IAAI,WAAW,GAAG,CAAC;CAI5E,MAAM,YAAY,YAAY,SAAS;AACvC,QAAO,YAAY,KAAK,IAAI,MAC1B,MAAM,YAAY,sBAAsB,IAAI,WAAW,GAAG,GAC3D;;;;;AC7XH,MAAa,gBAAgB,IAAI,MAAM;AAEvC,cAAc,KAAK,KAAK,OAAO,MAAM;AACnC,KAAI;AACF,SAAO,MAAM,iBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;AAEF,cAAc,KAAK,iBAAiB,OAAO,MAAM;AAC/C,KAAI;AACF,SAAO,MAAM,kBAAkB,EAAE;UAC1B,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,cAAc,IAAI,MAAM;AAErC,YAAY,IAAI,KAAK,OAAO,MAAM;AAChC,KAAI;AACF,MAAI,CAAC,MAAM,OAET,OAAM,aAAa;EAGrB,MAAM,SAAS,MAAM,QAAQ,KAAK,KAAK,WAAW;GAChD,IAAI,MAAM;GACV,QAAQ;GACR,MAAM;GACN,SAAS;GACT,6BAAY,IAAI,KAAK,EAAE,EAAC,aAAa;GACrC,UAAU,MAAM;GAChB,cAAc,MAAM;GACrB,EAAE;AAEH,SAAO,EAAE,KAAK;GACZ,QAAQ;GACR,MAAM;GACN,UAAU;GACX,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACRF,MAAa,+BAAgD,EAC3D,6BAAa,IAAI,KAAK,EACvB;AAED,MAAa,gBACX,MACA,OACA,YACW;AACX,KAAI,CAAC,KAAM,QAAO;CAClB,MAAM,SAAS,KAAK,MAAM,KAAK;AAC/B,SAAQ,OAAR;EACE,KAAK,6BACH,QAAO,sBACL,QACA,QACD;EAEH,KAAK,4BACH,QAAO,qBACL,QACA,QACD;EAEH,QACE,QAAO,aAAa,QAAQ,QAAQ;;;AAK1C,MAAM,yBACJ,QACA,YACW;AACX,KAAI,CAAC,OAAO,KAAK,IAAI;EACnB,IAAI,eAAe;AACnB,SAAO,aAAa,SAAS,GAC3B,iBAAgB,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE;AAErD,SAAO,KAAK,KAAK,MAAM,OAAO,aAAa,GAAG,aAAa,MAAM,GAAG,GAAG;;CAGzE,MAAM,cAAc,OAAO;AAC3B,SAAQ,YAAY,IAAI,aAAa,OAAO,KAAK,GAAG;AACpD,QAAO,KAAK,UAAU,OAAO;;AAG/B,MAAM,wBACJ,QACA,YACW;CACX,MAAM,cAAc,OAAO;CAC3B,MAAM,aAAa,QAAQ,YAAY,IAAI,YAAY;AACvD,KAAI,WACF,QAAO,KAAK,KAAK;AAEnB,QAAO,KAAK,UAAU,OAAO;;AAG/B,MAAM,gBACJ,QACA,YACW;CACX,MAAM,cAAc,OAAO;AAC3B,KAAI,gBAAgB,QAAW;EAC7B,MAAM,SAAS,QAAQ,YAAY,IAAI,YAAY;AACnD,MAAI,OACF,QAAO,UAAU;;AAGrB,QAAO,KAAK,UAAU,OAAO;;;;;AC7E/B,MAAMC,WAAS,oBAAoB,oBAAoB;AAEvD,MAAM,qBAAqB;AAE3B,MAAa,kBAAkB,OAAO,MAAe;AACnD,OAAM,eAAe,MAAM;CAE3B,MAAM,UAAU,MAAM,EAAE,IAAI,MAAwB;AACpD,UAAO,MAAM,8BAA8B,KAAK,UAAU,QAAQ,CAAC;AAEnE,uBAAsB,QAAQ;AAG9B,qBAAoB,QAAQ;AAQ5B,KAAI,GANkB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,QAAQ,MACjC,GAEgB,qBAAqB,SAAS,mBAAmB,IAAI,OAGpE,QAAO,EAAE,KACP,EACE,OAAO;EACL,SACE;EACF,MAAM;EACP,EACF,EACD,IACD;CAGH,MAAM,EAAE,QAAQ,cAAc,2BAA2B,QAAQ;AAEjE,KAAI,MAAM,cACR,OAAM,eAAe;CAGvB,MAAM,WAAW,MAAM,gBAAgB,SAAS;EAAE;EAAQ;EAAW,CAAC;AAEtE,KAAI,qBAAqB,QAAQ,IAAI,gBAAgB,SAAS,EAAE;AAC9D,WAAO,MAAM,qCAAqC;AAClD,SAAO,UAAU,GAAG,OAAO,WAAW;GACpC,MAAM,YAAY,uBAAuB;AAEzC,cAAW,MAAM,SAAS,UAAU;AAClC,aAAO,MAAM,2BAA2B,KAAK,UAAU,MAAM,CAAC;IAE9D,MAAM,gBAAgB,aACnB,MAA4B,QAAQ,IACpC,MAA6B,OAC9B,UACD;AAED,UAAM,OAAO,SAAS;KACpB,IAAK,MAA0B;KAC/B,OAAQ,MAA6B;KACrC,MAAM;KACP,CAAC;;IAEJ;;AAGJ,UAAO,MACL,uCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;AACD,QAAO,EAAE,KAAK,SAA4B;;AAG5C,MAAM,mBAAsB,UAC1B,QAAQ,MAAM,IACX,OAAQ,MAA2B,OAAO,mBAAmB;AAElE,MAAM,wBAAwB,YAC5B,QAAQ,QAAQ,OAAO;AAEzB,MAAM,yBAAyB,YAAoC;AAGjE,KAFe,WAAW,CACW,yBAAyB,MACnC;AACzB,WAAO,MAAM,gDAAgD;AAC7D,MAAI,MAAM,QAAQ,QAAQ,MAAM,EAAE;GAChC,MAAM,WAAW,QAAQ;AACzB,Q
AAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;IACxC,MAAM,IAAI,SAAS;AACnB,QAAI,EAAE,SAAS,YAAY,EAAE,SAAS,cACpC,UAAS,KAAK;KACZ,MAAM;KACN,MAAM,EAAE;KACR,aAAa;KACb,YAAY;MACV,MAAM;MACN,YAAY,EACV,OAAO;OACL,MAAM;OACN,aAAa;OACd,EACF;MACD,UAAU,CAAC,QAAQ;MACpB;KACD,QAAQ;KACT;;;;;AAOX,MAAM,uBAAuB,YAAoC;AAC/D,KAAI,CAAC,MAAM,QAAQ,QAAQ,MAAM,IAAI,QAAQ,MAAM,WAAW,EAAG;AAEjE,SAAQ,QAAQ,QAAQ,MAAM,QAAQ,MAAM;AAC1C,SAAO,EAAE,SAAS;GAClB;;;;;AChIJ,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;AACF,SAAO,MAAM,gBAAgB,EAAE;UACxB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,MAAM,MAAM;AACzB,KAAI;AACF,SAAO,EAAE,KAAK,EACZ,OAAO,MAAM,cACd,CAAC;UACK,OAAO;AACd,UAAQ,MAAM,yBAAyB,MAAM;AAC7C,SAAO,EAAE,KAAK;GAAE,OAAO;GAAyB,OAAO;GAAM,EAAE,IAAI;;EAErE;;;;ACXF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;EACF,MAAM,QAAQ,MAAM,iBAAiB;AACrC,SAAO,EAAE,KAAK,MAAM;UACb,OAAO;AACd,UAAQ,MAAM,iCAAiC,MAAM;AACrD,SAAO,EAAE,KAAK,EAAE,OAAO,iCAAiC,EAAE,IAAI;;EAEhE;;;;ACFF,MAAa,SAAS,IAAI,MAAM;AAEhC,OAAO,IAAI,QAAQ,CAAC;AACpB,OAAO,IAAI,MAAM,CAAC;AAElB,OAAO,IAAI,MAAM,MAAM,EAAE,KAAK,iBAAiB,CAAC;AAEhD,OAAO,MAAM,qBAAqB,iBAAiB;AACnD,OAAO,MAAM,WAAW,YAAY;AACpC,OAAO,MAAM,eAAe,gBAAgB;AAC5C,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,cAAc,gBAAgB;AAG3C,OAAO,MAAM,wBAAwB,iBAAiB;AACtD,OAAO,MAAM,cAAc,YAAY;AACvC,OAAO,MAAM,kBAAkB,gBAAgB;AAC/C,OAAO,MAAM,iBAAiB,gBAAgB;AAG9C,OAAO,MAAM,gBAAgB,cAAc"}
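The server.ts source embedded in the sourcesContent above mounts the OpenAI-style routes (/chat/completions, /models, /embeddings, /responses, /usage, /token), their /v1-prefixed aliases, and an Anthropic-compatible /v1/messages route. The sketch below shows how a client might call the chat-completions route once the proxy is running; the base URL, port, and model id are illustrative assumptions, not values taken from this diff.

// Sketch only: assumes the proxy listens on http://localhost:4141 (assumption)
// and that no extra auth header is needed for the local proxy.
const BASE_URL = "http://localhost:4141"

async function chat(prompt: string): Promise<string> {
  const res = await fetch(`${BASE_URL}/v1/chat/completions`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "gpt-4o", // illustrative; list the real ids via GET /v1/models
      messages: [{ role: "user", content: prompt }],
      stream: false,
    }),
  })
  if (!res.ok) throw new Error(`Proxy returned ${res.status}`)
  const data = (await res.json()) as {
    choices: Array<{ message: { content: string } }>
  }
  return data.choices[0].message.content
}

chat("Hello from the copilot-api proxy").then(console.log)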
|
package/package.json
ADDED
|
@@ -0,0 +1,68 @@
+{
+  "name": "@jeffreycao/copilot-api",
+  "version": "1.0.0",
+  "description": "Turn GitHub Copilot into OpenAI/Anthropic API compatible server. Usable with Claude Code Or Codex Or Opencode!",
+  "keywords": [
+    "proxy",
+    "github-copilot",
+    "openai-compatible"
+  ],
+  "homepage": "https://github.com/caozhiyuan/copilot-api/tree/all",
+  "bugs": "https://github.com/caozhiyuan/copilot-api/issues",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/caozhiyuan/copilot-api.git"
+  },
+  "author": "caozhiyuan",
+  "type": "module",
+  "bin": {
+    "copilot-api": "./dist/main.js"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsdown",
+    "dev": "bun run --watch ./src/main.ts",
+    "knip": "knip-bun",
+    "lint": "eslint --cache",
+    "lint:all": "eslint --cache .",
+    "prepack": "bun run build",
+    "prepare": "simple-git-hooks",
+    "release": "bumpp && bun publish --access public",
+    "start": "NODE_ENV=production bun run ./src/main.ts",
+    "typecheck": "tsc"
+  },
+  "simple-git-hooks": {
+    "pre-commit": "bunx lint-staged"
+  },
+  "lint-staged": {
+    "*": "bun run lint --fix"
+  },
+  "dependencies": {
+    "citty": "^0.1.6",
+    "clipboardy": "^5.0.0",
+    "consola": "^3.4.2",
+    "fetch-event-stream": "^0.1.5",
+    "gpt-tokenizer": "^3.0.1",
+    "hono": "^4.9.9",
+    "proxy-from-env": "^1.1.0",
+    "srvx": "^0.8.9",
+    "tiny-invariant": "^1.3.3",
+    "undici": "^7.16.0",
+    "zod": "^4.1.11"
+  },
+  "devDependencies": {
+    "@echristian/eslint-config": "^0.0.54",
+    "@types/bun": "^1.2.23",
+    "@types/proxy-from-env": "^1.0.4",
+    "bumpp": "^10.2.3",
+    "eslint": "^9.37.0",
+    "knip": "^5.64.1",
+    "lint-staged": "^16.2.3",
+    "prettier-plugin-packagejson": "^2.5.19",
+    "simple-git-hooks": "^2.13.1",
+    "tsdown": "^0.15.6",
+    "typescript": "^5.9.3"
+  }
+}
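The description above advertises Anthropic compatibility, which the server exposes as the /v1/messages route seen in the embedded server.ts. A hedged sketch of a client for that route follows; as before, the base URL, port, and model id are assumptions made for illustration only.

// Sketch only: base URL/port and model id are assumptions, not taken from this package.
const PROXY_URL = "http://localhost:4141"

async function anthropicMessage(prompt: string): Promise<unknown> {
  const res = await fetch(`${PROXY_URL}/v1/messages`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "claude-sonnet-4", // illustrative; use an id reported by GET /models
      max_tokens: 1024,
      messages: [{ role: "user", content: prompt }],
    }),
  })
  if (!res.ok) throw new Error(`Proxy returned ${res.status}`)
  // The proxy translates the Copilot response back into an
  // Anthropic-style message object with content blocks.
  return res.json()
}

anthropicMessage("Ping").then((msg) => console.log(JSON.stringify(msg)))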