@hsupu/copilot-api 0.7.0 → 0.7.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/main.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"main.js","names":["state: State","state","headers: Record<string, string>","errorJson: unknown","token","historyState: HistoryState","entry: HistoryEntry","start","modelDist: Record<string, number>","endpointDist: Record<string, number>","hourlyActivity: Record<string, number>","process","commandBlock: string","state","x","headers: Record<string, string>","handleCompletion","isNonStreaming","streamToolCalls: Array<{\n id: string\n name: string\n arguments: string\n }>","toolCallAccumulators: Map<\n number,\n { id: string; name: string; arguments: string }\n >","result: MessageContent","handleCompletion","options: QueryOptions","fixedMessages: Array<Message>","toolNameMapping: ToolNameMapping","shortNameMap: Record<string, string>","newMessages: Array<Message>","contentParts: Array<ContentPart>","allTextBlocks: Array<AnthropicTextBlock>","allToolUseBlocks: Array<AnthropicToolUseBlock>","stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null","input: Record<string, unknown>","state","events: Array<AnthropicStreamEventData>","events","streamState: AnthropicStreamState","streamToolCalls: Array<{\n id: string\n name: string\n input: string\n }>","currentToolCall: { id: string; name: string; input: string } | null","chunk: ChatCompletionChunk","events","contentBlocks: Array<{ type: string; text?: string }>","tools: Array<{ id: string; name: string; input: string }>"],"sources":["../src/lib/paths.ts","../src/lib/state.ts","../src/lib/api-config.ts","../src/lib/error.ts","../src/services/github/get-copilot-token.ts","../src/services/github/get-device-code.ts","../src/services/github/get-user.ts","../src/services/copilot/get-models.ts","../src/services/get-vscode-version.ts","../src/lib/utils.ts","../src/services/github/poll-access-token.ts","../src/lib/token.ts","../src/auth.ts","../src/services/github/get-copilot-usage.ts","../src/check-usage.ts","../src/debug.ts","../src/logout.ts","../src/lib/history.ts","../src/lib/proxy.ts","../src/lib/shell.ts","../src/lib/approval.ts","../src/lib/queue.ts","../src/lib/tokenizer.ts","../src/services/copilot/create-chat-completions.ts","../src/routes/chat-completions/handler.ts","../src/routes/chat-completions/route.ts","../src/services/copilot/create-embeddings.ts","../src/routes/embeddings/route.ts","../src/routes/event-logging/route.ts","../src/routes/history/api.ts","../src/routes/history/ui/script.ts","../src/routes/history/ui/styles.ts","../src/routes/history/ui/template.ts","../src/routes/history/ui.ts","../src/routes/history/route.ts","../src/routes/messages/utils.ts","../src/routes/messages/non-stream-translation.ts","../src/routes/messages/count-tokens-handler.ts","../src/routes/messages/stream-translation.ts","../src/routes/messages/handler.ts","../src/routes/messages/route.ts","../src/routes/models/route.ts","../src/routes/token/route.ts","../src/routes/usage/route.ts","../src/server.ts","../src/start.ts","../src/main.ts"],"sourcesContent":["import fs from \"node:fs/promises\"\nimport os from \"node:os\"\nimport path from \"node:path\"\n\nconst APP_DIR = path.join(os.homedir(), \".local\", \"share\", \"copilot-api\")\n\nconst GITHUB_TOKEN_PATH = path.join(APP_DIR, \"github_token\")\n\nexport const PATHS = {\n APP_DIR,\n GITHUB_TOKEN_PATH,\n}\n\nexport async function ensurePaths(): Promise<void> {\n await fs.mkdir(PATHS.APP_DIR, { recursive: true })\n await ensureFile(PATHS.GITHUB_TOKEN_PATH)\n}\n\nasync function ensureFile(filePath: string): Promise<void> {\n try {\n await fs.access(filePath, 
fs.constants.W_OK)\n // File exists, ensure it has secure permissions (owner read/write only)\n const stats = await fs.stat(filePath)\n const currentMode = stats.mode & 0o777\n if (currentMode !== 0o600) {\n await fs.chmod(filePath, 0o600)\n }\n } catch {\n await fs.writeFile(filePath, \"\")\n await fs.chmod(filePath, 0o600)\n }\n}\n","import type { ModelsResponse } from \"~/services/copilot/get-models\"\n\nexport interface State {\n githubToken?: string\n copilotToken?: string\n\n accountType: string\n models?: ModelsResponse\n vsCodeVersion?: string\n\n manualApprove: boolean\n rateLimitWait: boolean\n showToken: boolean\n\n // Rate limiting configuration\n rateLimitSeconds?: number\n lastRequestTimestamp?: number\n}\n\nexport const state: State = {\n accountType: \"individual\",\n manualApprove: false,\n rateLimitWait: false,\n showToken: false,\n}\n","import { randomUUID } from \"node:crypto\"\n\nimport type { State } from \"./state\"\n\nexport const standardHeaders = () => ({\n \"content-type\": \"application/json\",\n accept: \"application/json\",\n})\n\nconst COPILOT_VERSION = \"0.26.7\"\nconst EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`\nconst USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`\n\nconst API_VERSION = \"2025-04-01\"\n\nexport const copilotBaseUrl = (state: State) =>\n state.accountType === \"individual\" ?\n \"https://api.githubcopilot.com\"\n : `https://api.${state.accountType}.githubcopilot.com`\nexport const copilotHeaders = (state: State, vision: boolean = false) => {\n const headers: Record<string, string> = {\n Authorization: `Bearer ${state.copilotToken}`,\n \"content-type\": standardHeaders()[\"content-type\"],\n \"copilot-integration-id\": \"vscode-chat\",\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"openai-intent\": \"conversation-panel\",\n \"x-github-api-version\": API_VERSION,\n \"x-request-id\": randomUUID(),\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n }\n\n if (vision) headers[\"copilot-vision-request\"] = \"true\"\n\n return headers\n}\n\nexport const GITHUB_API_BASE_URL = \"https://api.github.com\"\nexport const githubHeaders = (state: State) => ({\n ...standardHeaders(),\n authorization: `token ${state.githubToken}`,\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"x-github-api-version\": API_VERSION,\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n})\n\nexport const GITHUB_BASE_URL = \"https://github.com\"\nexport const GITHUB_CLIENT_ID = \"Iv1.b507a08c87ecfe98\"\nexport const GITHUB_APP_SCOPES = [\"read:user\"].join(\" \")\n","import type { Context } from \"hono\"\nimport type { ContentfulStatusCode } from \"hono/utils/http-status\"\n\nimport consola from \"consola\"\n\nexport class HTTPError extends Error {\n status: number\n responseText: string\n\n constructor(message: string, status: number, responseText: string) {\n super(message)\n this.status = status\n this.responseText = responseText\n }\n\n static async fromResponse(\n message: string,\n response: Response,\n ): Promise<HTTPError> {\n const text = await response.text()\n return new HTTPError(message, response.status, text)\n }\n}\n\nexport async function forwardError(c: Context, error: unknown) {\n consola.error(\"Error occurred:\", error)\n\n if (error instanceof HTTPError) {\n let errorJson: unknown\n try {\n errorJson = 
JSON.parse(error.responseText)\n } catch {\n errorJson = error.responseText\n }\n consola.error(\"HTTP error:\", errorJson)\n return c.json(\n {\n error: {\n message: error.responseText,\n type: \"error\",\n },\n },\n error.status as ContentfulStatusCode,\n )\n }\n\n return c.json(\n {\n error: {\n message: (error as Error).message,\n type: \"error\",\n },\n },\n 500,\n )\n}\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotToken = async () => {\n const response = await fetch(\n `${GITHUB_API_BASE_URL}/copilot_internal/v2/token`,\n {\n headers: githubHeaders(state),\n },\n )\n\n if (!response.ok) throw await HTTPError.fromResponse(\"Failed to get Copilot token\", response)\n\n return (await response.json()) as GetCopilotTokenResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GetCopilotTokenResponse {\n expires_at: number\n refresh_in: number\n token: string\n}\n","import {\n GITHUB_APP_SCOPES,\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\n\nexport async function getDeviceCode(): Promise<DeviceCodeResponse> {\n const response = await fetch(`${GITHUB_BASE_URL}/login/device/code`, {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n scope: GITHUB_APP_SCOPES,\n }),\n })\n\n if (!response.ok) throw await HTTPError.fromResponse(\"Failed to get device code\", response)\n\n return (await response.json()) as DeviceCodeResponse\n}\n\nexport interface DeviceCodeResponse {\n device_code: string\n user_code: string\n verification_uri: string\n expires_in: number\n interval: number\n}\n","import { GITHUB_API_BASE_URL, standardHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport async function getGitHubUser() {\n const response = await fetch(`${GITHUB_API_BASE_URL}/user`, {\n headers: {\n authorization: `token ${state.githubToken}`,\n ...standardHeaders(),\n },\n })\n\n if (!response.ok) throw await HTTPError.fromResponse(\"Failed to get GitHub user\", response)\n\n return (await response.json()) as GithubUserResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GithubUserResponse {\n login: string\n}\n","import { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getModels = async () => {\n const response = await fetch(`${copilotBaseUrl(state)}/models`, {\n headers: copilotHeaders(state),\n })\n\n if (!response.ok) throw await HTTPError.fromResponse(\"Failed to get models\", response)\n\n return (await response.json()) as ModelsResponse\n}\n\nexport interface ModelsResponse {\n data: Array<Model>\n object: string\n}\n\ninterface ModelLimits {\n max_context_window_tokens?: number\n max_output_tokens?: number\n max_prompt_tokens?: number\n max_inputs?: number\n}\n\ninterface ModelSupports {\n tool_calls?: boolean\n parallel_tool_calls?: boolean\n dimensions?: boolean\n}\n\ninterface ModelCapabilities {\n family: string\n limits: ModelLimits\n object: string\n supports: ModelSupports\n tokenizer: string\n type: string\n}\n\nexport interface Model {\n capabilities: ModelCapabilities\n id: string\n model_picker_enabled: boolean\n name: string\n object: string\n preview: boolean\n vendor: string\n version: string\n policy?: {\n state: 
string\n terms: string\n }\n}\n","const FALLBACK = \"1.104.3\"\n\n// GitHub API endpoint for latest VSCode release\nconst GITHUB_API_URL = \"https://api.github.com/repos/microsoft/vscode/releases/latest\"\n\ninterface GitHubRelease {\n tag_name: string\n}\n\nexport async function getVSCodeVersion() {\n const controller = new AbortController()\n const timeout = setTimeout(() => {\n controller.abort()\n }, 5000)\n\n try {\n const response = await fetch(GITHUB_API_URL, {\n signal: controller.signal,\n headers: {\n Accept: \"application/vnd.github.v3+json\",\n \"User-Agent\": \"copilot-api\",\n },\n })\n\n if (!response.ok) {\n return FALLBACK\n }\n\n const release = (await response.json()) as GitHubRelease\n // tag_name is in format \"1.107.1\"\n const version = release.tag_name\n if (version && /^\\d+\\.\\d+\\.\\d+$/.test(version)) {\n return version\n }\n\n return FALLBACK\n } catch {\n return FALLBACK\n } finally {\n clearTimeout(timeout)\n }\n}\n","import consola from \"consola\"\n\nimport { getModels } from \"~/services/copilot/get-models\"\nimport { getVSCodeVersion } from \"~/services/get-vscode-version\"\n\nimport { state } from \"./state\"\n\nexport const sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms)\n })\n\nexport const isNullish = (value: unknown): value is null | undefined =>\n value === null || value === undefined\n\nexport async function cacheModels(): Promise<void> {\n const models = await getModels()\n state.models = models\n}\n\nexport const cacheVSCodeVersion = async () => {\n const response = await getVSCodeVersion()\n state.vsCodeVersion = response\n\n consola.info(`Using VSCode version: ${response}`)\n}\n","import consola from \"consola\"\n\nimport {\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { sleep } from \"~/lib/utils\"\n\nimport type { DeviceCodeResponse } from \"./get-device-code\"\n\nexport async function pollAccessToken(\n deviceCode: DeviceCodeResponse,\n): Promise<string> {\n // Interval is in seconds, we need to multiply by 1000 to get milliseconds\n // I'm also adding another second, just to be safe\n const sleepDuration = (deviceCode.interval + 1) * 1000\n consola.debug(`Polling access token with interval of ${sleepDuration}ms`)\n\n // Calculate expiration time based on expires_in from device code response\n const expiresAt = Date.now() + deviceCode.expires_in * 1000\n\n while (Date.now() < expiresAt) {\n const response = await fetch(\n `${GITHUB_BASE_URL}/login/oauth/access_token`,\n {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n device_code: deviceCode.device_code,\n grant_type: \"urn:ietf:params:oauth:grant-type:device_code\",\n }),\n },\n )\n\n if (!response.ok) {\n await sleep(sleepDuration)\n consola.error(\"Failed to poll access token:\", await response.text())\n\n continue\n }\n\n const json = await response.json()\n consola.debug(\"Polling access token response:\", json)\n\n const { access_token } = json as AccessTokenResponse\n\n if (access_token) {\n return access_token\n } else {\n await sleep(sleepDuration)\n }\n }\n\n throw new Error(\n \"Device code expired. 
Please run the authentication flow again.\",\n )\n}\n\ninterface AccessTokenResponse {\n access_token: string\n token_type: string\n scope: string\n}\n","import consola from \"consola\"\nimport fs from \"node:fs/promises\"\n\nimport { PATHS } from \"~/lib/paths\"\nimport { getCopilotToken } from \"~/services/github/get-copilot-token\"\nimport { getDeviceCode } from \"~/services/github/get-device-code\"\nimport { getGitHubUser } from \"~/services/github/get-user\"\nimport { pollAccessToken } from \"~/services/github/poll-access-token\"\n\nimport { HTTPError } from \"./error\"\nimport { state } from \"./state\"\n\nconst readGithubToken = () => fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n\nconst writeGithubToken = (token: string) =>\n fs.writeFile(PATHS.GITHUB_TOKEN_PATH, token)\n\nexport const setupCopilotToken = async () => {\n const { token, refresh_in } = await getCopilotToken()\n state.copilotToken = token\n\n // Display the Copilot token to the screen\n consola.debug(\"GitHub Copilot Token fetched successfully!\")\n if (state.showToken) {\n consola.info(\"Copilot token:\", token)\n }\n\n const refreshInterval = (refresh_in - 60) * 1000\n setInterval(async () => {\n consola.debug(\"Refreshing Copilot token\")\n try {\n const { token } = await getCopilotToken()\n state.copilotToken = token\n consola.debug(\"Copilot token refreshed\")\n if (state.showToken) {\n consola.info(\"Refreshed Copilot token:\", token)\n }\n } catch (error) {\n // Log error but don't throw - throwing in setInterval crashes the process\n // The existing token will continue to work until it expires\n // Next refresh attempt will try again\n consola.error(\"Failed to refresh Copilot token (will retry on next interval):\", error)\n }\n }, refreshInterval)\n}\n\ninterface SetupGitHubTokenOptions {\n force?: boolean\n}\n\nexport async function setupGitHubToken(\n options?: SetupGitHubTokenOptions,\n): Promise<void> {\n try {\n const githubToken = await readGithubToken()\n\n if (githubToken && !options?.force) {\n state.githubToken = githubToken\n if (state.showToken) {\n consola.info(\"GitHub token:\", githubToken)\n }\n await logUser()\n\n return\n }\n\n consola.info(\"Not logged in, getting new access token\")\n const response = await getDeviceCode()\n consola.debug(\"Device code response:\", response)\n\n consola.info(\n `Please enter the code \"${response.user_code}\" in ${response.verification_uri}`,\n )\n\n const token = await pollAccessToken(response)\n await writeGithubToken(token)\n state.githubToken = token\n\n if (state.showToken) {\n consola.info(\"GitHub token:\", token)\n }\n await logUser()\n } catch (error) {\n if (error instanceof HTTPError) {\n consola.error(\"Failed to get GitHub token:\", error.responseText)\n throw error\n }\n\n consola.error(\"Failed to get GitHub token:\", error)\n throw error\n }\n}\n\nasync function logUser() {\n const user = await getGitHubUser()\n consola.info(`Logged in as ${user.login}`)\n}\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { PATHS, ensurePaths } from \"./lib/paths\"\nimport { state } from \"./lib/state\"\nimport { setupGitHubToken } from \"./lib/token\"\n\ninterface RunAuthOptions {\n verbose: boolean\n showToken: boolean\n}\n\nexport async function runAuth(options: RunAuthOptions): Promise<void> {\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.showToken = options.showToken\n\n await ensurePaths()\n await setupGitHubToken({ force: true })\n 
consola.success(\"GitHub token written to\", PATHS.GITHUB_TOKEN_PATH)\n}\n\nexport const auth = defineCommand({\n meta: {\n name: \"auth\",\n description: \"Run GitHub auth flow without running the server\",\n },\n args: {\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub token on auth\",\n },\n },\n run({ args }) {\n return runAuth({\n verbose: args.verbose,\n showToken: args[\"show-token\"],\n })\n },\n})\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotUsage = async (): Promise<CopilotUsageResponse> => {\n const response = await fetch(`${GITHUB_API_BASE_URL}/copilot_internal/user`, {\n headers: githubHeaders(state),\n })\n\n if (!response.ok) {\n throw await HTTPError.fromResponse(\"Failed to get Copilot usage\", response)\n }\n\n return (await response.json()) as CopilotUsageResponse\n}\n\nexport interface QuotaDetail {\n entitlement: number\n overage_count: number\n overage_permitted: boolean\n percent_remaining: number\n quota_id: string\n quota_remaining: number\n remaining: number\n unlimited: boolean\n}\n\ninterface QuotaSnapshots {\n chat: QuotaDetail\n completions: QuotaDetail\n premium_interactions: QuotaDetail\n}\n\ninterface CopilotUsageResponse {\n access_type_sku: string\n analytics_tracking_id: string\n assigned_date: string\n can_signup_for_limited: boolean\n chat_enabled: boolean\n copilot_plan: string\n organization_login_list: Array<unknown>\n organization_list: Array<unknown>\n quota_reset_date: string\n quota_snapshots: QuotaSnapshots\n}\n","import { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { ensurePaths } from \"./lib/paths\"\nimport { setupGitHubToken } from \"./lib/token\"\nimport {\n getCopilotUsage,\n type QuotaDetail,\n} from \"./services/github/get-copilot-usage\"\n\nexport const checkUsage = defineCommand({\n meta: {\n name: \"check-usage\",\n description: \"Show current GitHub Copilot usage/quota information\",\n },\n async run() {\n await ensurePaths()\n await setupGitHubToken()\n try {\n const usage = await getCopilotUsage()\n const premium = usage.quota_snapshots.premium_interactions\n const premiumTotal = premium.entitlement\n const premiumUsed = premiumTotal - premium.remaining\n const premiumPercentUsed =\n premiumTotal > 0 ? (premiumUsed / premiumTotal) * 100 : 0\n const premiumPercentRemaining = premium.percent_remaining\n\n // Helper to summarize a quota snapshot\n function summarizeQuota(name: string, snap: QuotaDetail | undefined) {\n if (!snap) return `${name}: N/A`\n const total = snap.entitlement\n const used = total - snap.remaining\n const percentUsed = total > 0 ? 
(used / total) * 100 : 0\n const percentRemaining = snap.percent_remaining\n return `${name}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`\n }\n\n const premiumLine = `Premium: ${premiumUsed}/${premiumTotal} used (${premiumPercentUsed.toFixed(1)}% used, ${premiumPercentRemaining.toFixed(1)}% remaining)`\n const chatLine = summarizeQuota(\"Chat\", usage.quota_snapshots.chat)\n const completionsLine = summarizeQuota(\n \"Completions\",\n usage.quota_snapshots.completions,\n )\n\n consola.box(\n `Copilot Usage (plan: ${usage.copilot_plan})\\n`\n + `Quota resets: ${usage.quota_reset_date}\\n`\n + `\\nQuotas:\\n`\n + ` ${premiumLine}\\n`\n + ` ${chatLine}\\n`\n + ` ${completionsLine}`,\n )\n } catch (err) {\n consola.error(\"Failed to fetch Copilot usage:\", err)\n process.exit(1)\n }\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\nimport fs from \"node:fs/promises\"\nimport os from \"node:os\"\n\nimport { PATHS } from \"./lib/paths\"\n\ninterface DebugInfo {\n version: string\n runtime: {\n name: string\n version: string\n platform: string\n arch: string\n }\n paths: {\n APP_DIR: string\n GITHUB_TOKEN_PATH: string\n }\n tokenExists: boolean\n}\n\ninterface RunDebugOptions {\n json: boolean\n}\n\nasync function getPackageVersion(): Promise<string> {\n try {\n const packageJsonPath = new URL(\"../package.json\", import.meta.url).pathname\n // @ts-expect-error https://github.com/sindresorhus/eslint-plugin-unicorn/blob/v59.0.1/docs/rules/prefer-json-parse-buffer.md\n // JSON.parse() can actually parse buffers\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath)) as {\n version: string\n }\n return packageJson.version\n } catch {\n return \"unknown\"\n }\n}\n\nfunction getRuntimeInfo() {\n const isBun = typeof Bun !== \"undefined\"\n\n return {\n name: isBun ? \"bun\" : \"node\",\n version: isBun ? Bun.version : process.version.slice(1),\n platform: os.platform(),\n arch: os.arch(),\n }\n}\n\nasync function checkTokenExists(): Promise<boolean> {\n try {\n const stats = await fs.stat(PATHS.GITHUB_TOKEN_PATH)\n if (!stats.isFile()) return false\n\n const content = await fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n return content.trim().length > 0\n } catch {\n return false\n }\n}\n\nasync function getDebugInfo(): Promise<DebugInfo> {\n const [version, tokenExists] = await Promise.all([\n getPackageVersion(),\n checkTokenExists(),\n ])\n\n return {\n version,\n runtime: getRuntimeInfo(),\n paths: {\n APP_DIR: PATHS.APP_DIR,\n GITHUB_TOKEN_PATH: PATHS.GITHUB_TOKEN_PATH,\n },\n tokenExists,\n }\n}\n\nfunction printDebugInfoPlain(info: DebugInfo): void {\n consola.info(`copilot-api debug\n\nVersion: ${info.version}\nRuntime: ${info.runtime.name} ${info.runtime.version} (${info.runtime.platform} ${info.runtime.arch})\n\nPaths:\n- APP_DIR: ${info.paths.APP_DIR}\n- GITHUB_TOKEN_PATH: ${info.paths.GITHUB_TOKEN_PATH}\n\nToken exists: ${info.tokenExists ? 
\"Yes\" : \"No\"}`)\n}\n\nfunction printDebugInfoJson(info: DebugInfo): void {\n console.log(JSON.stringify(info, null, 2))\n}\n\nexport async function runDebug(options: RunDebugOptions): Promise<void> {\n const debugInfo = await getDebugInfo()\n\n if (options.json) {\n printDebugInfoJson(debugInfo)\n } else {\n printDebugInfoPlain(debugInfo)\n }\n}\n\nexport const debug = defineCommand({\n meta: {\n name: \"debug\",\n description: \"Print debug information about the application\",\n },\n args: {\n json: {\n type: \"boolean\",\n default: false,\n description: \"Output debug information as JSON\",\n },\n },\n run({ args }) {\n return runDebug({\n json: args.json,\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\nimport fs from \"node:fs/promises\"\n\nimport { PATHS } from \"./lib/paths\"\n\nexport async function runLogout(): Promise<void> {\n try {\n await fs.unlink(PATHS.GITHUB_TOKEN_PATH)\n consola.success(\"Logged out successfully. GitHub token removed.\")\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === \"ENOENT\") {\n consola.info(\"No token found. Already logged out.\")\n } else {\n consola.error(\"Failed to remove token:\", error)\n throw error\n }\n }\n}\n\nexport const logout = defineCommand({\n meta: {\n name: \"logout\",\n description: \"Remove stored GitHub token and log out\",\n },\n run() {\n return runLogout()\n },\n})\n","// History recording module for API requests/responses\n// Supports full message content, session grouping, and rich querying\n\n// Simple ID generator (no external deps)\nfunction generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 9)\n}\n\n// Message types for full content storage\nexport interface MessageContent {\n role: string\n content:\n | string\n | Array<{ type: string; text?: string; [key: string]: unknown }>\n tool_calls?: Array<{\n id: string\n type: string\n function: { name: string; arguments: string }\n }>\n tool_call_id?: string\n name?: string\n}\n\nexport interface ToolDefinition {\n name: string\n description?: string\n}\n\nexport interface HistoryEntry {\n id: string\n sessionId: string // Group related requests together\n timestamp: number\n endpoint: \"anthropic\" | \"openai\"\n\n request: {\n model: string\n messages: Array<MessageContent> // Full message history\n stream: boolean\n tools?: Array<ToolDefinition>\n max_tokens?: number\n temperature?: number\n system?: string // System prompt (for Anthropic)\n }\n\n response?: {\n success: boolean\n model: string\n usage: {\n input_tokens: number\n output_tokens: number\n cache_read_input_tokens?: number\n }\n stop_reason?: string\n error?: string\n content: MessageContent | null // Full response content\n toolCalls?: Array<{\n id: string\n name: string\n input: string\n }>\n }\n\n durationMs?: number\n}\n\nexport interface Session {\n id: string\n startTime: number\n lastActivity: number\n requestCount: number\n totalInputTokens: number\n totalOutputTokens: number\n models: Array<string>\n endpoint: \"anthropic\" | \"openai\"\n}\n\nexport interface HistoryState {\n enabled: boolean\n entries: Array<HistoryEntry>\n sessions: Map<string, Session>\n currentSessionId: string\n maxEntries: number\n sessionTimeoutMs: number // New session after this idle time\n}\n\nexport interface QueryOptions {\n page?: number\n limit?: number\n model?: string\n endpoint?: \"anthropic\" | \"openai\"\n success?: boolean\n from?: number\n to?: number\n search?: string\n sessionId?: 
string\n}\n\nexport interface HistoryResult {\n entries: Array<HistoryEntry>\n total: number\n page: number\n limit: number\n totalPages: number\n}\n\nexport interface SessionResult {\n sessions: Array<Session>\n total: number\n}\n\nexport interface HistoryStats {\n totalRequests: number\n successfulRequests: number\n failedRequests: number\n totalInputTokens: number\n totalOutputTokens: number\n averageDurationMs: number\n modelDistribution: Record<string, number>\n endpointDistribution: Record<string, number>\n recentActivity: Array<{ hour: string; count: number }>\n activeSessions: number\n}\n\n// Global history state\nexport const historyState: HistoryState = {\n enabled: false,\n entries: [],\n sessions: new Map(),\n currentSessionId: \"\",\n maxEntries: 1000,\n sessionTimeoutMs: 30 * 60 * 1000, // 30 minutes\n}\n\nexport function initHistory(enabled: boolean, maxEntries: number): void {\n historyState.enabled = enabled\n historyState.maxEntries = maxEntries\n historyState.entries = []\n historyState.sessions = new Map()\n historyState.currentSessionId = enabled ? generateId() : \"\"\n}\n\nexport function isHistoryEnabled(): boolean {\n return historyState.enabled\n}\n\n// Get or create current session\nfunction getCurrentSession(endpoint: \"anthropic\" | \"openai\"): string {\n const now = Date.now()\n\n // Check if current session is still active\n if (historyState.currentSessionId) {\n const session = historyState.sessions.get(historyState.currentSessionId)\n if (session && now - session.lastActivity < historyState.sessionTimeoutMs) {\n session.lastActivity = now\n return historyState.currentSessionId\n }\n }\n\n // Create new session\n const sessionId = generateId()\n historyState.currentSessionId = sessionId\n historyState.sessions.set(sessionId, {\n id: sessionId,\n startTime: now,\n lastActivity: now,\n requestCount: 0,\n totalInputTokens: 0,\n totalOutputTokens: 0,\n models: [],\n endpoint,\n })\n\n return sessionId\n}\n\nexport interface RecordRequestParams {\n model: string\n messages: Array<MessageContent>\n stream: boolean\n tools?: Array<ToolDefinition>\n max_tokens?: number\n temperature?: number\n system?: string\n}\n\nexport function recordRequest(\n endpoint: \"anthropic\" | \"openai\",\n request: RecordRequestParams,\n): string {\n if (!historyState.enabled) {\n return \"\"\n }\n\n const sessionId = getCurrentSession(endpoint)\n const session = historyState.sessions.get(sessionId)\n if (!session) {\n return \"\"\n }\n\n const entry: HistoryEntry = {\n id: generateId(),\n sessionId,\n timestamp: Date.now(),\n endpoint,\n request: {\n model: request.model,\n messages: request.messages,\n stream: request.stream,\n tools: request.tools,\n max_tokens: request.max_tokens,\n temperature: request.temperature,\n system: request.system,\n },\n }\n\n historyState.entries.push(entry)\n session.requestCount++\n\n if (!session.models.includes(request.model)) {\n session.models.push(request.model)\n }\n\n // Enforce max entries limit (FIFO)\n while (historyState.entries.length > historyState.maxEntries) {\n const removed = historyState.entries.shift()\n // Clean up empty sessions\n if (removed) {\n const sessionEntries = historyState.entries.filter(\n (e) => e.sessionId === removed.sessionId,\n )\n if (sessionEntries.length === 0) {\n historyState.sessions.delete(removed.sessionId)\n }\n }\n }\n\n return entry.id\n}\n\nexport interface RecordResponseParams {\n success: boolean\n model: string\n usage: {\n input_tokens: number\n output_tokens: number\n cache_read_input_tokens?: 
number\n }\n stop_reason?: string\n error?: string\n content: MessageContent | null\n toolCalls?: Array<{\n id: string\n name: string\n input: string\n }>\n}\n\nexport function recordResponse(\n id: string,\n response: RecordResponseParams,\n durationMs: number,\n): void {\n if (!historyState.enabled || !id) {\n return\n }\n\n const entry = historyState.entries.find((e) => e.id === id)\n if (entry) {\n entry.response = response\n entry.durationMs = durationMs\n\n // Update session stats\n const session = historyState.sessions.get(entry.sessionId)\n if (session) {\n session.totalInputTokens += response.usage.input_tokens\n session.totalOutputTokens += response.usage.output_tokens\n session.lastActivity = Date.now()\n }\n }\n}\n\nexport function getHistory(options: QueryOptions = {}): HistoryResult {\n const {\n page = 1,\n limit = 50,\n model,\n endpoint,\n success,\n from,\n to,\n search,\n sessionId,\n } = options\n\n let filtered = [...historyState.entries]\n\n // Apply filters\n if (sessionId) {\n filtered = filtered.filter((e) => e.sessionId === sessionId)\n }\n\n if (model) {\n const modelLower = model.toLowerCase()\n filtered = filtered.filter(\n (e) =>\n e.request.model.toLowerCase().includes(modelLower)\n || e.response?.model.toLowerCase().includes(modelLower),\n )\n }\n\n if (endpoint) {\n filtered = filtered.filter((e) => e.endpoint === endpoint)\n }\n\n if (success !== undefined) {\n filtered = filtered.filter((e) => e.response?.success === success)\n }\n\n if (from) {\n filtered = filtered.filter((e) => e.timestamp >= from)\n }\n\n if (to) {\n filtered = filtered.filter((e) => e.timestamp <= to)\n }\n\n if (search) {\n const searchLower = search.toLowerCase()\n filtered = filtered.filter((e) => {\n // Search in messages\n const msgMatch = e.request.messages.some((m) => {\n if (typeof m.content === \"string\") {\n return m.content.toLowerCase().includes(searchLower)\n }\n if (Array.isArray(m.content)) {\n return m.content.some(\n (c) => c.text && c.text.toLowerCase().includes(searchLower),\n )\n }\n return false\n })\n\n // Search in response content\n const respMatch =\n e.response?.content\n && typeof e.response.content.content === \"string\"\n && e.response.content.content.toLowerCase().includes(searchLower)\n\n // Search in tool names\n const toolMatch = e.response?.toolCalls?.some((t) =>\n t.name.toLowerCase().includes(searchLower),\n )\n\n // Search in system prompt\n const sysMatch = e.request.system?.toLowerCase().includes(searchLower)\n\n return msgMatch || respMatch || toolMatch || sysMatch\n })\n }\n\n // Sort by timestamp descending (newest first)\n filtered.sort((a, b) => b.timestamp - a.timestamp)\n\n const total = filtered.length\n const totalPages = Math.ceil(total / limit)\n const start = (page - 1) * limit\n const entries = filtered.slice(start, start + limit)\n\n return {\n entries,\n total,\n page,\n limit,\n totalPages,\n }\n}\n\nexport function getEntry(id: string): HistoryEntry | undefined {\n return historyState.entries.find((e) => e.id === id)\n}\n\nexport function getSessions(): SessionResult {\n const sessions = Array.from(historyState.sessions.values()).sort(\n (a, b) => b.lastActivity - a.lastActivity,\n )\n\n return {\n sessions,\n total: sessions.length,\n }\n}\n\nexport function getSession(id: string): Session | undefined {\n return historyState.sessions.get(id)\n}\n\nexport function getSessionEntries(sessionId: string): Array<HistoryEntry> {\n return historyState.entries\n .filter((e) => e.sessionId === sessionId)\n .sort((a, b) => a.timestamp - 
b.timestamp) // Chronological order for sessions\n}\n\nexport function clearHistory(): void {\n historyState.entries = []\n historyState.sessions = new Map()\n historyState.currentSessionId = generateId()\n}\n\nexport function deleteSession(sessionId: string): boolean {\n if (!historyState.sessions.has(sessionId)) {\n return false\n }\n\n historyState.entries = historyState.entries.filter(\n (e) => e.sessionId !== sessionId,\n )\n historyState.sessions.delete(sessionId)\n\n if (historyState.currentSessionId === sessionId) {\n historyState.currentSessionId = generateId()\n }\n\n return true\n}\n\nexport function getStats(): HistoryStats {\n const entries = historyState.entries\n\n const modelDist: Record<string, number> = {}\n const endpointDist: Record<string, number> = {}\n const hourlyActivity: Record<string, number> = {}\n\n let totalInput = 0\n let totalOutput = 0\n let totalDuration = 0\n let durationCount = 0\n let successCount = 0\n let failCount = 0\n\n for (const entry of entries) {\n // Model distribution\n const model = entry.response?.model || entry.request.model\n modelDist[model] = (modelDist[model] || 0) + 1\n\n // Endpoint distribution\n endpointDist[entry.endpoint] = (endpointDist[entry.endpoint] || 0) + 1\n\n // Hourly activity (last 24 hours)\n const hour = new Date(entry.timestamp).toISOString().slice(0, 13)\n hourlyActivity[hour] = (hourlyActivity[hour] || 0) + 1\n\n if (entry.response) {\n if (entry.response.success) {\n successCount++\n } else {\n failCount++\n }\n\n totalInput += entry.response.usage.input_tokens\n totalOutput += entry.response.usage.output_tokens\n }\n\n if (entry.durationMs) {\n totalDuration += entry.durationMs\n durationCount++\n }\n }\n\n // Convert hourly activity to sorted array (last 24 entries)\n const recentActivity = Object.entries(hourlyActivity)\n .sort(([a], [b]) => a.localeCompare(b))\n .slice(-24)\n .map(([hour, count]) => ({ hour, count }))\n\n // Count active sessions (activity within timeout period)\n const now = Date.now()\n let activeSessions = 0\n for (const session of historyState.sessions.values()) {\n if (now - session.lastActivity < historyState.sessionTimeoutMs) {\n activeSessions++\n }\n }\n\n return {\n totalRequests: entries.length,\n successfulRequests: successCount,\n failedRequests: failCount,\n totalInputTokens: totalInput,\n totalOutputTokens: totalOutput,\n averageDurationMs: durationCount > 0 ? totalDuration / durationCount : 0,\n modelDistribution: modelDist,\n endpointDistribution: endpointDist,\n recentActivity,\n activeSessions,\n }\n}\n\nexport function exportHistory(format: \"json\" | \"csv\" = \"json\"): string {\n if (format === \"json\") {\n return JSON.stringify(\n {\n sessions: Array.from(historyState.sessions.values()),\n entries: historyState.entries,\n },\n null,\n 2,\n )\n }\n\n // CSV format - simplified view\n const headers = [\n \"id\",\n \"session_id\",\n \"timestamp\",\n \"endpoint\",\n \"request_model\",\n \"message_count\",\n \"stream\",\n \"success\",\n \"response_model\",\n \"input_tokens\",\n \"output_tokens\",\n \"duration_ms\",\n \"stop_reason\",\n \"error\",\n ]\n\n const rows = historyState.entries.map((e) => [\n e.id,\n e.sessionId,\n new Date(e.timestamp).toISOString(),\n e.endpoint,\n e.request.model,\n e.request.messages.length,\n e.request.stream,\n e.response?.success ?? \"\",\n e.response?.model ?? \"\",\n e.response?.usage.input_tokens ?? \"\",\n e.response?.usage.output_tokens ?? \"\",\n e.durationMs ?? \"\",\n e.response?.stop_reason ?? \"\",\n e.response?.error ?? 
\"\",\n ])\n\n return [headers.join(\",\"), ...rows.map((r) => r.join(\",\"))].join(\"\\n\")\n}\n","import consola from \"consola\"\nimport { getProxyForUrl } from \"proxy-from-env\"\nimport { Agent, ProxyAgent, setGlobalDispatcher, type Dispatcher } from \"undici\"\n\nexport function initProxyFromEnv(): void {\n if (typeof Bun !== \"undefined\") return\n\n try {\n const direct = new Agent()\n const proxies = new Map<string, ProxyAgent>()\n\n // We only need a minimal dispatcher that implements `dispatch` at runtime.\n // Typing the object as `Dispatcher` forces TypeScript to require many\n // additional methods. Instead, keep a plain object and cast when passing\n // to `setGlobalDispatcher`.\n const dispatcher = {\n dispatch(\n options: Dispatcher.DispatchOptions,\n handler: Dispatcher.DispatchHandler,\n ) {\n try {\n const origin =\n typeof options.origin === \"string\" ?\n new URL(options.origin)\n : (options.origin as URL)\n const get = getProxyForUrl as unknown as (\n u: string,\n ) => string | undefined\n const raw = get(origin.toString())\n const proxyUrl = raw && raw.length > 0 ? raw : undefined\n if (!proxyUrl) {\n consola.debug(`HTTP proxy bypass: ${origin.hostname}`)\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n let agent = proxies.get(proxyUrl)\n if (!agent) {\n agent = new ProxyAgent(proxyUrl)\n proxies.set(proxyUrl, agent)\n }\n let label = proxyUrl\n try {\n const u = new URL(proxyUrl)\n label = `${u.protocol}//${u.host}`\n } catch {\n /* noop */\n }\n consola.debug(`HTTP proxy route: ${origin.hostname} via ${label}`)\n return (agent as unknown as Dispatcher).dispatch(options, handler)\n } catch {\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n },\n close() {\n return direct.close()\n },\n destroy() {\n return direct.destroy()\n },\n }\n\n setGlobalDispatcher(dispatcher as unknown as Dispatcher)\n consola.debug(\"HTTP proxy configured from environment (per-URL)\")\n } catch (err) {\n consola.debug(\"Proxy setup skipped:\", err)\n }\n}\n","import { execSync } from \"node:child_process\"\nimport process from \"node:process\"\n\ntype ShellName = \"bash\" | \"zsh\" | \"fish\" | \"powershell\" | \"cmd\" | \"sh\"\ntype EnvVars = Record<string, string | undefined>\n\nfunction getShell(): ShellName {\n const { platform, ppid, env } = process\n\n if (platform === \"win32\") {\n try {\n const command = `wmic process get ParentProcessId,Name | findstr \"${ppid}\"`\n const parentProcess = execSync(command, { stdio: \"pipe\" }).toString()\n\n if (parentProcess.toLowerCase().includes(\"powershell.exe\")) {\n return \"powershell\"\n }\n } catch {\n return \"cmd\"\n }\n\n return \"cmd\"\n } else {\n const shellPath = env.SHELL\n if (shellPath) {\n if (shellPath.endsWith(\"zsh\")) return \"zsh\"\n if (shellPath.endsWith(\"fish\")) return \"fish\"\n if (shellPath.endsWith(\"bash\")) return \"bash\"\n }\n\n return \"sh\"\n }\n}\n\n/**\n * Generates a copy-pasteable script to set multiple environment variables\n * and run a subsequent command.\n * @param {EnvVars} envVars - An object of environment variables to set.\n * @param {string} commandToRun - The command to run after setting the variables.\n * @returns {string} The formatted script string.\n */\nexport function generateEnvScript(\n envVars: EnvVars,\n commandToRun: string = \"\",\n): string {\n const shell = getShell()\n const filteredEnvVars = Object.entries(envVars).filter(\n ([, value]) => value !== undefined,\n ) as Array<[string, string]>\n\n let commandBlock: string\n\n 
switch (shell) {\n case \"powershell\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `$env:${key} = \"${value.replace(/\"/g, '`\"')}\"`)\n .join(\"; \")\n break\n }\n case \"cmd\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set ${key}=${value}`)\n .join(\" & \")\n break\n }\n case \"fish\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set -gx ${key} \"${value.replace(/\"/g, '\\\\\"')}\"`)\n .join(\"; \")\n break\n }\n default: {\n // bash, zsh, sh\n const assignments = filteredEnvVars\n .map(([key, value]) => `${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`)\n .join(\" \")\n commandBlock = filteredEnvVars.length > 0 ? `export ${assignments}` : \"\"\n break\n }\n }\n\n if (commandBlock && commandToRun) {\n const separator = shell === \"cmd\" ? \" & \" : \" && \"\n return `${commandBlock}${separator}${commandToRun}`\n }\n\n return commandBlock || commandToRun\n}\n","import consola from \"consola\"\n\nimport { HTTPError } from \"./error\"\n\nexport const awaitApproval = async () => {\n const response = await consola.prompt(`Accept incoming request?`, {\n type: \"confirm\",\n })\n\n if (!response)\n throw new HTTPError(\n \"Request rejected\",\n 403,\n JSON.stringify({ message: \"Request rejected\" }),\n )\n}\n","import consola from \"consola\"\n\nimport type { State } from \"./state\"\n\ninterface QueuedRequest<T> {\n execute: () => Promise<T>\n resolve: (value: T) => void\n reject: (error: unknown) => void\n}\n\n// Simple request queue for rate limiting\n// Instead of rejecting requests, queue them and process sequentially\nclass RequestQueue {\n private queue: Array<QueuedRequest<unknown>> = []\n private processing = false\n private lastRequestTime = 0\n\n async enqueue<T>(\n execute: () => Promise<T>,\n rateLimitSeconds: number,\n ): Promise<T> {\n return new Promise((resolve, reject) => {\n this.queue.push({\n execute: execute as () => Promise<unknown>,\n resolve: resolve as (value: unknown) => void,\n reject,\n })\n\n if (this.queue.length > 1) {\n const waitTime = Math.ceil((this.queue.length - 1) * rateLimitSeconds)\n consola.info(\n `Request queued. 
Position: ${this.queue.length}, estimated wait: ${waitTime}s`,\n )\n }\n\n void this.processQueue(rateLimitSeconds)\n })\n }\n\n private async processQueue(rateLimitSeconds: number): Promise<void> {\n if (this.processing) return\n this.processing = true\n\n while (this.queue.length > 0) {\n const now = Date.now()\n const elapsedMs = now - this.lastRequestTime\n const requiredMs = rateLimitSeconds * 1000\n\n if (this.lastRequestTime > 0 && elapsedMs < requiredMs) {\n const waitMs = requiredMs - elapsedMs\n consola.debug(`Rate limit: waiting ${Math.ceil(waitMs / 1000)}s`)\n await new Promise((resolve) => setTimeout(resolve, waitMs))\n }\n\n const request = this.queue.shift()\n if (!request) break\n\n this.lastRequestTime = Date.now()\n\n try {\n const result = await request.execute()\n request.resolve(result)\n } catch (error) {\n request.reject(error)\n }\n }\n\n this.processing = false\n }\n\n get length(): number {\n return this.queue.length\n }\n}\n\nconst requestQueue = new RequestQueue()\n\n/**\n * Execute a request with rate limiting via queue.\n * Requests are queued and processed sequentially at the configured rate.\n */\nexport async function executeWithRateLimit<T>(\n state: State,\n execute: () => Promise<T>,\n): Promise<T> {\n // If no rate limit configured, execute immediately\n if (state.rateLimitSeconds === undefined) {\n return execute()\n }\n\n return requestQueue.enqueue(execute, state.rateLimitSeconds)\n}\n\nexport { requestQueue }\n","import type {\n ChatCompletionsPayload,\n ContentPart,\n Message,\n Tool,\n ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\nimport type { Model } from \"~/services/copilot/get-models\"\n\n// Encoder type mapping\nconst ENCODING_MAP = {\n o200k_base: () => import(\"gpt-tokenizer/encoding/o200k_base\"),\n cl100k_base: () => import(\"gpt-tokenizer/encoding/cl100k_base\"),\n p50k_base: () => import(\"gpt-tokenizer/encoding/p50k_base\"),\n p50k_edit: () => import(\"gpt-tokenizer/encoding/p50k_edit\"),\n r50k_base: () => import(\"gpt-tokenizer/encoding/r50k_base\"),\n} as const\n\ntype SupportedEncoding = keyof typeof ENCODING_MAP\n\n// Define encoder interface\ninterface Encoder {\n encode: (text: string) => Array<number>\n}\n\n// Cache loaded encoders to avoid repeated imports\nconst encodingCache = new Map<string, Encoder>()\n\n/**\n * Calculate tokens for tool calls\n */\nconst calculateToolCallsTokens = (\n toolCalls: Array<ToolCall>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = 0\n for (const toolCall of toolCalls) {\n tokens += constants.funcInit\n tokens += encoder.encode(JSON.stringify(toolCall)).length\n }\n tokens += constants.funcEnd\n return tokens\n}\n\n/**\n * Calculate tokens for content parts\n */\nconst calculateContentPartsTokens = (\n contentParts: Array<ContentPart>,\n encoder: Encoder,\n): number => {\n let tokens = 0\n for (const part of contentParts) {\n if (part.type === \"image_url\") {\n // Image URLs incur ~85 tokens overhead for the image processing metadata\n // This is an approximation based on OpenAI's image token calculation\n tokens += encoder.encode(part.image_url.url).length + 85\n } else if (part.text) {\n tokens += encoder.encode(part.text).length\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for a single message\n */\nconst calculateMessageTokens = (\n message: Message,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n // Each message incurs 3 tokens overhead for role/metadata framing\n 
// Based on OpenAI's token counting methodology\n const tokensPerMessage = 3\n // Additional token when a \"name\" field is present\n const tokensPerName = 1\n let tokens = tokensPerMessage\n for (const [key, value] of Object.entries(message)) {\n if (typeof value === \"string\") {\n tokens += encoder.encode(value).length\n }\n if (key === \"name\") {\n tokens += tokensPerName\n }\n if (key === \"tool_calls\") {\n tokens += calculateToolCallsTokens(\n value as Array<ToolCall>,\n encoder,\n constants,\n )\n }\n if (key === \"content\" && Array.isArray(value)) {\n tokens += calculateContentPartsTokens(\n value as Array<ContentPart>,\n encoder,\n )\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens using custom algorithm\n */\nconst calculateTokens = (\n messages: Array<Message>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (messages.length === 0) {\n return 0\n }\n let numTokens = 0\n for (const message of messages) {\n numTokens += calculateMessageTokens(message, encoder, constants)\n }\n // every reply is primed with <|start|>assistant<|message|> (3 tokens)\n numTokens += 3\n return numTokens\n}\n\n/**\n * Get the corresponding encoder module based on encoding type\n */\nconst getEncodeChatFunction = async (encoding: string): Promise<Encoder> => {\n if (encodingCache.has(encoding)) {\n const cached = encodingCache.get(encoding)\n if (cached) {\n return cached\n }\n }\n\n const supportedEncoding = encoding as SupportedEncoding\n if (!(supportedEncoding in ENCODING_MAP)) {\n const fallbackModule = (await ENCODING_MAP.o200k_base()) as Encoder\n encodingCache.set(encoding, fallbackModule)\n return fallbackModule\n }\n\n const encodingModule = (await ENCODING_MAP[supportedEncoding]()) as Encoder\n encodingCache.set(encoding, encodingModule)\n return encodingModule\n}\n\n/**\n * Get tokenizer type from model information\n */\nexport const getTokenizerFromModel = (model: Model): string => {\n return model.capabilities.tokenizer || \"o200k_base\"\n}\n\n/**\n * Get model-specific constants for token calculation.\n * These values are empirically determined based on OpenAI's function calling token overhead.\n * - funcInit: Tokens for initializing a function definition\n * - propInit: Tokens for initializing the properties section\n * - propKey: Tokens per property key\n * - enumInit: Token adjustment when enum is present (negative because type info is replaced)\n * - enumItem: Tokens per enum value\n * - funcEnd: Tokens for closing the function definition\n */\nconst getModelConstants = (model: Model) => {\n return model.id === \"gpt-3.5-turbo\" || model.id === \"gpt-4\" ?\n {\n funcInit: 10,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n : {\n funcInit: 7,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n}\n\n/**\n * Calculate tokens for a single parameter\n */\nconst calculateParameterTokens = (\n key: string,\n prop: unknown,\n context: {\n encoder: Encoder\n constants: ReturnType<typeof getModelConstants>\n },\n): number => {\n const { encoder, constants } = context\n let tokens = constants.propKey\n\n // Early return if prop is not an object\n if (typeof prop !== \"object\" || prop === null) {\n return tokens\n }\n\n // Type assertion for parameter properties\n const param = prop as {\n type?: string\n description?: string\n enum?: Array<unknown>\n [key: string]: unknown\n }\n\n const paramName = key\n const paramType = param.type || \"string\"\n let paramDesc = param.description 
|| \"\"\n\n // Handle enum values\n if (param.enum && Array.isArray(param.enum)) {\n tokens += constants.enumInit\n for (const item of param.enum) {\n tokens += constants.enumItem\n tokens += encoder.encode(String(item)).length\n }\n }\n\n // Clean up description\n if (paramDesc.endsWith(\".\")) {\n paramDesc = paramDesc.slice(0, -1)\n }\n\n // Encode the main parameter line\n const line = `${paramName}:${paramType}:${paramDesc}`\n tokens += encoder.encode(line).length\n\n // Handle additional properties (excluding standard ones)\n const excludedKeys = new Set([\"type\", \"description\", \"enum\"])\n for (const propertyName of Object.keys(param)) {\n if (!excludedKeys.has(propertyName)) {\n const propertyValue = param[propertyName]\n const propertyText =\n typeof propertyValue === \"string\" ? propertyValue : (\n JSON.stringify(propertyValue)\n )\n tokens += encoder.encode(`${propertyName}:${propertyText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for function parameters\n */\nconst calculateParametersTokens = (\n parameters: unknown,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (!parameters || typeof parameters !== \"object\") {\n return 0\n }\n\n const params = parameters as Record<string, unknown>\n let tokens = 0\n\n for (const [key, value] of Object.entries(params)) {\n if (key === \"properties\") {\n const properties = value as Record<string, unknown>\n if (Object.keys(properties).length > 0) {\n tokens += constants.propInit\n for (const propKey of Object.keys(properties)) {\n tokens += calculateParameterTokens(propKey, properties[propKey], {\n encoder,\n constants,\n })\n }\n }\n } else {\n const paramText =\n typeof value === \"string\" ? value : JSON.stringify(value)\n tokens += encoder.encode(`${key}:${paramText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for a single tool\n */\nconst calculateToolTokens = (\n tool: Tool,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = constants.funcInit\n const func = tool.function\n const fName = func.name\n let fDesc = func.description || \"\"\n if (fDesc.endsWith(\".\")) {\n fDesc = fDesc.slice(0, -1)\n }\n const line = fName + \":\" + fDesc\n tokens += encoder.encode(line).length\n if (\n typeof func.parameters === \"object\" // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n && func.parameters !== null\n ) {\n tokens += calculateParametersTokens(func.parameters, encoder, constants)\n }\n return tokens\n}\n\n/**\n * Calculate token count for tools based on model\n */\nexport const numTokensForTools = (\n tools: Array<Tool>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let funcTokenCount = 0\n for (const tool of tools) {\n funcTokenCount += calculateToolTokens(tool, encoder, constants)\n }\n funcTokenCount += constants.funcEnd\n return funcTokenCount\n}\n\n/**\n * Calculate the token count of messages, supporting multiple GPT encoders\n */\nexport const getTokenCount = async (\n payload: ChatCompletionsPayload,\n model: Model,\n): Promise<{ input: number; output: number }> => {\n // Get tokenizer string\n const tokenizer = getTokenizerFromModel(model)\n\n // Get corresponding encoder module\n const encoder = await getEncodeChatFunction(tokenizer)\n\n const simplifiedMessages = payload.messages\n const inputMessages = simplifiedMessages.filter(\n (msg) => msg.role !== \"assistant\",\n )\n const outputMessages = 
simplifiedMessages.filter(\n (msg) => msg.role === \"assistant\",\n )\n\n const constants = getModelConstants(model)\n let inputTokens = calculateTokens(inputMessages, encoder, constants)\n if (payload.tools && payload.tools.length > 0) {\n inputTokens += numTokensForTools(payload.tools, encoder, constants)\n }\n const outputTokens = calculateTokens(outputMessages, encoder, constants)\n\n return {\n input: inputTokens,\n output: outputTokens,\n }\n}\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createChatCompletions = async (\n payload: ChatCompletionsPayload,\n) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = payload.messages.some(\n (x) =>\n typeof x.content !== \"string\"\n && x.content?.some((x) => x.type === \"image_url\"),\n )\n\n // Agent/user check for X-Initiator header\n // Determine if any message is from an agent (\"assistant\" or \"tool\")\n const isAgentCall = payload.messages.some((msg) =>\n [\"assistant\", \"tool\"].includes(msg.role),\n )\n\n // Build headers and add X-Initiator\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isAgentCall ? \"agent\" : \"user\",\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create chat completions\", response)\n throw await HTTPError.fromResponse(\"Failed to create chat completions\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ChatCompletionResponse\n}\n\n// Streaming types\n\nexport interface ChatCompletionChunk {\n id: string\n object: \"chat.completion.chunk\"\n created: number\n model: string\n choices: Array<Choice>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n completion_tokens_details?: {\n accepted_prediction_tokens: number\n rejected_prediction_tokens: number\n }\n }\n}\n\ninterface Delta {\n content?: string | null\n role?: \"user\" | \"assistant\" | \"system\" | \"tool\"\n tool_calls?: Array<{\n index: number\n id?: string\n type?: \"function\"\n function?: {\n name?: string\n arguments?: string\n }\n }>\n}\n\ninterface Choice {\n index: number\n delta: Delta\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null\n logprobs: object | null\n}\n\n// Non-streaming types\n\nexport interface ChatCompletionResponse {\n id: string\n object: \"chat.completion\"\n created: number\n model: string\n choices: Array<ChoiceNonStreaming>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n }\n}\n\ninterface ResponseMessage {\n role: \"assistant\"\n content: string | null\n tool_calls?: Array<ToolCall>\n}\n\ninterface ChoiceNonStreaming {\n index: number\n message: ResponseMessage\n logprobs: object | null\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\"\n}\n\n// Payload types\n\nexport interface ChatCompletionsPayload {\n messages: Array<Message>\n model: string\n temperature?: number | null\n top_p?: number | 
null\n max_tokens?: number | null\n stop?: string | Array<string> | null\n n?: number | null\n stream?: boolean | null\n\n frequency_penalty?: number | null\n presence_penalty?: number | null\n logit_bias?: Record<string, number> | null\n logprobs?: boolean | null\n response_format?: { type: \"json_object\" } | null\n seed?: number | null\n tools?: Array<Tool> | null\n tool_choice?:\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } }\n | null\n user?: string | null\n}\n\nexport interface Tool {\n type: \"function\"\n function: {\n name: string\n description?: string\n parameters: Record<string, unknown>\n }\n}\n\nexport interface Message {\n role: \"user\" | \"assistant\" | \"system\" | \"tool\" | \"developer\"\n content: string | Array<ContentPart> | null\n\n name?: string\n tool_calls?: Array<ToolCall>\n tool_call_id?: string\n}\n\nexport interface ToolCall {\n id: string\n type: \"function\"\n function: {\n name: string\n arguments: string\n }\n}\n\nexport type ContentPart = TextPart | ImagePart\n\nexport interface TextPart {\n type: \"text\"\n text: string\n}\n\nexport interface ImagePart {\n type: \"image_url\"\n image_url: {\n url: string\n detail?: \"low\" | \"high\" | \"auto\"\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE, type SSEMessage } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport {\n type MessageContent,\n recordRequest,\n recordResponse,\n} from \"~/lib/history\"\nimport { executeWithRateLimit } from \"~/lib/queue\"\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\nimport { isNullish } from \"~/lib/utils\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n} from \"~/services/copilot/create-chat-completions\"\n\nexport async function handleCompletion(c: Context) {\n const startTime = Date.now()\n let payload = await c.req.json<ChatCompletionsPayload>()\n consola.debug(\"Request payload:\", JSON.stringify(payload).slice(-400))\n\n // Record request to history with full messages\n const historyId = recordRequest(\"openai\", {\n model: payload.model,\n messages: convertOpenAIMessages(payload.messages),\n stream: payload.stream ?? false,\n tools: payload.tools?.map((t) => ({\n name: t.function.name,\n description: t.function.description,\n })),\n max_tokens: payload.max_tokens ?? undefined,\n temperature: payload.temperature ?? 
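The createChatCompletions service above derives two request properties from the payload itself: vision support is enabled when any message contains an image_url content part, and the X-Initiator header is sent as "agent" when any message comes from an assistant or tool role, otherwise "user". A hedged standalone sketch of those two checks, with the message shape narrowed to just the fields actually inspected:

interface ProbeMessage {
  role: "user" | "assistant" | "system" | "tool" | "developer"
  content: string | Array<{ type: string }> | null
}

// Mirrors the enableVision / isAgentCall checks in the service above.
function deriveRequestFlags(messages: Array<ProbeMessage>) {
  const enableVision = messages.some(
    (m) =>
      typeof m.content !== "string"
      && Array.isArray(m.content)
      && m.content.some((part) => part.type === "image_url"),
  )
  const isAgentCall = messages.some((m) =>
    ["assistant", "tool"].includes(m.role),
  )
  return { enableVision, initiator: isAgentCall ? "agent" : "user" }
}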
undefined,\n })\n\n // Find the selected model\n const selectedModel = state.models?.data.find(\n (model) => model.id === payload.model,\n )\n\n // Calculate and display token count\n try {\n if (selectedModel) {\n const tokenCount = await getTokenCount(payload, selectedModel)\n consola.info(\"Current token count:\", tokenCount)\n } else {\n consola.warn(\"No model selected, skipping token count calculation\")\n }\n } catch (error) {\n consola.warn(\"Failed to calculate token count:\", error)\n }\n\n if (state.manualApprove) await awaitApproval()\n\n if (isNullish(payload.max_tokens)) {\n payload = {\n ...payload,\n max_tokens: selectedModel?.capabilities.limits.max_output_tokens,\n }\n consola.debug(\"Set max_tokens to:\", JSON.stringify(payload.max_tokens))\n }\n\n try {\n // Use queue-based rate limiting\n const response = await executeWithRateLimit(state, () =>\n createChatCompletions(payload),\n )\n\n if (isNonStreaming(response)) {\n consola.debug(\"Non-streaming response:\", JSON.stringify(response))\n\n // Record response to history with full content\n const choice = response.choices[0]\n recordResponse(\n historyId,\n {\n success: true,\n model: response.model,\n usage: {\n input_tokens: response.usage?.prompt_tokens ?? 0,\n output_tokens: response.usage?.completion_tokens ?? 0,\n },\n stop_reason: choice?.finish_reason ?? undefined,\n content:\n choice?.message ?\n {\n role: choice.message.role,\n content:\n typeof choice.message.content === \"string\" ?\n choice.message.content\n : JSON.stringify(choice.message.content),\n tool_calls: choice.message.tool_calls?.map((tc) => ({\n id: tc.id,\n type: tc.type,\n function: {\n name: tc.function.name,\n arguments: tc.function.arguments,\n },\n })),\n }\n : null,\n toolCalls: choice?.message?.tool_calls?.map((tc) => ({\n id: tc.id,\n name: tc.function.name,\n input: tc.function.arguments,\n })),\n },\n Date.now() - startTime,\n )\n\n return c.json(response)\n }\n\n consola.debug(\"Streaming response\")\n return streamSSE(c, async (stream) => {\n // Accumulate stream data for history\n let streamModel = \"\"\n let streamInputTokens = 0\n let streamOutputTokens = 0\n let streamFinishReason = \"\"\n let streamContent = \"\"\n const streamToolCalls: Array<{\n id: string\n name: string\n arguments: string\n }> = []\n const toolCallAccumulators: Map<\n number,\n { id: string; name: string; arguments: string }\n > = new Map()\n\n try {\n for await (const chunk of response) {\n consola.debug(\"Streaming chunk:\", JSON.stringify(chunk))\n\n // Parse chunk data for history\n if (chunk.data && chunk.data !== \"[DONE]\") {\n try {\n const parsed = JSON.parse(chunk.data) as ChatCompletionChunk\n if (parsed.model && !streamModel) {\n streamModel = parsed.model\n }\n if (parsed.usage) {\n streamInputTokens = parsed.usage.prompt_tokens\n streamOutputTokens = parsed.usage.completion_tokens\n }\n const choice = parsed.choices[0]\n if (choice?.delta?.content) {\n streamContent += choice.delta.content\n }\n if (choice?.delta?.tool_calls) {\n for (const tc of choice.delta.tool_calls) {\n const idx = tc.index\n if (!toolCallAccumulators.has(idx)) {\n toolCallAccumulators.set(idx, {\n id: tc.id || \"\",\n name: tc.function?.name || \"\",\n arguments: \"\",\n })\n }\n const acc = toolCallAccumulators.get(idx)\n if (acc) {\n if (tc.id) acc.id = tc.id\n if (tc.function?.name) acc.name = tc.function.name\n if (tc.function?.arguments)\n acc.arguments += tc.function.arguments\n }\n }\n }\n if (choice?.finish_reason) {\n streamFinishReason = choice.finish_reason\n 
}\n } catch {\n // Ignore parse errors for history\n }\n }\n\n await stream.writeSSE(chunk as SSEMessage)\n }\n\n // Collect accumulated tool calls\n for (const tc of toolCallAccumulators.values()) {\n if (tc.id && tc.name) {\n streamToolCalls.push({\n id: tc.id,\n name: tc.name,\n arguments: tc.arguments,\n })\n }\n }\n\n // Build content for history\n const toolCallsForContent = streamToolCalls.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: { name: tc.name, arguments: tc.arguments },\n }))\n\n // Record streaming response to history with full content\n recordResponse(\n historyId,\n {\n success: true,\n model: streamModel || payload.model,\n usage: {\n input_tokens: streamInputTokens,\n output_tokens: streamOutputTokens,\n },\n stop_reason: streamFinishReason || undefined,\n content: {\n role: \"assistant\",\n content: streamContent || undefined,\n tool_calls:\n toolCallsForContent.length > 0 ?\n toolCallsForContent\n : undefined,\n },\n toolCalls:\n streamToolCalls.length > 0 ?\n streamToolCalls.map((tc) => ({\n id: tc.id,\n name: tc.name,\n input: tc.arguments,\n }))\n : undefined,\n },\n Date.now() - startTime,\n )\n } catch (error) {\n // Record error to history\n recordResponse(\n historyId,\n {\n success: false,\n model: streamModel || payload.model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Stream error\",\n content: null,\n },\n Date.now() - startTime,\n )\n throw error\n }\n })\n } catch (error) {\n // Record error to history\n recordResponse(\n historyId,\n {\n success: false,\n model: payload.model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Unknown error\",\n content: null,\n },\n Date.now() - startTime,\n )\n throw error\n }\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n\n// Convert OpenAI messages to history MessageContent format\nfunction convertOpenAIMessages(\n messages: ChatCompletionsPayload[\"messages\"],\n): Array<MessageContent> {\n return messages.map((msg) => {\n const result: MessageContent = {\n role: msg.role,\n content:\n typeof msg.content === \"string\" ?\n msg.content\n : JSON.stringify(msg.content),\n }\n\n // Handle tool calls in assistant messages\n if (\"tool_calls\" in msg && msg.tool_calls) {\n result.tool_calls = msg.tool_calls.map((tc) => ({\n id: tc.id,\n type: tc.type,\n function: {\n name: tc.function.name,\n arguments: tc.function.arguments,\n },\n }))\n }\n\n // Handle tool result messages\n if (\"tool_call_id\" in msg && msg.tool_call_id) {\n result.tool_call_id = msg.tool_call_id\n }\n\n // Handle function name\n if (\"name\" in msg && msg.name) {\n result.name = msg.name\n }\n\n return result\n })\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCompletion } from \"./handler\"\n\nexport const completionRoutes = new Hono()\n\ncompletionRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createEmbeddings = async (payload: EmbeddingRequest) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const response = await 
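In the streaming branch above, tool calls arrive as fragments: each delta carries an index plus optional id, name and argument pieces, and the handler merges them into one accumulator per index before recording them to history. A compact sketch of that accumulation pattern with local stand-in types:

interface ToolCallDelta {
  index: number
  id?: string
  function?: { name?: string; arguments?: string }
}
interface AccumulatedToolCall { id: string; name: string; arguments: string }

// Merge streaming deltas keyed by choice index, as the handler above does.
function accumulateToolCalls(
  deltas: Iterable<ToolCallDelta>,
): Array<AccumulatedToolCall> {
  const byIndex = new Map<number, AccumulatedToolCall>()
  for (const tc of deltas) {
    const acc = byIndex.get(tc.index) ?? { id: "", name: "", arguments: "" }
    if (tc.id) acc.id = tc.id
    if (tc.function?.name) acc.name = tc.function.name
    if (tc.function?.arguments) acc.arguments += tc.function.arguments
    byIndex.set(tc.index, acc)
  }
  // Only calls that resolved both an id and a name are kept, mirroring the source.
  return [...byIndex.values()].filter((tc) => tc.id && tc.name)
}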
fetch(`${copilotBaseUrl(state)}/embeddings`, {\n method: \"POST\",\n headers: copilotHeaders(state),\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) throw await HTTPError.fromResponse(\"Failed to create embeddings\", response)\n\n return (await response.json()) as EmbeddingResponse\n}\n\nexport interface EmbeddingRequest {\n input: string | Array<string>\n model: string\n}\n\nexport interface Embedding {\n object: string\n embedding: Array<number>\n index: number\n}\n\nexport interface EmbeddingResponse {\n object: string\n data: Array<Embedding>\n model: string\n usage: {\n prompt_tokens: number\n total_tokens: number\n }\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport {\n createEmbeddings,\n type EmbeddingRequest,\n} from \"~/services/copilot/create-embeddings\"\n\nexport const embeddingRoutes = new Hono()\n\nembeddingRoutes.post(\"/\", async (c) => {\n try {\n const payload = await c.req.json<EmbeddingRequest>()\n const response = await createEmbeddings(payload)\n\n return c.json(response)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nexport const eventLoggingRoutes = new Hono()\n\n// Anthropic SDK sends telemetry to this endpoint\n// Return 200 OK to prevent errors in the SDK\neventLoggingRoutes.post(\"/batch\", (c) => {\n return c.text(\"OK\", 200)\n})\n","import type { Context } from \"hono\"\n\nimport {\n clearHistory,\n deleteSession,\n exportHistory,\n getEntry,\n getHistory,\n getSession,\n getSessionEntries,\n getSessions,\n getStats,\n isHistoryEnabled,\n type QueryOptions,\n} from \"~/lib/history\"\n\nexport function handleGetEntries(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const query = c.req.query()\n const options: QueryOptions = {\n page: query.page ? Number.parseInt(query.page, 10) : undefined,\n limit: query.limit ? Number.parseInt(query.limit, 10) : undefined,\n model: query.model || undefined,\n endpoint: query.endpoint as \"anthropic\" | \"openai\" | undefined,\n success: query.success ? query.success === \"true\" : undefined,\n from: query.from ? Number.parseInt(query.from, 10) : undefined,\n to: query.to ? 
Number.parseInt(query.to, 10) : undefined,\n search: query.search || undefined,\n sessionId: query.sessionId || undefined,\n }\n\n const result = getHistory(options)\n return c.json(result)\n}\n\nexport function handleGetEntry(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const entry = getEntry(id)\n\n if (!entry) {\n return c.json({ error: \"Entry not found\" }, 404)\n }\n\n return c.json(entry)\n}\n\nexport function handleDeleteEntries(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n clearHistory()\n return c.json({ success: true, message: \"History cleared\" })\n}\n\nexport function handleGetStats(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const stats = getStats()\n return c.json(stats)\n}\n\nexport function handleExport(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const format = (c.req.query(\"format\") || \"json\") as \"json\" | \"csv\"\n const data = exportHistory(format)\n\n if (format === \"csv\") {\n c.header(\"Content-Type\", \"text/csv\")\n c.header(\"Content-Disposition\", \"attachment; filename=history.csv\")\n } else {\n c.header(\"Content-Type\", \"application/json\")\n c.header(\"Content-Disposition\", \"attachment; filename=history.json\")\n }\n\n return c.body(data)\n}\n\n// Session management endpoints\nexport function handleGetSessions(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const result = getSessions()\n return c.json(result)\n}\n\nexport function handleGetSession(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const session = getSession(id)\n\n if (!session) {\n return c.json({ error: \"Session not found\" }, 404)\n }\n\n // Include entries in the session response\n const entries = getSessionEntries(id)\n\n return c.json({\n ...session,\n entries,\n })\n}\n\nexport function handleDeleteSession(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const success = deleteSession(id)\n\n if (!success) {\n return c.json({ error: \"Session not found\" }, 404)\n }\n\n return c.json({ success: true, message: \"Session deleted\" })\n}\n","// JavaScript for history viewer\nexport const script = `\nlet currentSessionId = null;\nlet currentEntryId = null;\nlet debounceTimer = null;\n\nfunction formatTime(ts) {\n const d = new Date(ts);\n return d.toLocaleTimeString([], {hour:'2-digit',minute:'2-digit',second:'2-digit'});\n}\n\nfunction formatDate(ts) {\n const d = new Date(ts);\n return d.toLocaleDateString([], {month:'short',day:'numeric'}) + ' ' + formatTime(ts);\n}\n\nfunction formatNumber(n) {\n if (n >= 1000000) return (n / 1000000).toFixed(1) + 'M';\n if (n >= 1000) return (n / 1000).toFixed(1) + 'K';\n return n.toString();\n}\n\nfunction formatDuration(ms) {\n if (!ms) return '-';\n if (ms < 1000) return ms + 'ms';\n return (ms / 1000).toFixed(1) + 's';\n}\n\nfunction getContentText(content) {\n if (!content) return '';\n if (typeof content === 'string') return content;\n if (Array.isArray(content)) {\n return content.map(c => {\n if (c.type === 'text') return 
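handleGetEntries above reads its filters (page, limit, model, endpoint, success, from, to, search, sessionId) straight from the query string. A hedged client-side example of calling that endpoint; the base URL is an assumption and should be whatever host and port the server is started on, while the /history/api/entries path matches the route registration later in this file:

// Fetch the 50 most recent successful OpenAI-endpoint entries.
async function fetchRecentEntries(baseUrl: string) {
  const params = new URLSearchParams({
    limit: "50",
    endpoint: "openai",
    success: "true",
  })
  const res = await fetch(`${baseUrl}/history/api/entries?${params.toString()}`)
  if (!res.ok) throw new Error(`History request failed: ${res.status}`)
  return res.json()
}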
c.text || '';\n if (c.type === 'tool_use') return '[tool_use: ' + c.name + ']';\n if (c.type === 'tool_result') return '[tool_result: ' + (c.tool_use_id || '').slice(0,8) + ']';\n if (c.type === 'image' || c.type === 'image_url') return '[image]';\n return c.text || '[' + (c.type || 'unknown') + ']';\n }).join('\\\\n');\n }\n return JSON.stringify(content, null, 2);\n}\n\nfunction formatContentForDisplay(content) {\n if (!content) return { summary: '', raw: 'null' };\n if (typeof content === 'string') return { summary: content, raw: JSON.stringify(content) };\n if (Array.isArray(content)) {\n const parts = [];\n for (const c of content) {\n if (c.type === 'text') {\n parts.push(c.text || '');\n } else if (c.type === 'tool_use') {\n parts.push('--- tool_use: ' + c.name + ' [' + (c.id || '').slice(0,8) + '] ---\\\\n' + JSON.stringify(c.input, null, 2));\n } else if (c.type === 'tool_result') {\n const resultContent = typeof c.content === 'string' ? c.content : JSON.stringify(c.content, null, 2);\n parts.push('--- tool_result [' + (c.tool_use_id || '').slice(0,8) + '] ---\\\\n' + resultContent);\n } else if (c.type === 'image' || c.type === 'image_url') {\n parts.push('[image data]');\n } else {\n parts.push(JSON.stringify(c, null, 2));\n }\n }\n return { summary: parts.join('\\\\n\\\\n'), raw: JSON.stringify(content, null, 2) };\n }\n const raw = JSON.stringify(content, null, 2);\n return { summary: raw, raw };\n}\n\nasync function loadStats() {\n try {\n const res = await fetch('/history/api/stats');\n const data = await res.json();\n if (data.error) return;\n document.getElementById('stat-total').textContent = formatNumber(data.totalRequests);\n document.getElementById('stat-success').textContent = formatNumber(data.successfulRequests);\n document.getElementById('stat-failed').textContent = formatNumber(data.failedRequests);\n document.getElementById('stat-input').textContent = formatNumber(data.totalInputTokens);\n document.getElementById('stat-output').textContent = formatNumber(data.totalOutputTokens);\n document.getElementById('stat-sessions').textContent = data.activeSessions;\n } catch (e) {\n console.error('Failed to load stats', e);\n }\n}\n\nasync function loadSessions() {\n try {\n const res = await fetch('/history/api/sessions');\n const data = await res.json();\n if (data.error) {\n document.getElementById('sessions-list').innerHTML = '<div class=\"empty-state\">Not enabled</div>';\n return;\n }\n\n let html = '<div class=\"session-item all' + (currentSessionId === null ? ' active' : '') + '\" onclick=\"selectSession(null)\">All Requests</div>';\n\n for (const s of data.sessions) {\n const isActive = currentSessionId === s.id;\n const shortId = s.id.slice(0, 8);\n html += \\`\n <div class=\"session-item\\${isActive ? 
' active' : ''}\" onclick=\"selectSession('\\${s.id}')\">\n <div class=\"session-meta\">\n <span>\\${s.models[0] || 'Unknown'}</span>\n <span class=\"session-time\">\\${formatDate(s.startTime)}</span>\n </div>\n <div class=\"session-stats\">\n <span style=\"color:var(--text-dim);font-family:monospace;font-size:10px;\">\\${shortId}</span>\n <span>\\${s.requestCount} req</span>\n <span>\\${formatNumber(s.totalInputTokens + s.totalOutputTokens)} tok</span>\n <span class=\"badge \\${s.endpoint}\">\\${s.endpoint}</span>\n </div>\n </div>\n \\`;\n }\n\n document.getElementById('sessions-list').innerHTML = html || '<div class=\"empty-state\">No sessions</div>';\n } catch (e) {\n document.getElementById('sessions-list').innerHTML = '<div class=\"empty-state\">Error loading</div>';\n }\n}\n\nfunction selectSession(id) {\n currentSessionId = id;\n loadSessions();\n loadEntries();\n closeDetail();\n}\n\nasync function loadEntries() {\n const container = document.getElementById('entries-container');\n container.innerHTML = '<div class=\"loading\">Loading...</div>';\n\n const params = new URLSearchParams();\n params.set('limit', '100');\n\n if (currentSessionId) params.set('sessionId', currentSessionId);\n\n const endpoint = document.getElementById('filter-endpoint').value;\n const success = document.getElementById('filter-success').value;\n const search = document.getElementById('filter-search').value;\n\n if (endpoint) params.set('endpoint', endpoint);\n if (success) params.set('success', success);\n if (search) params.set('search', search);\n\n try {\n const res = await fetch('/history/api/entries?' + params.toString());\n const data = await res.json();\n\n if (data.error) {\n container.innerHTML = '<div class=\"empty-state\"><h3>History Not Enabled</h3><p>Start server with --history</p></div>';\n return;\n }\n\n if (data.entries.length === 0) {\n container.innerHTML = '<div class=\"empty-state\"><h3>No entries</h3><p>Make some API requests</p></div>';\n return;\n }\n\n let html = '';\n for (const e of data.entries) {\n const isSelected = currentEntryId === e.id;\n const status = !e.response ? 'pending' : (e.response.success ? 'success' : 'error');\n const statusLabel = !e.response ? 'pending' : (e.response.success ? 'success' : 'error');\n const tokens = e.response ? formatNumber(e.response.usage.input_tokens) + '/' + formatNumber(e.response.usage.output_tokens) : '-';\n const shortId = e.id.slice(0, 8);\n\n html += \\`\n <div class=\"entry-item\\${isSelected ? ' selected' : ''}\" onclick=\"showDetail('\\${e.id}')\">\n <div class=\"entry-header\">\n <span class=\"entry-time\">\\${formatTime(e.timestamp)}</span>\n <span style=\"color:var(--text-dim);font-family:monospace;font-size:10px;\">\\${shortId}</span>\n <span class=\"badge \\${e.endpoint}\">\\${e.endpoint}</span>\n <span class=\"badge \\${status}\">\\${statusLabel}</span>\n \\${e.request.stream ? 
'<span class=\"badge stream\">stream</span>' : ''}\n <span class=\"entry-model\">\\${e.response?.model || e.request.model}</span>\n <span class=\"entry-tokens\">\\${tokens}</span>\n <span class=\"entry-duration\">\\${formatDuration(e.durationMs)}</span>\n </div>\n </div>\n \\`;\n }\n\n container.innerHTML = html;\n } catch (e) {\n container.innerHTML = '<div class=\"empty-state\">Error: ' + e.message + '</div>';\n }\n}\n\nasync function showDetail(id) {\n // Update selected state without reloading\n const prevSelected = document.querySelector('.entry-item.selected');\n if (prevSelected) prevSelected.classList.remove('selected');\n const newSelected = document.querySelector(\\`.entry-item[onclick*=\"'\\${id}'\"]\\`);\n if (newSelected) newSelected.classList.add('selected');\n currentEntryId = id;\n\n const panel = document.getElementById('detail-panel');\n const content = document.getElementById('detail-content');\n panel.classList.add('open');\n content.innerHTML = '<div class=\"loading\">Loading...</div>';\n\n try {\n const res = await fetch('/history/api/entries/' + id);\n const entry = await res.json();\n if (entry.error) {\n content.innerHTML = '<div class=\"empty-state\">Not found</div>';\n return;\n }\n\n let html = '';\n\n // Entry metadata (IDs)\n html += \\`\n <div class=\"detail-section\">\n <h4>Entry Info</h4>\n <div class=\"response-info\">\n <div class=\"info-item\"><div class=\"info-label\">Entry ID</div><div class=\"info-value\" style=\"font-family:monospace;font-size:11px;\">\\${entry.id}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Session ID</div><div class=\"info-value\" style=\"font-family:monospace;font-size:11px;\">\\${entry.sessionId || '-'}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Timestamp</div><div class=\"info-value\">\\${formatDate(entry.timestamp)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Endpoint</div><div class=\"info-value\"><span class=\"badge \\${entry.endpoint}\">\\${entry.endpoint}</span></div></div>\n </div>\n </div>\n \\`;\n\n // Response info\n if (entry.response) {\n html += \\`\n <div class=\"detail-section\">\n <h4>Response</h4>\n <div class=\"response-info\">\n <div class=\"info-item\"><div class=\"info-label\">Status</div><div class=\"info-value\"><span class=\"badge \\${entry.response.success ? 'success' : 'error'}\">\\${entry.response.success ? 'Success' : 'Error'}</span></div></div>\n <div class=\"info-item\"><div class=\"info-label\">Model</div><div class=\"info-value\">\\${entry.response.model}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Input Tokens</div><div class=\"info-value\">\\${formatNumber(entry.response.usage.input_tokens)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Output Tokens</div><div class=\"info-value\">\\${formatNumber(entry.response.usage.output_tokens)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Duration</div><div class=\"info-value\">\\${formatDuration(entry.durationMs)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Stop Reason</div><div class=\"info-value\">\\${entry.response.stop_reason || '-'}</div></div>\n </div>\n \\${entry.response.error ? 
'<div style=\"color:var(--error);margin-top:8px;\">Error: ' + entry.response.error + '</div>' : ''}\n </div>\n \\`;\n }\n\n // System prompt\n if (entry.request.system) {\n html += \\`\n <div class=\"detail-section\">\n <h4>System Prompt</h4>\n <div class=\"message system\">\n <div class=\"message-content\">\\${escapeHtml(entry.request.system)}</div>\n </div>\n </div>\n \\`;\n }\n\n // Messages\n html += '<div class=\"detail-section\"><h4>Messages</h4><div class=\"messages-list\">';\n for (const msg of entry.request.messages) {\n const roleClass = msg.role === 'user' ? 'user' : (msg.role === 'assistant' ? 'assistant' : (msg.role === 'system' ? 'system' : 'tool'));\n const formatted = formatContentForDisplay(msg.content);\n const isLong = formatted.summary.length > 500;\n const rawContent = JSON.stringify(msg, null, 2);\n\n html += \\`\n <div class=\"message \\${roleClass}\">\n <button class=\"raw-btn small\" onclick=\"showRawJson(event, \\${escapeAttr(rawContent)})\">Raw</button>\n <button class=\"copy-btn small\" onclick=\"copyText(event, this)\" data-content=\"\\${escapeAttr(formatted.summary)}\">Copy</button>\n <div class=\"message-role\">\\${msg.role}\\${msg.name ? ' (' + msg.name + ')' : ''}\\${msg.tool_call_id ? ' [' + (msg.tool_call_id || '').slice(0,8) + ']' : ''}</div>\n <div class=\"message-content\\${isLong ? ' collapsed' : ''}\" id=\"msg-\\${Math.random().toString(36).slice(2)}\">\\${escapeHtml(formatted.summary)}</div>\n \\${isLong ? '<span class=\"expand-btn\" onclick=\"toggleExpand(this)\">Show more</span>' : ''}\n \\`;\n\n // Tool calls\n if (msg.tool_calls && msg.tool_calls.length > 0) {\n for (const tc of msg.tool_calls) {\n html += \\`\n <div class=\"tool-call\">\n <span class=\"tool-name\">\\${tc.function.name}</span>\n <div class=\"tool-args\">\\${escapeHtml(tc.function.arguments)}</div>\n </div>\n \\`;\n }\n }\n\n html += '</div>';\n }\n html += '</div></div>';\n\n // Response content\n if (entry.response?.content) {\n const formatted = formatContentForDisplay(entry.response.content.content);\n const rawContent = JSON.stringify(entry.response.content, null, 2);\n html += \\`\n <div class=\"detail-section\">\n <h4>Response Content</h4>\n <div class=\"message assistant\">\n <button class=\"raw-btn small\" onclick=\"showRawJson(event, \\${escapeAttr(rawContent)})\">Raw</button>\n <button class=\"copy-btn small\" onclick=\"copyText(event, this)\" data-content=\"\\${escapeAttr(formatted.summary)}\">Copy</button>\n <div class=\"message-content\">\\${escapeHtml(formatted.summary)}</div>\n </div>\n </div>\n \\`;\n }\n\n // Response tool calls\n if (entry.response?.toolCalls && entry.response.toolCalls.length > 0) {\n html += '<div class=\"detail-section\"><h4>Tool Calls</h4>';\n for (const tc of entry.response.toolCalls) {\n const tcRaw = JSON.stringify(tc, null, 2);\n html += \\`\n <div class=\"tool-call\" style=\"position:relative;\">\n <button class=\"raw-btn small\" style=\"position:absolute;top:4px;right:4px;opacity:1;\" onclick=\"showRawJson(event, \\${escapeAttr(tcRaw)})\">Raw</button>\n <span class=\"tool-name\">\\${tc.name}</span> <span style=\"color:var(--text-muted);font-size:11px;\">[\\${(tc.id || '').slice(0,8)}]</span>\n <div class=\"tool-args\">\\${escapeHtml(tc.input)}</div>\n </div>\n \\`;\n }\n html += '</div>';\n }\n\n // Tools defined\n if (entry.request.tools && entry.request.tools.length > 0) {\n html += '<div class=\"detail-section\"><h4>Available Tools (' + entry.request.tools.length + ')</h4>';\n html += '<div 
style=\"font-size:11px;color:var(--text-muted)\">' + entry.request.tools.map(t => t.name).join(', ') + '</div>';\n html += '</div>';\n }\n\n content.innerHTML = html;\n } catch (e) {\n content.innerHTML = '<div class=\"empty-state\">Error: ' + e.message + '</div>';\n }\n}\n\nfunction closeDetail() {\n currentEntryId = null;\n document.getElementById('detail-panel').classList.remove('open');\n loadEntries();\n}\n\nfunction toggleExpand(btn) {\n const content = btn.previousElementSibling;\n const isCollapsed = content.classList.contains('collapsed');\n content.classList.toggle('collapsed');\n btn.textContent = isCollapsed ? 'Show less' : 'Show more';\n}\n\nfunction copyText(event, btn) {\n event.stopPropagation();\n const text = btn.getAttribute('data-content');\n navigator.clipboard.writeText(text);\n const orig = btn.textContent;\n btn.textContent = 'Copied!';\n setTimeout(() => btn.textContent = orig, 1000);\n}\n\nfunction escapeHtml(str) {\n if (!str) return '';\n return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\"/g, '&quot;');\n}\n\nfunction escapeAttr(str) {\n if (!str) return '';\n return str.replace(/&/g, '&amp;').replace(/\"/g, '&quot;').replace(/'/g, '&#39;');\n}\n\nlet currentRawContent = '';\n\nfunction showRawJson(event, content) {\n event.stopPropagation();\n currentRawContent = typeof content === 'string' ? content : JSON.stringify(content, null, 2);\n document.getElementById('raw-content').textContent = currentRawContent;\n document.getElementById('raw-modal').classList.add('open');\n}\n\nfunction closeRawModal(event) {\n if (event && event.target !== event.currentTarget) return;\n document.getElementById('raw-modal').classList.remove('open');\n}\n\nfunction copyRawContent() {\n navigator.clipboard.writeText(currentRawContent);\n const btns = document.querySelectorAll('.modal-header button');\n const copyBtn = btns[0];\n const orig = copyBtn.textContent;\n copyBtn.textContent = 'Copied!';\n setTimeout(() => copyBtn.textContent = orig, 1000);\n}\n\nfunction debounceFilter() {\n clearTimeout(debounceTimer);\n debounceTimer = setTimeout(loadEntries, 300);\n}\n\nfunction refresh() {\n loadStats();\n loadSessions();\n loadEntries();\n}\n\nfunction exportData(format) {\n window.open('/history/api/export?format=' + format, '_blank');\n}\n\nasync function clearAll() {\n if (!confirm('Clear all history? 
This cannot be undone.')) return;\n try {\n await fetch('/history/api/entries', { method: 'DELETE' });\n currentSessionId = null;\n currentEntryId = null;\n closeDetail();\n refresh();\n } catch (e) {\n alert('Failed: ' + e.message);\n }\n}\n\n// Initial load\nloadStats();\nloadSessions();\nloadEntries();\n\n// Keyboard shortcuts\ndocument.addEventListener('keydown', (e) => {\n if (e.key === 'Escape') {\n if (document.getElementById('raw-modal').classList.contains('open')) {\n closeRawModal();\n } else {\n closeDetail();\n }\n }\n if (e.key === 'r' && (e.metaKey || e.ctrlKey)) {\n e.preventDefault();\n refresh();\n }\n});\n\n// Auto-refresh every 10 seconds\nsetInterval(() => {\n loadStats();\n loadSessions();\n}, 10000);\n`\n","// CSS styles for history viewer\nexport const styles = `\n:root {\n --bg: #0d1117;\n --bg-secondary: #161b22;\n --bg-tertiary: #21262d;\n --bg-hover: #30363d;\n --text: #e6edf3;\n --text-muted: #8b949e;\n --text-dim: #6e7681;\n --border: #30363d;\n --primary: #58a6ff;\n --success: #3fb950;\n --error: #f85149;\n --warning: #d29922;\n --purple: #a371f7;\n --cyan: #39c5cf;\n}\n@media (prefers-color-scheme: light) {\n :root {\n --bg: #ffffff;\n --bg-secondary: #f6f8fa;\n --bg-tertiary: #eaeef2;\n --bg-hover: #d0d7de;\n --text: #1f2328;\n --text-muted: #656d76;\n --text-dim: #8c959f;\n --border: #d0d7de;\n }\n}\n* { box-sizing: border-box; margin: 0; padding: 0; }\nbody {\n font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif;\n background: var(--bg);\n color: var(--text);\n line-height: 1.4;\n font-size: 13px;\n}\n\n/* Layout */\n.layout { display: flex; height: 100vh; }\n.sidebar {\n width: 280px;\n border-right: 1px solid var(--border);\n display: flex;\n flex-direction: column;\n background: var(--bg-secondary);\n}\n.main { flex: 1; display: flex; flex-direction: column; overflow: hidden; }\n\n/* Header */\n.header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n align-items: center;\n justify-content: space-between;\n gap: 12px;\n background: var(--bg-secondary);\n}\n.header h1 { font-size: 16px; font-weight: 600; }\n.header-actions { display: flex; gap: 8px; }\n\n/* Stats bar */\n.stats-bar {\n display: flex;\n gap: 16px;\n padding: 8px 16px;\n border-bottom: 1px solid var(--border);\n background: var(--bg-tertiary);\n font-size: 12px;\n}\n.stat { display: flex; align-items: center; gap: 4px; }\n.stat-value { font-weight: 600; }\n.stat-label { color: var(--text-muted); }\n\n/* Sessions sidebar */\n.sidebar-header {\n padding: 12px;\n border-bottom: 1px solid var(--border);\n font-weight: 600;\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.sessions-list {\n flex: 1;\n overflow-y: auto;\n}\n.session-item {\n padding: 10px 12px;\n border-bottom: 1px solid var(--border);\n cursor: pointer;\n transition: background 0.15s;\n}\n.session-item:hover { background: var(--bg-hover); }\n.session-item.active { background: var(--bg-tertiary); border-left: 3px solid var(--primary); }\n.session-item.all { font-weight: 600; color: var(--primary); }\n.session-meta { display: flex; justify-content: space-between; margin-bottom: 4px; }\n.session-time { color: var(--text-muted); font-size: 11px; }\n.session-stats { display: flex; gap: 8px; font-size: 11px; color: var(--text-dim); }\n\n/* Buttons */\nbutton {\n background: var(--bg-tertiary);\n border: 1px solid var(--border);\n color: var(--text);\n padding: 5px 10px;\n border-radius: 6px;\n cursor: pointer;\n font-size: 12px;\n 
transition: all 0.15s;\n display: inline-flex;\n align-items: center;\n gap: 4px;\n}\nbutton:hover { background: var(--bg-hover); }\nbutton.primary { background: var(--primary); color: #fff; border-color: var(--primary); }\nbutton.danger { color: var(--error); }\nbutton.danger:hover { background: rgba(248,81,73,0.1); }\nbutton:disabled { opacity: 0.5; cursor: not-allowed; }\nbutton.small { padding: 3px 6px; font-size: 11px; }\nbutton.icon-only { padding: 5px 6px; }\n\n/* Filters */\n.filters {\n display: flex;\n gap: 8px;\n padding: 8px 16px;\n border-bottom: 1px solid var(--border);\n flex-wrap: wrap;\n}\ninput, select {\n background: var(--bg);\n border: 1px solid var(--border);\n color: var(--text);\n padding: 5px 8px;\n border-radius: 6px;\n font-size: 12px;\n}\ninput:focus, select:focus { outline: none; border-color: var(--primary); }\ninput::placeholder { color: var(--text-dim); }\n\n/* Entries list */\n.entries-container { flex: 1; overflow-y: auto; }\n.entry-item {\n border-bottom: 1px solid var(--border);\n cursor: pointer;\n transition: background 0.15s;\n}\n.entry-item:hover { background: var(--bg-secondary); }\n.entry-item.selected { background: var(--bg-tertiary); }\n.entry-header {\n display: flex;\n align-items: center;\n gap: 8px;\n padding: 8px 16px;\n}\n.entry-time { color: var(--text-muted); font-size: 11px; min-width: 70px; }\n.entry-model { font-weight: 500; flex: 1; }\n.entry-tokens { font-size: 11px; color: var(--text-dim); }\n.entry-duration { font-size: 11px; color: var(--text-dim); min-width: 50px; text-align: right; }\n\n/* Badges */\n.badge {\n display: inline-block;\n padding: 1px 6px;\n border-radius: 10px;\n font-size: 10px;\n font-weight: 500;\n}\n.badge.success { background: rgba(63, 185, 80, 0.15); color: var(--success); }\n.badge.error { background: rgba(248, 81, 73, 0.15); color: var(--error); }\n.badge.pending { background: rgba(136, 136, 136, 0.15); color: var(--text-muted); }\n.badge.anthropic { background: rgba(163, 113, 247, 0.15); color: var(--purple); }\n.badge.openai { background: rgba(210, 153, 34, 0.15); color: var(--warning); }\n.badge.stream { background: rgba(57, 197, 207, 0.15); color: var(--cyan); }\n\n/* Detail panel */\n.detail-panel {\n width: 0;\n border-left: 1px solid var(--border);\n background: var(--bg-secondary);\n transition: width 0.2s;\n overflow: hidden;\n display: flex;\n flex-direction: column;\n}\n.detail-panel.open { width: 50%; min-width: 400px; }\n.detail-header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.detail-content { flex: 1; overflow-y: auto; padding: 16px; }\n.detail-section { margin-bottom: 16px; }\n.detail-section h4 {\n font-size: 11px;\n text-transform: uppercase;\n color: var(--text-muted);\n margin-bottom: 8px;\n letter-spacing: 0.5px;\n}\n\n/* Messages display */\n.messages-list { display: flex; flex-direction: column; gap: 8px; }\n.message {\n padding: 10px 12px;\n border-radius: 8px;\n background: var(--bg);\n border: 1px solid var(--border);\n position: relative;\n}\n.message.user { border-left: 3px solid var(--primary); }\n.message.assistant { border-left: 3px solid var(--success); }\n.message.system { border-left: 3px solid var(--warning); background: var(--bg-tertiary); }\n.message.tool { border-left: 3px solid var(--purple); }\n.message-role {\n font-size: 10px;\n text-transform: uppercase;\n color: var(--text-muted);\n margin-bottom: 4px;\n font-weight: 600;\n}\n.message-content {\n white-space: 
pre-wrap;\n word-break: break-word;\n font-family: 'SF Mono', Monaco, 'Courier New', monospace;\n font-size: 12px;\n max-height: 300px;\n overflow-y: auto;\n}\n.message-content.collapsed { max-height: 100px; }\n.expand-btn {\n color: var(--primary);\n cursor: pointer;\n font-size: 11px;\n margin-top: 4px;\n display: inline-block;\n}\n\n/* Tool calls */\n.tool-call {\n background: var(--bg-tertiary);\n padding: 8px;\n border-radius: 6px;\n margin-top: 8px;\n font-size: 12px;\n}\n.tool-name { color: var(--purple); font-weight: 600; }\n.tool-args {\n font-family: monospace;\n font-size: 11px;\n color: var(--text-muted);\n margin-top: 4px;\n white-space: pre-wrap;\n max-height: 150px;\n overflow-y: auto;\n}\n\n/* Response info */\n.response-info {\n display: grid;\n grid-template-columns: repeat(auto-fit, minmax(100px, 1fr));\n gap: 12px;\n}\n.info-item { }\n.info-label { font-size: 11px; color: var(--text-muted); }\n.info-value { font-weight: 500; }\n\n/* Empty state */\n.empty-state {\n text-align: center;\n padding: 40px 20px;\n color: var(--text-muted);\n}\n.empty-state h3 { margin-bottom: 8px; color: var(--text); }\n\n/* Loading */\n.loading { text-align: center; padding: 20px; color: var(--text-muted); }\n\n/* Scrollbar */\n::-webkit-scrollbar { width: 8px; height: 8px; }\n::-webkit-scrollbar-track { background: var(--bg); }\n::-webkit-scrollbar-thumb { background: var(--border); border-radius: 4px; }\n::-webkit-scrollbar-thumb:hover { background: var(--text-dim); }\n\n/* Copy/Raw buttons */\n.copy-btn, .raw-btn {\n position: absolute;\n top: 4px;\n opacity: 0;\n transition: opacity 0.15s;\n}\n.copy-btn { right: 4px; }\n.raw-btn { right: 50px; }\n.message:hover .copy-btn, .message:hover .raw-btn { opacity: 1; }\n\n/* Raw JSON modal */\n.modal-overlay {\n position: fixed;\n top: 0; left: 0; right: 0; bottom: 0;\n background: rgba(0,0,0,0.6);\n display: none;\n justify-content: center;\n align-items: center;\n z-index: 1000;\n}\n.modal-overlay.open { display: flex; }\n.modal {\n background: var(--bg-secondary);\n border: 1px solid var(--border);\n border-radius: 8px;\n width: 80%;\n max-width: 800px;\n max-height: 80vh;\n display: flex;\n flex-direction: column;\n}\n.modal-header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.modal-body {\n flex: 1;\n overflow: auto;\n padding: 16px;\n}\n.modal-body pre {\n margin: 0;\n font-family: 'SF Mono', Monaco, 'Courier New', monospace;\n font-size: 12px;\n white-space: pre-wrap;\n word-break: break-word;\n}\n`\n","// HTML template for history viewer\nexport const template = `\n<div class=\"layout\">\n <!-- Sidebar: Sessions -->\n <div class=\"sidebar\">\n <div class=\"sidebar-header\">\n <span>Sessions</span>\n <button class=\"small danger\" onclick=\"clearAll()\" title=\"Clear all\">Clear</button>\n </div>\n <div class=\"sessions-list\" id=\"sessions-list\">\n <div class=\"loading\">Loading...</div>\n </div>\n </div>\n\n <!-- Main content -->\n <div class=\"main\">\n <div class=\"header\">\n <h1>Request History</h1>\n <div class=\"header-actions\">\n <button onclick=\"refresh()\">Refresh</button>\n <button onclick=\"exportData('json')\">Export JSON</button>\n <button onclick=\"exportData('csv')\">Export CSV</button>\n </div>\n </div>\n\n <div class=\"stats-bar\" id=\"stats-bar\">\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-total\">-</span><span class=\"stat-label\">requests</span></div>\n <div class=\"stat\"><span class=\"stat-value\" 
id=\"stat-success\">-</span><span class=\"stat-label\">success</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-failed\">-</span><span class=\"stat-label\">failed</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-input\">-</span><span class=\"stat-label\">in tokens</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-output\">-</span><span class=\"stat-label\">out tokens</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-sessions\">-</span><span class=\"stat-label\">sessions</span></div>\n </div>\n\n <div class=\"filters\">\n <input type=\"text\" id=\"filter-search\" placeholder=\"Search messages...\" style=\"flex:1;min-width:150px\" onkeyup=\"debounceFilter()\">\n <select id=\"filter-endpoint\" onchange=\"loadEntries()\">\n <option value=\"\">All Endpoints</option>\n <option value=\"anthropic\">Anthropic</option>\n <option value=\"openai\">OpenAI</option>\n </select>\n <select id=\"filter-success\" onchange=\"loadEntries()\">\n <option value=\"\">All Status</option>\n <option value=\"true\">Success</option>\n <option value=\"false\">Failed</option>\n </select>\n </div>\n\n <div style=\"display:flex;flex:1;overflow:hidden;\">\n <div class=\"entries-container\" id=\"entries-container\">\n <div class=\"loading\">Loading...</div>\n </div>\n\n <!-- Detail panel -->\n <div class=\"detail-panel\" id=\"detail-panel\">\n <div class=\"detail-header\">\n <span>Request Details</span>\n <button class=\"icon-only\" onclick=\"closeDetail()\">&times;</button>\n </div>\n <div class=\"detail-content\" id=\"detail-content\"></div>\n </div>\n </div>\n </div>\n</div>\n\n<!-- Raw JSON Modal -->\n<div class=\"modal-overlay\" id=\"raw-modal\" onclick=\"closeRawModal(event)\">\n <div class=\"modal\" onclick=\"event.stopPropagation()\">\n <div class=\"modal-header\">\n <span>Raw JSON</span>\n <div>\n <button class=\"small\" onclick=\"copyRawContent()\">Copy</button>\n <button class=\"icon-only\" onclick=\"closeRawModal()\">&times;</button>\n </div>\n </div>\n <div class=\"modal-body\">\n <pre id=\"raw-content\"></pre>\n </div>\n </div>\n</div>\n`\n","// Web UI HTML template for history viewer\n// Features: Session grouping, full message content, compact design\n\nimport { script } from \"./ui/script\"\nimport { styles } from \"./ui/styles\"\nimport { template } from \"./ui/template\"\n\nexport function getHistoryUI(): string {\n return `<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <title>Copilot API - Request History</title>\n <style>${styles}</style>\n</head>\n<body>\n ${template}\n <script>${script}</script>\n</body>\n</html>`\n}\n","import { Hono } from \"hono\"\n\nimport {\n handleDeleteEntries,\n handleDeleteSession,\n handleExport,\n handleGetEntries,\n handleGetEntry,\n handleGetSession,\n handleGetSessions,\n handleGetStats,\n} from \"./api\"\nimport { getHistoryUI } from \"./ui\"\n\nexport const historyRoutes = new Hono()\n\n// API endpoints\nhistoryRoutes.get(\"/api/entries\", handleGetEntries)\nhistoryRoutes.get(\"/api/entries/:id\", handleGetEntry)\nhistoryRoutes.delete(\"/api/entries\", handleDeleteEntries)\nhistoryRoutes.get(\"/api/stats\", handleGetStats)\nhistoryRoutes.get(\"/api/export\", handleExport)\n\n// Session endpoints\nhistoryRoutes.get(\"/api/sessions\", handleGetSessions)\nhistoryRoutes.get(\"/api/sessions/:id\", handleGetSession)\nhistoryRoutes.delete(\"/api/sessions/:id\", handleDeleteSession)\n\n// 
Web UI - serve HTML for the root path\nhistoryRoutes.get(\"/\", (c) => {\n return c.html(getHistoryUI())\n})\n","import { type AnthropicResponse } from \"./anthropic-types\"\n\nexport function mapOpenAIStopReasonToAnthropic(\n finishReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null,\n): AnthropicResponse[\"stop_reason\"] {\n if (finishReason === null) {\n return null\n }\n const stopReasonMap = {\n stop: \"end_turn\",\n length: \"max_tokens\",\n tool_calls: \"tool_use\",\n content_filter: \"end_turn\",\n } as const\n return stopReasonMap[finishReason]\n}\n","import {\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n type ContentPart,\n type Message,\n type TextPart,\n type Tool,\n type ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport consola from \"consola\"\n\nimport {\n type AnthropicAssistantContentBlock,\n type AnthropicAssistantMessage,\n type AnthropicMessage,\n type AnthropicMessagesPayload,\n type AnthropicResponse,\n type AnthropicTextBlock,\n type AnthropicThinkingBlock,\n type AnthropicTool,\n type AnthropicToolResultBlock,\n type AnthropicToolUseBlock,\n type AnthropicUserContentBlock,\n type AnthropicUserMessage,\n} from \"./anthropic-types\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\n// OpenAI limits function names to 64 characters\nconst OPENAI_TOOL_NAME_LIMIT = 64\n\n// Mapping from truncated tool names to original names\n// This is used to restore original names in responses\nexport interface ToolNameMapping {\n truncatedToOriginal: Map<string, string>\n originalToTruncated: Map<string, string>\n}\n\n// Helper function to fix message sequences by adding missing tool responses\n// This prevents \"tool_use ids were found without tool_result blocks\" errors\nfunction fixMessageSequence(messages: Array<Message>): Array<Message> {\n const fixedMessages: Array<Message> = []\n\n for (let i = 0; i < messages.length; i++) {\n const message = messages[i]\n fixedMessages.push(message)\n\n if (\n message.role === \"assistant\"\n && message.tool_calls\n && message.tool_calls.length > 0\n ) {\n // Find which tool calls already have responses\n const foundToolResponses = new Set<string>()\n\n // Look ahead to see what tool responses exist\n let j = i + 1\n while (j < messages.length && messages[j].role === \"tool\") {\n const toolMessage = messages[j]\n if (toolMessage.tool_call_id) {\n foundToolResponses.add(toolMessage.tool_call_id)\n }\n j++\n }\n\n // Add placeholder responses for missing tool calls\n for (const toolCall of message.tool_calls) {\n if (!foundToolResponses.has(toolCall.id)) {\n consola.debug(`Adding placeholder tool_result for ${toolCall.id}`)\n fixedMessages.push({\n role: \"tool\",\n tool_call_id: toolCall.id,\n content: \"Tool execution was interrupted or failed.\",\n })\n }\n }\n }\n }\n\n return fixedMessages\n}\n\n// Payload translation\n\nexport interface TranslationResult {\n payload: ChatCompletionsPayload\n toolNameMapping: ToolNameMapping\n}\n\nexport function translateToOpenAI(\n payload: AnthropicMessagesPayload,\n): TranslationResult {\n // Create tool name mapping for this request\n const toolNameMapping: ToolNameMapping = {\n truncatedToOriginal: new Map(),\n originalToTruncated: new Map(),\n }\n\n const messages = translateAnthropicMessagesToOpenAI(\n payload.messages,\n payload.system,\n toolNameMapping,\n )\n\n return {\n payload: {\n model: translateModelName(payload.model),\n // Fix message sequence to ensure all tool_use blocks have corresponding tool_result\n 
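fixMessageSequence above protects against assistant turns whose tool_calls never received a matching tool message: as soon as such a turn is copied over, a placeholder tool result is pushed for every missing id, so the placeholder lands immediately after the assistant message and ahead of any tool results that do exist. A small illustration of that repair with simplified messages (ids and contents are invented):

// Input: call_b has no tool result.
const before = [
  { role: "assistant", content: null, tool_calls: [{ id: "call_a" }, { id: "call_b" }] },
  { role: "tool", tool_call_id: "call_a", content: "42" },
  { role: "user", content: "continue" },
]

// Output: a placeholder for call_b is inserted right after the assistant turn.
const after = [
  { role: "assistant", content: null, tool_calls: [{ id: "call_a" }, { id: "call_b" }] },
  { role: "tool", tool_call_id: "call_b", content: "Tool execution was interrupted or failed." },
  { role: "tool", tool_call_id: "call_a", content: "42" },
  { role: "user", content: "continue" },
]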
messages: fixMessageSequence(messages),\n max_tokens: payload.max_tokens,\n stop: payload.stop_sequences,\n stream: payload.stream,\n temperature: payload.temperature,\n top_p: payload.top_p,\n user: payload.metadata?.user_id,\n tools: translateAnthropicToolsToOpenAI(payload.tools, toolNameMapping),\n tool_choice: translateAnthropicToolChoiceToOpenAI(\n payload.tool_choice,\n toolNameMapping,\n ),\n },\n toolNameMapping,\n }\n}\n\nfunction translateModelName(model: string): string {\n // Handle short model name aliases (e.g., \"opus\", \"sonnet\", \"haiku\")\n // Maps to the latest available version in Copilot\n const shortNameMap: Record<string, string> = {\n opus: \"claude-opus-4.5\",\n sonnet: \"claude-sonnet-4.5\",\n haiku: \"claude-haiku-4.5\",\n }\n\n if (shortNameMap[model]) {\n return shortNameMap[model]\n }\n\n // Handle versioned model names from Anthropic API (e.g., claude-sonnet-4-20250514)\n // Strip date suffixes and convert to Copilot-compatible format\n\n // claude-sonnet-4-5-YYYYMMDD -> claude-sonnet-4.5\n if (model.match(/^claude-sonnet-4-5-\\d+$/)) {\n return \"claude-sonnet-4.5\"\n }\n // claude-sonnet-4-YYYYMMDD -> claude-sonnet-4\n if (model.match(/^claude-sonnet-4-\\d+$/)) {\n return \"claude-sonnet-4\"\n }\n\n // claude-opus-4-5-YYYYMMDD -> claude-opus-4.5\n if (model.match(/^claude-opus-4-5-\\d+$/)) {\n return \"claude-opus-4.5\"\n }\n // claude-opus-4-YYYYMMDD -> claude-opus-4.5 (default to latest)\n if (model.match(/^claude-opus-4-\\d+$/)) {\n return \"claude-opus-4.5\"\n }\n\n // claude-haiku-4-5-YYYYMMDD -> claude-haiku-4.5\n if (model.match(/^claude-haiku-4-5-\\d+$/)) {\n return \"claude-haiku-4.5\"\n }\n // claude-haiku-3-5-YYYYMMDD -> claude-haiku-4.5 (upgrade to latest available)\n if (model.match(/^claude-haiku-3-5-\\d+$/)) {\n return \"claude-haiku-4.5\"\n }\n\n return model\n}\n\nfunction translateAnthropicMessagesToOpenAI(\n anthropicMessages: Array<AnthropicMessage>,\n system: string | Array<AnthropicTextBlock> | undefined,\n toolNameMapping: ToolNameMapping,\n): Array<Message> {\n const systemMessages = handleSystemPrompt(system)\n\n const otherMessages = anthropicMessages.flatMap((message) =>\n message.role === \"user\" ?\n handleUserMessage(message)\n : handleAssistantMessage(message, toolNameMapping),\n )\n\n return [...systemMessages, ...otherMessages]\n}\n\nfunction handleSystemPrompt(\n system: string | Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n if (!system) {\n return []\n }\n\n if (typeof system === \"string\") {\n return [{ role: \"system\", content: system }]\n } else {\n const systemText = system.map((block) => block.text).join(\"\\n\\n\")\n return [{ role: \"system\", content: systemText }]\n }\n}\n\nfunction handleUserMessage(message: AnthropicUserMessage): Array<Message> {\n const newMessages: Array<Message> = []\n\n if (Array.isArray(message.content)) {\n const toolResultBlocks = message.content.filter(\n (block): block is AnthropicToolResultBlock =>\n block.type === \"tool_result\",\n )\n const otherBlocks = message.content.filter(\n (block) => block.type !== \"tool_result\",\n )\n\n // Tool results must come first to maintain protocol: tool_use -> tool_result -> user\n for (const block of toolResultBlocks) {\n newMessages.push({\n role: \"tool\",\n tool_call_id: block.tool_use_id,\n content: mapContent(block.content),\n })\n }\n\n if (otherBlocks.length > 0) {\n newMessages.push({\n role: \"user\",\n content: mapContent(otherBlocks),\n })\n }\n } else {\n newMessages.push({\n role: \"user\",\n content: 
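translateModelName above rewrites both short aliases and Anthropic-style dated model ids into the names Copilot expects, and passes anything unrecognised through unchanged. A few mappings implied by the regexes in that function, collected as data (the dates are placeholders, not real release dates):

// [requested model, model sent upstream]
const modelNameExamples: Array<[string, string]> = [
  ["sonnet", "claude-sonnet-4.5"],                     // short alias
  ["claude-sonnet-4-5-20250101", "claude-sonnet-4.5"], // dated 4.5 id
  ["claude-sonnet-4-20250101", "claude-sonnet-4"],     // dated 4 id
  ["claude-opus-4-20250101", "claude-opus-4.5"],       // opus 4 upgraded to 4.5
  ["claude-haiku-3-5-20250101", "claude-haiku-4.5"],   // haiku 3.5 upgraded to 4.5
  ["gpt-4o", "gpt-4o"],                                // unmatched ids pass through
]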
mapContent(message.content),\n })\n }\n\n return newMessages\n}\n\nfunction handleAssistantMessage(\n message: AnthropicAssistantMessage,\n toolNameMapping: ToolNameMapping,\n): Array<Message> {\n if (!Array.isArray(message.content)) {\n return [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n }\n\n const toolUseBlocks = message.content.filter(\n (block): block is AnthropicToolUseBlock => block.type === \"tool_use\",\n )\n\n const textBlocks = message.content.filter(\n (block): block is AnthropicTextBlock => block.type === \"text\",\n )\n\n const thinkingBlocks = message.content.filter(\n (block): block is AnthropicThinkingBlock => block.type === \"thinking\",\n )\n\n // Combine text and thinking blocks, as OpenAI doesn't have separate thinking blocks\n const allTextContent = [\n ...textBlocks.map((b) => b.text),\n ...thinkingBlocks.map((b) => b.thinking),\n ].join(\"\\n\\n\")\n\n return toolUseBlocks.length > 0 ?\n [\n {\n role: \"assistant\",\n content: allTextContent || null,\n tool_calls: toolUseBlocks.map((toolUse) => ({\n id: toolUse.id,\n type: \"function\",\n function: {\n name: getTruncatedToolName(toolUse.name, toolNameMapping),\n arguments: JSON.stringify(toolUse.input),\n },\n })),\n },\n ]\n : [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n}\n\nfunction mapContent(\n content:\n | string\n | Array<AnthropicUserContentBlock | AnthropicAssistantContentBlock>,\n): string | Array<ContentPart> | null {\n if (typeof content === \"string\") {\n return content\n }\n if (!Array.isArray(content)) {\n return null\n }\n\n const hasImage = content.some((block) => block.type === \"image\")\n if (!hasImage) {\n return content\n .filter(\n (block): block is AnthropicTextBlock | AnthropicThinkingBlock =>\n block.type === \"text\" || block.type === \"thinking\",\n )\n .map((block) => (block.type === \"text\" ? 
block.text : block.thinking))\n .join(\"\\n\\n\")\n }\n\n const contentParts: Array<ContentPart> = []\n for (const block of content) {\n switch (block.type) {\n case \"text\": {\n contentParts.push({ type: \"text\", text: block.text })\n\n break\n }\n case \"thinking\": {\n contentParts.push({ type: \"text\", text: block.thinking })\n\n break\n }\n case \"image\": {\n contentParts.push({\n type: \"image_url\",\n image_url: {\n url: `data:${block.source.media_type};base64,${block.source.data}`,\n },\n })\n\n break\n }\n // No default\n }\n }\n return contentParts\n}\n\n// Truncate tool name to fit OpenAI's 64-character limit\n// Uses consistent truncation with hash suffix to avoid collisions\nfunction getTruncatedToolName(\n originalName: string,\n toolNameMapping: ToolNameMapping,\n): string {\n // If already within limit, return as-is\n if (originalName.length <= OPENAI_TOOL_NAME_LIMIT) {\n return originalName\n }\n\n // Check if we've already truncated this name\n const existingTruncated = toolNameMapping.originalToTruncated.get(originalName)\n if (existingTruncated) {\n return existingTruncated\n }\n\n // Create a simple hash suffix from the original name\n // Use last 8 chars of a simple hash to ensure uniqueness\n let hash = 0\n for (let i = 0; i < originalName.length; i++) {\n const char = originalName.charCodeAt(i)\n hash = ((hash << 5) - hash) + char\n hash = hash & hash // Convert to 32bit integer\n }\n const hashSuffix = Math.abs(hash).toString(36).slice(0, 8)\n\n // Truncate: leave room for \"_\" + 8-char hash = 9 chars\n const truncatedName =\n originalName.slice(0, OPENAI_TOOL_NAME_LIMIT - 9) + \"_\" + hashSuffix\n\n // Store mapping in both directions\n toolNameMapping.truncatedToOriginal.set(truncatedName, originalName)\n toolNameMapping.originalToTruncated.set(originalName, truncatedName)\n\n consola.debug(\n `Truncated tool name: \"${originalName}\" -> \"${truncatedName}\"`,\n )\n\n return truncatedName\n}\n\nfunction translateAnthropicToolsToOpenAI(\n anthropicTools: Array<AnthropicTool> | undefined,\n toolNameMapping: ToolNameMapping,\n): Array<Tool> | undefined {\n if (!anthropicTools) {\n return undefined\n }\n return anthropicTools.map((tool) => ({\n type: \"function\",\n function: {\n name: getTruncatedToolName(tool.name, toolNameMapping),\n description: tool.description,\n parameters: tool.input_schema,\n },\n }))\n}\n\nfunction translateAnthropicToolChoiceToOpenAI(\n anthropicToolChoice: AnthropicMessagesPayload[\"tool_choice\"],\n toolNameMapping: ToolNameMapping,\n): ChatCompletionsPayload[\"tool_choice\"] {\n if (!anthropicToolChoice) {\n return undefined\n }\n\n switch (anthropicToolChoice.type) {\n case \"auto\": {\n return \"auto\"\n }\n case \"any\": {\n return \"required\"\n }\n case \"tool\": {\n if (anthropicToolChoice.name) {\n return {\n type: \"function\",\n function: {\n name: getTruncatedToolName(\n anthropicToolChoice.name,\n toolNameMapping,\n ),\n },\n }\n }\n return undefined\n }\n case \"none\": {\n return \"none\"\n }\n default: {\n return undefined\n }\n }\n}\n\n// Response translation\n\nexport function translateToAnthropic(\n response: ChatCompletionResponse,\n toolNameMapping?: ToolNameMapping,\n): AnthropicResponse {\n // Handle edge case of empty choices array\n if (response.choices.length === 0) {\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: [],\n stop_reason: \"end_turn\",\n stop_sequence: null,\n usage: {\n input_tokens: response.usage?.prompt_tokens ?? 
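getTruncatedToolName above keeps names within OpenAI's 64-character function-name limit by keeping the first 55 characters and appending an underscore plus up to eight base-36 digits of a simple 32-bit hash of the full name; the mapping is stored in both directions so responses can restore the original name. A standalone restatement of the hash-and-truncate step (the example name in the comment is invented):

const LIMIT = 64

function truncateToolName(originalName: string): string {
  if (originalName.length <= LIMIT) return originalName
  // Simple 32-bit string hash, rendered in base 36 and capped at 8 chars.
  let hash = 0
  for (let i = 0; i < originalName.length; i++) {
    hash = (hash << 5) - hash + originalName.charCodeAt(i)
    hash = hash & hash // force 32-bit overflow semantics
  }
  const suffix = Math.abs(hash).toString(36).slice(0, 8)
  // 55 chars of the original + "_" + up to 8 hash chars stays within 64.
  return `${originalName.slice(0, LIMIT - 9)}_${suffix}`
}

// e.g. a hypothetical long MCP tool name:
// truncateToolName("mcp__very_long_server_name__extremely_descriptive_tool_name_v2_final")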
0,\n output_tokens: response.usage?.completion_tokens ?? 0,\n },\n }\n }\n\n // Merge content from all choices\n const allTextBlocks: Array<AnthropicTextBlock> = []\n const allToolUseBlocks: Array<AnthropicToolUseBlock> = []\n let stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null =\n null // default\n stopReason = response.choices[0]?.finish_reason ?? stopReason\n\n // Process all choices to extract text and tool use blocks\n for (const choice of response.choices) {\n const textBlocks = getAnthropicTextBlocks(choice.message.content)\n const toolUseBlocks = getAnthropicToolUseBlocks(\n choice.message.tool_calls,\n toolNameMapping,\n )\n\n allTextBlocks.push(...textBlocks)\n allToolUseBlocks.push(...toolUseBlocks)\n\n // Use the finish_reason from the first choice, or prioritize tool_calls\n if (choice.finish_reason === \"tool_calls\" || stopReason === \"stop\") {\n stopReason = choice.finish_reason\n }\n }\n\n // Note: GitHub Copilot doesn't generate thinking blocks, so we don't include them in responses\n\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: [...allTextBlocks, ...allToolUseBlocks],\n stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),\n stop_sequence: null,\n usage: {\n input_tokens:\n (response.usage?.prompt_tokens ?? 0)\n - (response.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: response.usage?.completion_tokens ?? 0,\n ...(response.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n response.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n }\n}\n\nfunction getAnthropicTextBlocks(\n messageContent: Message[\"content\"],\n): Array<AnthropicTextBlock> {\n if (typeof messageContent === \"string\") {\n return [{ type: \"text\", text: messageContent }]\n }\n\n if (Array.isArray(messageContent)) {\n return messageContent\n .filter((part): part is TextPart => part.type === \"text\")\n .map((part) => ({ type: \"text\", text: part.text }))\n }\n\n return []\n}\n\nfunction getAnthropicToolUseBlocks(\n toolCalls: Array<ToolCall> | undefined,\n toolNameMapping?: ToolNameMapping,\n): Array<AnthropicToolUseBlock> {\n if (!toolCalls) {\n return []\n }\n return toolCalls.map((toolCall) => {\n let input: Record<string, unknown> = {}\n try {\n input = JSON.parse(toolCall.function.arguments) as Record<string, unknown>\n } catch (error) {\n consola.warn(\n `Failed to parse tool call arguments for ${toolCall.function.name}:`,\n error,\n )\n }\n\n // Restore original tool name if it was truncated\n const originalName =\n toolNameMapping?.truncatedToOriginal.get(toolCall.function.name)\n ?? 
toolCall.function.name\n\n return {\n type: \"tool_use\",\n id: toolCall.id,\n name: originalName,\n input,\n }\n })\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\n\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\n\nimport { type AnthropicMessagesPayload } from \"./anthropic-types\"\nimport { translateToOpenAI } from \"./non-stream-translation\"\n\n/**\n * Handles token counting for Anthropic messages\n */\nexport async function handleCountTokens(c: Context) {\n try {\n const anthropicBeta = c.req.header(\"anthropic-beta\")\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n\n const { payload: openAIPayload } = translateToOpenAI(anthropicPayload)\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === anthropicPayload.model,\n )\n\n if (!selectedModel) {\n consola.warn(\"Model not found, returning default token count\")\n return c.json({\n input_tokens: 1,\n })\n }\n\n const tokenCount = await getTokenCount(openAIPayload, selectedModel)\n\n if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {\n let mcpToolExist = false\n if (anthropicBeta?.startsWith(\"claude-code\")) {\n mcpToolExist = anthropicPayload.tools.some((tool) =>\n tool.name.startsWith(\"mcp__\"),\n )\n }\n if (!mcpToolExist) {\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // Base token overhead for tool use capability\n // See: https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview#pricing\n tokenCount.input = tokenCount.input + 346\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n // Estimated base token overhead for Grok tool use (empirically determined)\n tokenCount.input = tokenCount.input + 480\n }\n }\n }\n\n let finalTokenCount = tokenCount.input + tokenCount.output\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // Apply 15% buffer for Claude models to account for tokenization differences\n // between the GPT tokenizer used here and Claude's actual tokenizer\n finalTokenCount = Math.round(finalTokenCount * 1.15)\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n // Apply 3% buffer for Grok models (smaller difference from GPT tokenizer)\n finalTokenCount = Math.round(finalTokenCount * 1.03)\n }\n\n consola.info(\"Token count:\", finalTokenCount)\n\n return c.json({\n input_tokens: finalTokenCount,\n })\n } catch (error) {\n consola.error(\"Error counting tokens:\", error)\n return c.json({\n input_tokens: 1,\n })\n }\n}\n","import { type ChatCompletionChunk } from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicStreamEventData,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport { type ToolNameMapping } from \"./non-stream-translation\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\nfunction isToolBlockOpen(state: AnthropicStreamState): boolean {\n if (!state.contentBlockOpen) {\n return false\n }\n // Check if the current block index corresponds to any known tool call\n return Object.values(state.toolCalls).some(\n (tc) => tc.anthropicBlockIndex === state.contentBlockIndex,\n )\n}\n\n// eslint-disable-next-line max-lines-per-function, complexity\nexport function translateChunkToAnthropicEvents(\n chunk: ChatCompletionChunk,\n state: AnthropicStreamState,\n toolNameMapping?: ToolNameMapping,\n): Array<AnthropicStreamEventData> {\n const events: Array<AnthropicStreamEventData> = []\n\n // Skip chunks with empty choices (e.g., first chunk with prompt_filter_results)\n if 
(chunk.choices.length === 0) {\n // Store model for later if available (some chunks have model but empty choices)\n if (chunk.model && !state.model) {\n state.model = chunk.model\n }\n return events\n }\n\n const choice = chunk.choices[0]\n const { delta } = choice\n\n if (!state.messageStartSent) {\n // Use model from current chunk, or from stored state (from earlier empty chunk)\n const model = chunk.model || state.model || \"unknown\"\n events.push({\n type: \"message_start\",\n message: {\n id: chunk.id || `msg_${Date.now()}`,\n type: \"message\",\n role: \"assistant\",\n content: [],\n model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: 0, // Will be updated in message_delta when finished\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n })\n state.messageStartSent = true\n }\n\n if (delta.content) {\n if (isToolBlockOpen(state)) {\n // A tool block was open, so close it before starting a text block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n if (!state.contentBlockOpen) {\n events.push({\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"text\",\n text: \"\",\n },\n })\n state.contentBlockOpen = true\n }\n\n events.push({\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"text_delta\",\n text: delta.content,\n },\n })\n }\n\n if (delta.tool_calls) {\n for (const toolCall of delta.tool_calls) {\n if (toolCall.id && toolCall.function?.name) {\n // New tool call starting.\n if (state.contentBlockOpen) {\n // Close any previously open block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n // Restore original tool name if it was truncated\n const originalName =\n toolNameMapping?.truncatedToOriginal.get(toolCall.function.name)\n ?? toolCall.function.name\n\n const anthropicBlockIndex = state.contentBlockIndex\n state.toolCalls[toolCall.index] = {\n id: toolCall.id,\n name: originalName,\n anthropicBlockIndex,\n }\n\n events.push({\n type: \"content_block_start\",\n index: anthropicBlockIndex,\n content_block: {\n type: \"tool_use\",\n id: toolCall.id,\n name: originalName,\n input: {},\n },\n })\n state.contentBlockOpen = true\n }\n\n if (toolCall.function?.arguments) {\n const toolCallInfo = state.toolCalls[toolCall.index]\n // Tool call can still be empty\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (toolCallInfo) {\n events.push({\n type: \"content_block_delta\",\n index: toolCallInfo.anthropicBlockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: toolCall.function.arguments,\n },\n })\n }\n }\n }\n }\n\n if (choice.finish_reason) {\n if (state.contentBlockOpen) {\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockOpen = false\n }\n\n events.push(\n {\n type: \"message_delta\",\n delta: {\n stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),\n stop_sequence: null,\n },\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 
0),\n output_tokens: chunk.usage?.completion_tokens ?? 0,\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n {\n type: \"message_stop\",\n },\n )\n }\n\n return events\n}\n\nexport function translateErrorToAnthropicErrorEvent(): AnthropicStreamEventData {\n return {\n type: \"error\",\n error: {\n type: \"api_error\",\n message: \"An unexpected error occurred during streaming.\",\n },\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport {\n type MessageContent,\n recordRequest,\n recordResponse,\n} from \"~/lib/history\"\nimport { executeWithRateLimit } from \"~/lib/queue\"\nimport { state } from \"~/lib/state\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicMessagesPayload,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport {\n translateToAnthropic,\n translateToOpenAI,\n} from \"./non-stream-translation\"\nimport {\n translateChunkToAnthropicEvents,\n translateErrorToAnthropicErrorEvent,\n} from \"./stream-translation\"\n\nexport async function handleCompletion(c: Context) {\n const startTime = Date.now()\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n consola.debug(\"Anthropic request payload:\", JSON.stringify(anthropicPayload))\n\n // Record request to history with full message content\n const historyId = recordRequest(\"anthropic\", {\n model: anthropicPayload.model,\n messages: convertAnthropicMessages(anthropicPayload.messages),\n stream: anthropicPayload.stream ?? false,\n tools: anthropicPayload.tools?.map((t) => ({\n name: t.name,\n description: t.description,\n })),\n max_tokens: anthropicPayload.max_tokens,\n temperature: anthropicPayload.temperature,\n system: extractSystemPrompt(anthropicPayload.system),\n })\n\n const { payload: openAIPayload, toolNameMapping } =\n translateToOpenAI(anthropicPayload)\n consola.debug(\n \"Translated OpenAI request payload:\",\n JSON.stringify(openAIPayload),\n )\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n try {\n // Use queue-based rate limiting\n const response = await executeWithRateLimit(state, () =>\n createChatCompletions(openAIPayload),\n )\n\n if (isNonStreaming(response)) {\n consola.debug(\n \"Non-streaming response from Copilot:\",\n JSON.stringify(response).slice(-400),\n )\n const anthropicResponse = translateToAnthropic(response, toolNameMapping)\n consola.debug(\n \"Translated Anthropic response:\",\n JSON.stringify(anthropicResponse),\n )\n\n // Record response to history with full content\n recordResponse(\n historyId,\n {\n success: true,\n model: anthropicResponse.model,\n usage: anthropicResponse.usage,\n stop_reason: anthropicResponse.stop_reason ?? 
undefined,\n content: {\n role: \"assistant\",\n content: anthropicResponse.content.map((block) => {\n if (block.type === \"text\") {\n return { type: \"text\", text: block.text }\n }\n if (block.type === \"tool_use\") {\n return {\n type: \"tool_use\",\n id: block.id,\n name: block.name,\n input: JSON.stringify(block.input),\n }\n }\n return { type: block.type }\n }),\n },\n toolCalls: extractToolCallsFromContent(anthropicResponse.content),\n },\n Date.now() - startTime,\n )\n\n return c.json(anthropicResponse)\n }\n\n consola.debug(\"Streaming response from Copilot\")\n return streamSSE(c, async (stream) => {\n const streamState: AnthropicStreamState = {\n messageStartSent: false,\n contentBlockIndex: 0,\n contentBlockOpen: false,\n toolCalls: {},\n }\n\n // Accumulate stream data for history\n let streamModel = \"\"\n let streamInputTokens = 0\n let streamOutputTokens = 0\n let streamStopReason = \"\"\n let streamContent = \"\"\n const streamToolCalls: Array<{\n id: string\n name: string\n input: string\n }> = []\n let currentToolCall: { id: string; name: string; input: string } | null =\n null\n\n try {\n for await (const rawEvent of response) {\n consola.debug(\"Copilot raw stream event:\", JSON.stringify(rawEvent))\n if (rawEvent.data === \"[DONE]\") {\n break\n }\n\n if (!rawEvent.data) {\n continue\n }\n\n let chunk: ChatCompletionChunk\n try {\n chunk = JSON.parse(rawEvent.data) as ChatCompletionChunk\n } catch (parseError) {\n consola.error(\n \"Failed to parse stream chunk:\",\n parseError,\n rawEvent.data,\n )\n continue\n }\n\n // Capture model from chunk\n if (chunk.model && !streamModel) {\n streamModel = chunk.model\n }\n\n const events = translateChunkToAnthropicEvents(\n chunk,\n streamState,\n toolNameMapping,\n )\n\n for (const event of events) {\n consola.debug(\"Translated Anthropic event:\", JSON.stringify(event))\n\n // Capture data for history\n switch (event.type) {\n case \"content_block_delta\": {\n if (\"text\" in event.delta) {\n streamContent += event.delta.text\n } else if (\"partial_json\" in event.delta && currentToolCall) {\n currentToolCall.input += event.delta.partial_json\n }\n\n break\n }\n case \"content_block_start\": {\n if (event.content_block.type === \"tool_use\") {\n currentToolCall = {\n id: event.content_block.id,\n name: event.content_block.name,\n input: \"\",\n }\n }\n\n break\n }\n case \"content_block_stop\": {\n if (currentToolCall) {\n streamToolCalls.push(currentToolCall)\n currentToolCall = null\n }\n\n break\n }\n case \"message_delta\": {\n if (event.delta.stop_reason) {\n streamStopReason = event.delta.stop_reason\n }\n if (event.usage) {\n streamInputTokens = event.usage.input_tokens ?? 
0\n streamOutputTokens = event.usage.output_tokens\n }\n\n break\n }\n // No default\n }\n\n await stream.writeSSE({\n event: event.type,\n data: JSON.stringify(event),\n })\n }\n }\n\n // Record streaming response to history with full content\n const contentBlocks: Array<{ type: string; text?: string }> = []\n if (streamContent) {\n contentBlocks.push({ type: \"text\", text: streamContent })\n }\n for (const tc of streamToolCalls) {\n contentBlocks.push({\n type: \"tool_use\",\n ...tc,\n })\n }\n\n recordResponse(\n historyId,\n {\n success: true,\n model: streamModel || anthropicPayload.model,\n usage: {\n input_tokens: streamInputTokens,\n output_tokens: streamOutputTokens,\n },\n stop_reason: streamStopReason || undefined,\n content:\n contentBlocks.length > 0 ?\n { role: \"assistant\", content: contentBlocks }\n : null,\n toolCalls:\n streamToolCalls.length > 0 ?\n streamToolCalls.map((tc) => ({\n id: tc.id,\n name: tc.name,\n input: tc.input,\n }))\n : undefined,\n },\n Date.now() - startTime,\n )\n } catch (error) {\n consola.error(\"Stream error:\", error)\n\n // Record error to history\n recordResponse(\n historyId,\n {\n success: false,\n model: streamModel || anthropicPayload.model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Stream error\",\n content: null,\n },\n Date.now() - startTime,\n )\n\n const errorEvent = translateErrorToAnthropicErrorEvent()\n await stream.writeSSE({\n event: errorEvent.type,\n data: JSON.stringify(errorEvent),\n })\n }\n })\n } catch (error) {\n // Record error to history\n recordResponse(\n historyId,\n {\n success: false,\n model: anthropicPayload.model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Unknown error\",\n content: null,\n },\n Date.now() - startTime,\n )\n throw error\n }\n}\n\n// Convert Anthropic messages to history MessageContent format\nfunction convertAnthropicMessages(\n messages: AnthropicMessagesPayload[\"messages\"],\n): Array<MessageContent> {\n return messages.map((msg) => {\n if (typeof msg.content === \"string\") {\n return { role: msg.role, content: msg.content }\n }\n\n // Convert content blocks\n const content = msg.content.map((block) => {\n if (block.type === \"text\") {\n return { type: \"text\", text: block.text }\n }\n if (block.type === \"tool_use\") {\n return {\n type: \"tool_use\",\n id: block.id,\n name: block.name,\n input: JSON.stringify(block.input),\n }\n }\n if (block.type === \"tool_result\") {\n const resultContent =\n typeof block.content === \"string\" ?\n block.content\n : block.content\n .map((c) => (c.type === \"text\" ? 
c.text : `[${c.type}]`))\n .join(\"\\n\")\n return {\n type: \"tool_result\",\n tool_use_id: block.tool_use_id,\n content: resultContent,\n }\n }\n return { type: block.type }\n })\n\n return { role: msg.role, content }\n })\n}\n\n// Extract system prompt from Anthropic format\nfunction extractSystemPrompt(\n system: AnthropicMessagesPayload[\"system\"],\n): string | undefined {\n if (!system) return undefined\n if (typeof system === \"string\") return system\n return system.map((block) => block.text).join(\"\\n\")\n}\n\n// Extract tool calls from response content\nfunction extractToolCallsFromContent(\n content: Array<unknown>,\n): Array<{ id: string; name: string; input: string }> | undefined {\n const tools: Array<{ id: string; name: string; input: string }> = []\n for (const block of content) {\n if (\n typeof block === \"object\"\n && block !== null\n && \"type\" in block\n && block.type === \"tool_use\"\n && \"id\" in block\n && \"name\" in block\n && \"input\" in block\n ) {\n tools.push({\n id: String(block.id),\n name: String(block.name),\n input: JSON.stringify(block.input),\n })\n }\n }\n return tools.length > 0 ? tools : undefined\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCountTokens } from \"./count-tokens-handler\"\nimport { handleCompletion } from \"./handler\"\n\nexport const messageRoutes = new Hono()\n\nmessageRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n\nmessageRoutes.post(\"/count_tokens\", async (c) => {\n try {\n return await handleCountTokens(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\nimport { cacheModels } from \"~/lib/utils\"\n\nexport const modelRoutes = new Hono()\n\nmodelRoutes.get(\"/\", async (c) => {\n try {\n if (!state.models) {\n // This should be handled by startup logic, but as a fallback.\n await cacheModels()\n }\n\n const models = state.models?.data.map((model) => ({\n id: model.id,\n object: \"model\",\n type: \"model\",\n created: 0, // No date available from source\n created_at: new Date(0).toISOString(), // No date available from source\n owned_by: model.vendor,\n display_name: model.name,\n }))\n\n return c.json({\n object: \"list\",\n data: models,\n has_more: false,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const tokenRoute = new Hono()\n\ntokenRoute.get(\"/\", async (c) => {\n try {\n return c.json({\n token: state.copilotToken,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { getCopilotUsage } from \"~/services/github/get-copilot-usage\"\n\nexport const usageRoute = new Hono()\n\nusageRoute.get(\"/\", async (c) => {\n try {\n const usage = await getCopilotUsage()\n return c.json(usage)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\nimport { cors } from \"hono/cors\"\nimport { logger } from \"hono/logger\"\n\nimport { state } from 
\"./lib/state\"\nimport { completionRoutes } from \"./routes/chat-completions/route\"\nimport { embeddingRoutes } from \"./routes/embeddings/route\"\nimport { eventLoggingRoutes } from \"./routes/event-logging/route\"\nimport { historyRoutes } from \"./routes/history/route\"\nimport { messageRoutes } from \"./routes/messages/route\"\nimport { modelRoutes } from \"./routes/models/route\"\nimport { tokenRoute } from \"./routes/token/route\"\nimport { usageRoute } from \"./routes/usage/route\"\n\nexport const server = new Hono()\n\nserver.use(logger())\nserver.use(cors())\n\nserver.get(\"/\", (c) => c.text(\"Server running\"))\n\n// Health check endpoint for container orchestration (Docker, Kubernetes)\nserver.get(\"/health\", (c) => {\n const healthy = Boolean(state.copilotToken && state.githubToken)\n return c.json(\n {\n status: healthy ? \"healthy\" : \"unhealthy\",\n checks: {\n copilotToken: Boolean(state.copilotToken),\n githubToken: Boolean(state.githubToken),\n models: Boolean(state.models),\n },\n },\n healthy ? 200 : 503,\n )\n})\n\nserver.route(\"/chat/completions\", completionRoutes)\nserver.route(\"/models\", modelRoutes)\nserver.route(\"/embeddings\", embeddingRoutes)\nserver.route(\"/usage\", usageRoute)\nserver.route(\"/token\", tokenRoute)\n\n// Compatibility with tools that expect v1/ prefix\nserver.route(\"/v1/chat/completions\", completionRoutes)\nserver.route(\"/v1/models\", modelRoutes)\nserver.route(\"/v1/embeddings\", embeddingRoutes)\n\n// Anthropic compatible endpoints\nserver.route(\"/v1/messages\", messageRoutes)\nserver.route(\"/api/event_logging\", eventLoggingRoutes)\n\n// History viewer (optional, enabled with --history flag)\nserver.route(\"/history\", historyRoutes)\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport clipboard from \"clipboardy\"\nimport consola from \"consola\"\nimport { serve, type ServerHandler } from \"srvx\"\nimport invariant from \"tiny-invariant\"\n\nimport { initHistory } from \"./lib/history\"\nimport { ensurePaths } from \"./lib/paths\"\nimport { initProxyFromEnv } from \"./lib/proxy\"\nimport { generateEnvScript } from \"./lib/shell\"\nimport { state } from \"./lib/state\"\nimport { setupCopilotToken, setupGitHubToken } from \"./lib/token\"\nimport { cacheModels, cacheVSCodeVersion } from \"./lib/utils\"\nimport { server } from \"./server\"\n\ninterface RunServerOptions {\n port: number\n host?: string\n verbose: boolean\n accountType: string\n manual: boolean\n rateLimit?: number\n rateLimitWait: boolean\n githubToken?: string\n claudeCode: boolean\n showToken: boolean\n proxyEnv: boolean\n history: boolean\n historyLimit: number\n}\n\nexport async function runServer(options: RunServerOptions): Promise<void> {\n if (options.proxyEnv) {\n initProxyFromEnv()\n }\n\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.accountType = options.accountType\n if (options.accountType !== \"individual\") {\n consola.info(`Using ${options.accountType} plan GitHub account`)\n }\n\n state.manualApprove = options.manual\n state.rateLimitSeconds = options.rateLimit\n state.rateLimitWait = options.rateLimitWait\n state.showToken = options.showToken\n\n // Initialize history recording if enabled\n initHistory(options.history, options.historyLimit)\n if (options.history) {\n consola.info(`History recording enabled (max ${options.historyLimit} entries)`)\n }\n\n await ensurePaths()\n await cacheVSCodeVersion()\n\n if (options.githubToken) {\n state.githubToken = 
options.githubToken\n consola.info(\"Using provided GitHub token\")\n } else {\n await setupGitHubToken()\n }\n\n await setupCopilotToken()\n await cacheModels()\n\n consola.info(\n `Available models: \\n${state.models?.data.map((model) => `- ${model.id}`).join(\"\\n\")}`,\n )\n\n const displayHost = options.host ?? \"localhost\"\n const serverUrl = `http://${displayHost}:${options.port}`\n\n if (options.claudeCode) {\n invariant(state.models, \"Models should be loaded by now\")\n\n const selectedModel = await consola.prompt(\n \"Select a model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const selectedSmallModel = await consola.prompt(\n \"Select a small model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const command = generateEnvScript(\n {\n ANTHROPIC_BASE_URL: serverUrl,\n ANTHROPIC_AUTH_TOKEN: \"dummy\",\n ANTHROPIC_MODEL: selectedModel,\n ANTHROPIC_DEFAULT_SONNET_MODEL: selectedModel,\n ANTHROPIC_SMALL_FAST_MODEL: selectedSmallModel,\n ANTHROPIC_DEFAULT_HAIKU_MODEL: selectedSmallModel,\n DISABLE_NON_ESSENTIAL_MODEL_CALLS: \"1\",\n CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC: \"1\",\n },\n \"claude\",\n )\n\n try {\n clipboard.writeSync(command)\n consola.success(\"Copied Claude Code command to clipboard!\")\n } catch {\n consola.warn(\n \"Failed to copy to clipboard. Here is the Claude Code command:\",\n )\n consola.log(command)\n }\n }\n\n consola.box(\n `🌐 Usage Viewer: https://ericc-ch.github.io/copilot-api?endpoint=${serverUrl}/usage${options.history ? `\\n📜 History UI: ${serverUrl}/history` : \"\"}`,\n )\n\n serve({\n fetch: server.fetch as ServerHandler,\n port: options.port,\n hostname: options.host,\n })\n}\n\nexport const start = defineCommand({\n meta: {\n name: \"start\",\n description: \"Start the Copilot API server\",\n },\n args: {\n port: {\n alias: \"p\",\n type: \"string\",\n default: \"4141\",\n description: \"Port to listen on\",\n },\n host: {\n alias: \"H\",\n type: \"string\",\n description:\n \"Host/interface to bind to (e.g., 127.0.0.1 for localhost only, 0.0.0.0 for all interfaces)\",\n },\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"account-type\": {\n alias: \"a\",\n type: \"string\",\n default: \"individual\",\n description: \"Account type to use (individual, business, enterprise)\",\n },\n manual: {\n type: \"boolean\",\n default: false,\n description: \"Enable manual request approval\",\n },\n \"rate-limit\": {\n alias: \"r\",\n type: \"string\",\n description: \"Rate limit in seconds between requests\",\n },\n wait: {\n alias: \"w\",\n type: \"boolean\",\n default: false,\n description:\n \"Wait instead of error when rate limit is hit. 
Has no effect if rate limit is not set\",\n },\n \"github-token\": {\n alias: \"g\",\n type: \"string\",\n description:\n \"Provide GitHub token directly (must be generated using the `auth` subcommand)\",\n },\n \"claude-code\": {\n alias: \"c\",\n type: \"boolean\",\n default: false,\n description:\n \"Generate a command to launch Claude Code with Copilot API config\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub and Copilot tokens on fetch and refresh\",\n },\n \"proxy-env\": {\n type: \"boolean\",\n default: false,\n description: \"Initialize proxy from environment variables\",\n },\n history: {\n type: \"boolean\",\n default: false,\n description: \"Enable request history recording and Web UI at /history\",\n },\n \"history-limit\": {\n type: \"string\",\n default: \"1000\",\n description: \"Maximum number of history entries to keep in memory\",\n },\n },\n run({ args }) {\n const rateLimitRaw = args[\"rate-limit\"]\n const rateLimit =\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n rateLimitRaw === undefined ? undefined : Number.parseInt(rateLimitRaw, 10)\n\n return runServer({\n port: Number.parseInt(args.port, 10),\n host: args.host,\n verbose: args.verbose,\n accountType: args[\"account-type\"],\n manual: args.manual,\n rateLimit,\n rateLimitWait: args.wait,\n githubToken: args[\"github-token\"],\n claudeCode: args[\"claude-code\"],\n showToken: args[\"show-token\"],\n proxyEnv: args[\"proxy-env\"],\n history: args.history,\n historyLimit: Number.parseInt(args[\"history-limit\"], 10),\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand, runMain } from \"citty\"\nimport consola from \"consola\"\n\nimport { auth } from \"./auth\"\nimport { checkUsage } from \"./check-usage\"\nimport { debug } from \"./debug\"\nimport { logout } from \"./logout\"\nimport { start } from \"./start\"\n\n// Configure consola to show timestamps in log output\nconsola.options.formatOptions.date = true\n\nconst main = defineCommand({\n meta: {\n name: \"copilot-api\",\n description:\n \"A wrapper around GitHub Copilot API to make it OpenAI compatible, making it usable for other tools.\",\n },\n subCommands: { auth, logout, start, \"check-usage\": checkUsage, debug },\n})\n\nawait 
runMain(main)\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAIA,MAAM,UAAU,KAAK,KAAK,GAAG,SAAS,EAAE,UAAU,SAAS,cAAc;AAEzE,MAAM,oBAAoB,KAAK,KAAK,SAAS,eAAe;AAE5D,MAAa,QAAQ;CACnB;CACA;CACD;AAED,eAAsB,cAA6B;AACjD,OAAM,GAAG,MAAM,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAClD,OAAM,WAAW,MAAM,kBAAkB;;AAG3C,eAAe,WAAW,UAAiC;AACzD,KAAI;AACF,QAAM,GAAG,OAAO,UAAU,GAAG,UAAU,KAAK;AAI5C,QAFc,MAAM,GAAG,KAAK,SAAS,EACX,OAAO,SACb,IAClB,OAAM,GAAG,MAAM,UAAU,IAAM;SAE3B;AACN,QAAM,GAAG,UAAU,UAAU,GAAG;AAChC,QAAM,GAAG,MAAM,UAAU,IAAM;;;;;;ACVnC,MAAaA,QAAe;CAC1B,aAAa;CACb,eAAe;CACf,eAAe;CACf,WAAW;CACZ;;;;ACpBD,MAAa,yBAAyB;CACpC,gBAAgB;CAChB,QAAQ;CACT;AAED,MAAM,kBAAkB;AACxB,MAAM,wBAAwB,gBAAgB;AAC9C,MAAM,aAAa,qBAAqB;AAExC,MAAM,cAAc;AAEpB,MAAa,kBAAkB,YAC7BC,QAAM,gBAAgB,eACpB,kCACA,eAAeA,QAAM,YAAY;AACrC,MAAa,kBAAkB,SAAc,SAAkB,UAAU;CACvE,MAAMC,UAAkC;EACtC,eAAe,UAAUD,QAAM;EAC/B,gBAAgB,iBAAiB,CAAC;EAClC,0BAA0B;EAC1B,kBAAkB,UAAUA,QAAM;EAClC,yBAAyB;EACzB,cAAc;EACd,iBAAiB;EACjB,wBAAwB;EACxB,gBAAgB,YAAY;EAC5B,uCAAuC;EACxC;AAED,KAAI,OAAQ,SAAQ,4BAA4B;AAEhD,QAAO;;AAGT,MAAa,sBAAsB;AACnC,MAAa,iBAAiB,aAAkB;CAC9C,GAAG,iBAAiB;CACpB,eAAe,SAASA,QAAM;CAC9B,kBAAkB,UAAUA,QAAM;CAClC,yBAAyB;CACzB,cAAc;CACd,wBAAwB;CACxB,uCAAuC;CACxC;AAED,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,oBAAoB,CAAC,YAAY,CAAC,KAAK,IAAI;;;;AC9CxD,IAAa,YAAb,MAAa,kBAAkB,MAAM;CACnC;CACA;CAEA,YAAY,SAAiB,QAAgB,cAAsB;AACjE,QAAM,QAAQ;AACd,OAAK,SAAS;AACd,OAAK,eAAe;;CAGtB,aAAa,aACX,SACA,UACoB;EACpB,MAAM,OAAO,MAAM,SAAS,MAAM;AAClC,SAAO,IAAI,UAAU,SAAS,SAAS,QAAQ,KAAK;;;AAIxD,eAAsB,aAAa,GAAY,OAAgB;AAC7D,SAAQ,MAAM,mBAAmB,MAAM;AAEvC,KAAI,iBAAiB,WAAW;EAC9B,IAAIE;AACJ,MAAI;AACF,eAAY,KAAK,MAAM,MAAM,aAAa;UACpC;AACN,eAAY,MAAM;;AAEpB,UAAQ,MAAM,eAAe,UAAU;AACvC,SAAO,EAAE,KACP,EACE,OAAO;GACL,SAAS,MAAM;GACf,MAAM;GACP,EACF,EACD,MAAM,OACP;;AAGH,QAAO,EAAE,KACP,EACE,OAAO;EACL,SAAU,MAAgB;EAC1B,MAAM;EACP,EACF,EACD,IACD;;;;;AClDH,MAAa,kBAAkB,YAAY;CACzC,MAAM,WAAW,MAAM,MACrB,GAAG,oBAAoB,6BACvB,EACE,SAAS,cAAc,MAAM,EAC9B,CACF;AAED,KAAI,CAAC,SAAS,GAAI,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAE7F,QAAQ,MAAM,SAAS,MAAM;;;;;ACN/B,eAAsB,gBAA6C;CACjE,MAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,qBAAqB;EACnE,QAAQ;EACR,SAAS,iBAAiB;EAC1B,MAAM,KAAK,UAAU;GACnB,WAAW;GACX,OAAO;GACR,CAAC;EACH,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,MAAM,UAAU,aAAa,6BAA6B,SAAS;AAE3F,QAAQ,MAAM,SAAS,MAAM;;;;;AChB/B,eAAsB,gBAAgB;CACpC,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,QAAQ,EAC1D,SAAS;EACP,eAAe,SAAS,MAAM;EAC9B,GAAG,iBAAiB;EACrB,EACF,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,MAAM,UAAU,aAAa,6BAA6B,SAAS;AAE3F,QAAQ,MAAM,SAAS,MAAM;;;;;ACV/B,MAAa,YAAY,YAAY;CACnC,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,UAAU,EAC9D,SAAS,eAAe,MAAM,EAC/B,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,MAAM,UAAU,aAAa,wBAAwB,SAAS;AAEtF,QAAQ,MAAM,SAAS,MAAM;;;;;ACX/B,MAAM,WAAW;AAGjB,MAAM,iBAAiB;AAMvB,eAAsB,mBAAmB;CACvC,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB;AAC/B,aAAW,OAAO;IACjB,IAAK;AAER,KAAI;EACF,MAAM,WAAW,MAAM,MAAM,gBAAgB;GAC3C,QAAQ,WAAW;GACnB,SAAS;IACP,QAAQ;IACR,cAAc;IACf;GACF,CAAC;AAEF,MAAI,CAAC,SAAS,GACZ,QAAO;EAKT,MAAM,WAFW,MAAM,SAAS,MAAM,EAEd;AACxB,MAAI,WAAW,kBAAkB,KAAK,QAAQ,CAC5C,QAAO;AAGT,SAAO;SACD;AACN,SAAO;WACC;AACR,eAAa,QAAQ;;;;;;AChCzB,MAAa,SAAS,OACpB,IAAI,SAAS,YAAY;AACvB,YAAW,SAAS,GAAG;EACvB;AAEJ,MAAa,aAAa,UACxB,UAAU,QAAQ,UAAU;AAE9B,eAAsB,cAA6B;AAEjD,OAAM,SADS,MAAM,WAAW;;AAIlC,MAAa,qBAAqB,YAAY;CAC5C,MAAM,WAAW,MAAM,kBAAkB;AACzC,OAAM,gBAAgB;AAEtB,SAAQ,KAAK,yBAAyB,WAAW;;;;;ACbnD,eAAsB,gBACpB,YACiB;CAGjB,MAAM,iBAAiB,WAAW,WAAW,KAAK;AAClD,SAAQ,MAAM,yCAAyC,cAAc,IAAI;CAGzE,MAAM,YAAY,KAAK,KAAK,GAAG,WAAW,aAAa;AAEvD,QAAO,KAAK,KAAK,GAAG,WAAW;EAC7B,MAAM,WAAW,MAAM,MACrB,GAAG,gBAAgB,4BACnB;GACE,QAAQ;GACR,SAAS,iBAAiB;GAC1B,MAAM,KAAK,UAAU;IACn
B,WAAW;IACX,aAAa,WAAW;IACxB,YAAY;IACb,CAAC;GACH,CACF;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,SAAM,MAAM,cAAc;AAC1B,WAAQ,MAAM,gCAAgC,MAAM,SAAS,MAAM,CAAC;AAEpE;;EAGF,MAAM,OAAO,MAAM,SAAS,MAAM;AAClC,UAAQ,MAAM,kCAAkC,KAAK;EAErD,MAAM,EAAE,iBAAiB;AAEzB,MAAI,aACF,QAAO;MAEP,OAAM,MAAM,cAAc;;AAI9B,OAAM,IAAI,MACR,iEACD;;;;;AC7CH,MAAM,wBAAwB,GAAG,SAAS,MAAM,mBAAmB,OAAO;AAE1E,MAAM,oBAAoB,UACxB,GAAG,UAAU,MAAM,mBAAmB,MAAM;AAE9C,MAAa,oBAAoB,YAAY;CAC3C,MAAM,EAAE,OAAO,eAAe,MAAM,iBAAiB;AACrD,OAAM,eAAe;AAGrB,SAAQ,MAAM,6CAA6C;AAC3D,KAAI,MAAM,UACR,SAAQ,KAAK,kBAAkB,MAAM;CAGvC,MAAM,mBAAmB,aAAa,MAAM;AAC5C,aAAY,YAAY;AACtB,UAAQ,MAAM,2BAA2B;AACzC,MAAI;GACF,MAAM,EAAE,mBAAU,MAAM,iBAAiB;AACzC,SAAM,eAAeC;AACrB,WAAQ,MAAM,0BAA0B;AACxC,OAAI,MAAM,UACR,SAAQ,KAAK,4BAA4BA,QAAM;WAE1C,OAAO;AAId,WAAQ,MAAM,kEAAkE,MAAM;;IAEvF,gBAAgB;;AAOrB,eAAsB,iBACpB,SACe;AACf,KAAI;EACF,MAAM,cAAc,MAAM,iBAAiB;AAE3C,MAAI,eAAe,CAAC,SAAS,OAAO;AAClC,SAAM,cAAc;AACpB,OAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,YAAY;AAE5C,SAAM,SAAS;AAEf;;AAGF,UAAQ,KAAK,0CAA0C;EACvD,MAAM,WAAW,MAAM,eAAe;AACtC,UAAQ,MAAM,yBAAyB,SAAS;AAEhD,UAAQ,KACN,0BAA0B,SAAS,UAAU,OAAO,SAAS,mBAC9D;EAED,MAAM,QAAQ,MAAM,gBAAgB,SAAS;AAC7C,QAAM,iBAAiB,MAAM;AAC7B,QAAM,cAAc;AAEpB,MAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,MAAM;AAEtC,QAAM,SAAS;UACR,OAAO;AACd,MAAI,iBAAiB,WAAW;AAC9B,WAAQ,MAAM,+BAA+B,MAAM,aAAa;AAChE,SAAM;;AAGR,UAAQ,MAAM,+BAA+B,MAAM;AACnD,QAAM;;;AAIV,eAAe,UAAU;CACvB,MAAM,OAAO,MAAM,eAAe;AAClC,SAAQ,KAAK,gBAAgB,KAAK,QAAQ;;;;;ACjF5C,eAAsB,QAAQ,SAAwC;AACpE,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,iBAAiB,EAAE,OAAO,MAAM,CAAC;AACvC,SAAQ,QAAQ,2BAA2B,MAAM,kBAAkB;;AAGrE,MAAa,OAAO,cAAc;CAChC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,QAAQ;GACb,SAAS,KAAK;GACd,WAAW,KAAK;GACjB,CAAC;;CAEL,CAAC;;;;AC/CF,MAAa,kBAAkB,YAA2C;CACxE,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,yBAAyB,EAC3E,SAAS,cAAc,MAAM,EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAG7E,QAAQ,MAAM,SAAS,MAAM;;;;;ACH/B,MAAa,aAAa,cAAc;CACtC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,MAAM;AACV,QAAM,aAAa;AACnB,QAAM,kBAAkB;AACxB,MAAI;GACF,MAAM,QAAQ,MAAM,iBAAiB;GACrC,MAAM,UAAU,MAAM,gBAAgB;GACtC,MAAM,eAAe,QAAQ;GAC7B,MAAM,cAAc,eAAe,QAAQ;GAC3C,MAAM,qBACJ,eAAe,IAAK,cAAc,eAAgB,MAAM;GAC1D,MAAM,0BAA0B,QAAQ;GAGxC,SAAS,eAAe,MAAc,MAA+B;AACnE,QAAI,CAAC,KAAM,QAAO,GAAG,KAAK;IAC1B,MAAM,QAAQ,KAAK;IACnB,MAAM,OAAO,QAAQ,KAAK;IAC1B,MAAM,cAAc,QAAQ,IAAK,OAAO,QAAS,MAAM;IACvD,MAAM,mBAAmB,KAAK;AAC9B,WAAO,GAAG,KAAK,IAAI,KAAK,GAAG,MAAM,SAAS,YAAY,QAAQ,EAAE,CAAC,UAAU,iBAAiB,QAAQ,EAAE,CAAC;;GAGzG,MAAM,cAAc,YAAY,YAAY,GAAG,aAAa,SAAS,mBAAmB,QAAQ,EAAE,CAAC,UAAU,wBAAwB,QAAQ,EAAE,CAAC;GAChJ,MAAM,WAAW,eAAe,QAAQ,MAAM,gBAAgB,KAAK;GACnE,MAAM,kBAAkB,eACtB,eACA,MAAM,gBAAgB,YACvB;AAED,WAAQ,IACN,wBAAwB,MAAM,aAAa,mBACtB,MAAM,iBAAiB,iBAEnC,YAAY,MACZ,SAAS,MACT,kBACV;WACM,KAAK;AACZ,WAAQ,MAAM,kCAAkC,IAAI;AACpD,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC7BF,eAAe,oBAAqC;AAClD,KAAI;EACF,MAAM,kBAAkB,IAAI,IAAI,mBAAmB,OAAO,KAAK,IAAI,CAAC;AAMpE,SAHoB,KAAK,MAAM,MAAM,GAAG,SAAS,gBAAgB,CAAC,CAG/C;SACb;AACN,SAAO;;;AAIX,SAAS,iBAAiB;CACxB,MAAM,QAAQ,OAAO,QAAQ;AAE7B,QAAO;EACL,MAAM,QAAQ,QAAQ;EACtB,SAAS,QAAQ,IAAI,UAAU,QAAQ,QAAQ,MAAM,EAAE;EACvD,UAAU,GAAG,UAAU;EACvB,MAAM,GAAG,MAAM;EAChB;;AAGH,eAAe,mBAAqC;AAClD,KAAI;AAEF,MAAI,EADU,MAAM,GAAG,KAAK,MAAM,kBAAkB,EACzC,QAAQ,CAAE,QAAO;AAG5B,UADgB,MAAM,GAAG,SAAS,MAAM,mBAAmB,OAAO,EACnD,MAAM,CAAC,SAAS;SACzB;AACN,SAAO;;;AAIX,eAAe,eAAmC;CAChD,MAAM,CAAC,SAAS,eAAe,MAAM,QAAQ,IAAI,CAC/C,mBAAmB,EACnB,kBAAkB,CACnB,CAAC;AAEF,QAAO;EAC
L;EACA,SAAS,gBAAgB;EACzB,OAAO;GACL,SAAS,MAAM;GACf,mBAAmB,MAAM;GAC1B;EACD;EACD;;AAGH,SAAS,oBAAoB,MAAuB;AAClD,SAAQ,KAAK;;WAEJ,KAAK,QAAQ;WACb,KAAK,QAAQ,KAAK,GAAG,KAAK,QAAQ,QAAQ,IAAI,KAAK,QAAQ,SAAS,GAAG,KAAK,QAAQ,KAAK;;;aAGvF,KAAK,MAAM,QAAQ;uBACT,KAAK,MAAM,kBAAkB;;gBAEpC,KAAK,cAAc,QAAQ,OAAO;;AAGlD,SAAS,mBAAmB,MAAuB;AACjD,SAAQ,IAAI,KAAK,UAAU,MAAM,MAAM,EAAE,CAAC;;AAG5C,eAAsB,SAAS,SAAyC;CACtE,MAAM,YAAY,MAAM,cAAc;AAEtC,KAAI,QAAQ,KACV,oBAAmB,UAAU;KAE7B,qBAAoB,UAAU;;AAIlC,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,EACJ,MAAM;EACJ,MAAM;EACN,SAAS;EACT,aAAa;EACd,EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,SAAS,EACd,MAAM,KAAK,MACZ,CAAC;;CAEL,CAAC;;;;ACtHF,eAAsB,YAA2B;AAC/C,KAAI;AACF,QAAM,GAAG,OAAO,MAAM,kBAAkB;AACxC,UAAQ,QAAQ,iDAAiD;UAC1D,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C,SAAQ,KAAK,sCAAsC;OAC9C;AACL,WAAQ,MAAM,2BAA2B,MAAM;AAC/C,SAAM;;;;AAKZ,MAAa,SAAS,cAAc;CAClC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;AACJ,SAAO,WAAW;;CAErB,CAAC;;;;AC1BF,SAAS,aAAqB;AAC5B,QAAO,KAAK,KAAK,CAAC,SAAS,GAAG,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;;AAuHzE,MAAaC,eAA6B;CACxC,SAAS;CACT,SAAS,EAAE;CACX,0BAAU,IAAI,KAAK;CACnB,kBAAkB;CAClB,YAAY;CACZ,kBAAkB,OAAU;CAC7B;AAED,SAAgB,YAAY,SAAkB,YAA0B;AACtE,cAAa,UAAU;AACvB,cAAa,aAAa;AAC1B,cAAa,UAAU,EAAE;AACzB,cAAa,2BAAW,IAAI,KAAK;AACjC,cAAa,mBAAmB,UAAU,YAAY,GAAG;;AAG3D,SAAgB,mBAA4B;AAC1C,QAAO,aAAa;;AAItB,SAAS,kBAAkB,UAA0C;CACnE,MAAM,MAAM,KAAK,KAAK;AAGtB,KAAI,aAAa,kBAAkB;EACjC,MAAM,UAAU,aAAa,SAAS,IAAI,aAAa,iBAAiB;AACxE,MAAI,WAAW,MAAM,QAAQ,eAAe,aAAa,kBAAkB;AACzE,WAAQ,eAAe;AACvB,UAAO,aAAa;;;CAKxB,MAAM,YAAY,YAAY;AAC9B,cAAa,mBAAmB;AAChC,cAAa,SAAS,IAAI,WAAW;EACnC,IAAI;EACJ,WAAW;EACX,cAAc;EACd,cAAc;EACd,kBAAkB;EAClB,mBAAmB;EACnB,QAAQ,EAAE;EACV;EACD,CAAC;AAEF,QAAO;;AAaT,SAAgB,cACd,UACA,SACQ;AACR,KAAI,CAAC,aAAa,QAChB,QAAO;CAGT,MAAM,YAAY,kBAAkB,SAAS;CAC7C,MAAM,UAAU,aAAa,SAAS,IAAI,UAAU;AACpD,KAAI,CAAC,QACH,QAAO;CAGT,MAAMC,QAAsB;EAC1B,IAAI,YAAY;EAChB;EACA,WAAW,KAAK,KAAK;EACrB;EACA,SAAS;GACP,OAAO,QAAQ;GACf,UAAU,QAAQ;GAClB,QAAQ,QAAQ;GAChB,OAAO,QAAQ;GACf,YAAY,QAAQ;GACpB,aAAa,QAAQ;GACrB,QAAQ,QAAQ;GACjB;EACF;AAED,cAAa,QAAQ,KAAK,MAAM;AAChC,SAAQ;AAER,KAAI,CAAC,QAAQ,OAAO,SAAS,QAAQ,MAAM,CACzC,SAAQ,OAAO,KAAK,QAAQ,MAAM;AAIpC,QAAO,aAAa,QAAQ,SAAS,aAAa,YAAY;EAC5D,MAAM,UAAU,aAAa,QAAQ,OAAO;AAE5C,MAAI,SAIF;OAHuB,aAAa,QAAQ,QACzC,MAAM,EAAE,cAAc,QAAQ,UAChC,CACkB,WAAW,EAC5B,cAAa,SAAS,OAAO,QAAQ,UAAU;;;AAKrD,QAAO,MAAM;;AAqBf,SAAgB,eACd,IACA,UACA,YACM;AACN,KAAI,CAAC,aAAa,WAAW,CAAC,GAC5B;CAGF,MAAM,QAAQ,aAAa,QAAQ,MAAM,MAAM,EAAE,OAAO,GAAG;AAC3D,KAAI,OAAO;AACT,QAAM,WAAW;AACjB,QAAM,aAAa;EAGnB,MAAM,UAAU,aAAa,SAAS,IAAI,MAAM,UAAU;AAC1D,MAAI,SAAS;AACX,WAAQ,oBAAoB,SAAS,MAAM;AAC3C,WAAQ,qBAAqB,SAAS,MAAM;AAC5C,WAAQ,eAAe,KAAK,KAAK;;;;AAKvC,SAAgB,WAAW,UAAwB,EAAE,EAAiB;CACpE,MAAM,EACJ,OAAO,GACP,QAAQ,IACR,OACA,UACA,SACA,MACA,IACA,QACA,cACE;CAEJ,IAAI,WAAW,CAAC,GAAG,aAAa,QAAQ;AAGxC,KAAI,UACF,YAAW,SAAS,QAAQ,MAAM,EAAE,cAAc,UAAU;AAG9D,KAAI,OAAO;EACT,MAAM,aAAa,MAAM,aAAa;AACtC,aAAW,SAAS,QACjB,MACC,EAAE,QAAQ,MAAM,aAAa,CAAC,SAAS,WAAW,IAC/C,EAAE,UAAU,MAAM,aAAa,CAAC,SAAS,WAAW,CAC1D;;AAGH,KAAI,SACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,SAAS;AAG5D,KAAI,YAAY,OACd,YAAW,SAAS,QAAQ,MAAM,EAAE,UAAU,YAAY,QAAQ;AAGpE,KAAI,KACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,KAAK;AAGxD,KAAI,GACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,GAAG;AAGtD,KAAI,QAAQ;EACV,MAAM,cAAc,OAAO,aAAa;AACxC,aAAW,SAAS,QAAQ,MAAM;GAEhC,MAAM,WAAW,EAAE,QAAQ,SAAS,MAAM,MAAM;AAC9C,QAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE,QAAQ,aAAa,CAAC,SAAS,YAAY;AAEtD,QAAI,MAAM,QAAQ,EAAE,QAAQ,CAC1B,QAAO,EAAE,QAAQ,MACd,MAAM,EAAE,QAAQ,EAAE,KAAK,aAAa,CAAC,SAAS,YAAY,CAC5D;AAEH,WAAO;KACP;GAGF,MAAM,YACJ,EAAE,UAAU,WACT,OAAO,EAAE,SAAS,QAAQ,YAAY,YACtC,
EAAE,SAAS,QAAQ,QAAQ,aAAa,CAAC,SAAS,YAAY;GAGnE,MAAM,YAAY,EAAE,UAAU,WAAW,MAAM,MAC7C,EAAE,KAAK,aAAa,CAAC,SAAS,YAAY,CAC3C;GAGD,MAAM,WAAW,EAAE,QAAQ,QAAQ,aAAa,CAAC,SAAS,YAAY;AAEtE,UAAO,YAAY,aAAa,aAAa;IAC7C;;AAIJ,UAAS,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU;CAElD,MAAM,QAAQ,SAAS;CACvB,MAAM,aAAa,KAAK,KAAK,QAAQ,MAAM;CAC3C,MAAMC,WAAS,OAAO,KAAK;AAG3B,QAAO;EACL,SAHc,SAAS,MAAMA,SAAOA,UAAQ,MAAM;EAIlD;EACA;EACA;EACA;EACD;;AAGH,SAAgB,SAAS,IAAsC;AAC7D,QAAO,aAAa,QAAQ,MAAM,MAAM,EAAE,OAAO,GAAG;;AAGtD,SAAgB,cAA6B;CAC3C,MAAM,WAAW,MAAM,KAAK,aAAa,SAAS,QAAQ,CAAC,CAAC,MACzD,GAAG,MAAM,EAAE,eAAe,EAAE,aAC9B;AAED,QAAO;EACL;EACA,OAAO,SAAS;EACjB;;AAGH,SAAgB,WAAW,IAAiC;AAC1D,QAAO,aAAa,SAAS,IAAI,GAAG;;AAGtC,SAAgB,kBAAkB,WAAwC;AACxE,QAAO,aAAa,QACjB,QAAQ,MAAM,EAAE,cAAc,UAAU,CACxC,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU;;AAG9C,SAAgB,eAAqB;AACnC,cAAa,UAAU,EAAE;AACzB,cAAa,2BAAW,IAAI,KAAK;AACjC,cAAa,mBAAmB,YAAY;;AAG9C,SAAgB,cAAc,WAA4B;AACxD,KAAI,CAAC,aAAa,SAAS,IAAI,UAAU,CACvC,QAAO;AAGT,cAAa,UAAU,aAAa,QAAQ,QACzC,MAAM,EAAE,cAAc,UACxB;AACD,cAAa,SAAS,OAAO,UAAU;AAEvC,KAAI,aAAa,qBAAqB,UACpC,cAAa,mBAAmB,YAAY;AAG9C,QAAO;;AAGT,SAAgB,WAAyB;CACvC,MAAM,UAAU,aAAa;CAE7B,MAAMC,YAAoC,EAAE;CAC5C,MAAMC,eAAuC,EAAE;CAC/C,MAAMC,iBAAyC,EAAE;CAEjD,IAAI,aAAa;CACjB,IAAI,cAAc;CAClB,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;CACpB,IAAI,eAAe;CACnB,IAAI,YAAY;AAEhB,MAAK,MAAM,SAAS,SAAS;EAE3B,MAAM,QAAQ,MAAM,UAAU,SAAS,MAAM,QAAQ;AACrD,YAAU,UAAU,UAAU,UAAU,KAAK;AAG7C,eAAa,MAAM,aAAa,aAAa,MAAM,aAAa,KAAK;EAGrE,MAAM,OAAO,IAAI,KAAK,MAAM,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG;AACjE,iBAAe,SAAS,eAAe,SAAS,KAAK;AAErD,MAAI,MAAM,UAAU;AAClB,OAAI,MAAM,SAAS,QACjB;OAEA;AAGF,iBAAc,MAAM,SAAS,MAAM;AACnC,kBAAe,MAAM,SAAS,MAAM;;AAGtC,MAAI,MAAM,YAAY;AACpB,oBAAiB,MAAM;AACvB;;;CAKJ,MAAM,iBAAiB,OAAO,QAAQ,eAAe,CAClD,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,EAAE,CAAC,CACtC,MAAM,IAAI,CACV,KAAK,CAAC,MAAM,YAAY;EAAE;EAAM;EAAO,EAAE;CAG5C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,iBAAiB;AACrB,MAAK,MAAM,WAAW,aAAa,SAAS,QAAQ,CAClD,KAAI,MAAM,QAAQ,eAAe,aAAa,iBAC5C;AAIJ,QAAO;EACL,eAAe,QAAQ;EACvB,oBAAoB;EACpB,gBAAgB;EAChB,kBAAkB;EAClB,mBAAmB;EACnB,mBAAmB,gBAAgB,IAAI,gBAAgB,gBAAgB;EACvE,mBAAmB;EACnB,sBAAsB;EACtB;EACA;EACD;;AAGH,SAAgB,cAAc,SAAyB,QAAgB;AACrE,KAAI,WAAW,OACb,QAAO,KAAK,UACV;EACE,UAAU,MAAM,KAAK,aAAa,SAAS,QAAQ,CAAC;EACpD,SAAS,aAAa;EACvB,EACD,MACA,EACD;CAIH,MAAM,UAAU;EACd;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD;CAED,MAAM,OAAO,aAAa,QAAQ,KAAK,MAAM;EAC3C,EAAE;EACF,EAAE;EACF,IAAI,KAAK,EAAE,UAAU,CAAC,aAAa;EACnC,EAAE;EACF,EAAE,QAAQ;EACV,EAAE,QAAQ,SAAS;EACnB,EAAE,QAAQ;EACV,EAAE,UAAU,WAAW;EACvB,EAAE,UAAU,SAAS;EACrB,EAAE,UAAU,MAAM,gBAAgB;EAClC,EAAE,UAAU,MAAM,iBAAiB;EACnC,EAAE,cAAc;EAChB,EAAE,UAAU,eAAe;EAC3B,EAAE,UAAU,SAAS;EACtB,CAAC;AAEF,QAAO,CAAC,QAAQ,KAAK,IAAI,EAAE,GAAG,KAAK,KAAK,MAAM,EAAE,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,KAAK;;;;;AC5hBxE,SAAgB,mBAAyB;AACvC,KAAI,OAAO,QAAQ,YAAa;AAEhC,KAAI;EACF,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,0BAAU,IAAI,KAAyB;AAmD7C,sBA7CmB;GACjB,SACE,SACA,SACA;AACA,QAAI;KACF,MAAM,SACJ,OAAO,QAAQ,WAAW,WACxB,IAAI,IAAI,QAAQ,OAAO,GACtB,QAAQ;KAIb,MAAM,MAHM,eAGI,OAAO,UAAU,CAAC;KAClC,MAAM,WAAW,OAAO,IAAI,SAAS,IAAI,MAAM;AAC/C,SAAI,CAAC,UAAU;AACb,cAAQ,MAAM,sBAAsB,OAAO,WAAW;AACtD,aAAQ,OAAiC,SAAS,SAAS,QAAQ;;KAErE,IAAI,QAAQ,QAAQ,IAAI,SAAS;AACjC,SAAI,CAAC,OAAO;AACV,cAAQ,IAAI,WAAW,SAAS;AAChC,cAAQ,IAAI,UAAU,MAAM;;KAE9B,IAAI,QAAQ;AACZ,SAAI;MACF,MAAM,IAAI,IAAI,IAAI,SAAS;AAC3B,cAAQ,GAAG,EAAE,SAAS,IAAI,EAAE;aACtB;AAGR,aAAQ,MAAM,qBAAqB,OAAO,SAAS,OAAO,QAAQ;AAClE,YAAQ,MAAgC,SAAS,SAAS,QAAQ;YAC5D;AACN,YAAQ,OAAiC,SAAS,SAAS,QAAQ;;;GAGvE,QAAQ;AACN,WAAO,OAAO,OAAO;;GAEvB,UAAU;AACR,WAAO,OAAO,SAAS;;GAE1B,CAEuD;AACxD,UAAQ,MAAM,mDAAmD;UAC1D,
KAAK;AACZ,UAAQ,MAAM,wBAAwB,IAAI;;;;;;ACzD9C,SAAS,WAAsB;CAC7B,MAAM,EAAE,UAAU,MAAM,QAAQC;AAEhC,KAAI,aAAa,SAAS;AACxB,MAAI;GACF,MAAM,UAAU,oDAAoD,KAAK;AAGzE,OAFsB,SAAS,SAAS,EAAE,OAAO,QAAQ,CAAC,CAAC,UAAU,CAEnD,aAAa,CAAC,SAAS,iBAAiB,CACxD,QAAO;UAEH;AACN,UAAO;;AAGT,SAAO;QACF;EACL,MAAM,YAAY,IAAI;AACtB,MAAI,WAAW;AACb,OAAI,UAAU,SAAS,MAAM,CAAE,QAAO;AACtC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;AACvC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;;AAGzC,SAAO;;;;;;;;;;AAWX,SAAgB,kBACd,SACA,eAAuB,IACf;CACR,MAAM,QAAQ,UAAU;CACxB,MAAM,kBAAkB,OAAO,QAAQ,QAAQ,CAAC,QAC7C,GAAG,WAAW,UAAU,OAC1B;CAED,IAAIC;AAEJ,SAAQ,OAAR;EACE,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,QAAQ,IAAI,MAAM,MAAM,QAAQ,MAAM,MAAK,CAAC,GAAG,CACrE,KAAK,KAAK;AACb;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,OAAO,IAAI,GAAG,QAAQ,CAC5C,KAAK,MAAM;AACd;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,WAAW,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC,GAAG,CACvE,KAAK,KAAK;AACb;EAEF,SAAS;GAEP,MAAM,cAAc,gBACjB,KAAK,CAAC,KAAK,WAAW,GAAG,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC,GAAG,CAC/D,KAAK,IAAI;AACZ,kBAAe,gBAAgB,SAAS,IAAI,UAAU,gBAAgB;AACtE;;;AAIJ,KAAI,gBAAgB,aAElB,QAAO,GAAG,eADQ,UAAU,QAAQ,QAAQ,SACP;AAGvC,QAAO,gBAAgB;;;;;AClFzB,MAAa,gBAAgB,YAAY;AAKvC,KAAI,CAJa,MAAM,QAAQ,OAAO,4BAA4B,EAChE,MAAM,WACP,CAAC,CAGA,OAAM,IAAI,UACR,oBACA,KACA,KAAK,UAAU,EAAE,SAAS,oBAAoB,CAAC,CAChD;;;;;ACFL,IAAM,eAAN,MAAmB;CACjB,AAAQ,QAAuC,EAAE;CACjD,AAAQ,aAAa;CACrB,AAAQ,kBAAkB;CAE1B,MAAM,QACJ,SACA,kBACY;AACZ,SAAO,IAAI,SAAS,SAAS,WAAW;AACtC,QAAK,MAAM,KAAK;IACL;IACA;IACT;IACD,CAAC;AAEF,OAAI,KAAK,MAAM,SAAS,GAAG;IACzB,MAAM,WAAW,KAAK,MAAM,KAAK,MAAM,SAAS,KAAK,iBAAiB;AACtE,YAAQ,KACN,6BAA6B,KAAK,MAAM,OAAO,oBAAoB,SAAS,GAC7E;;AAGH,GAAK,KAAK,aAAa,iBAAiB;IACxC;;CAGJ,MAAc,aAAa,kBAAyC;AAClE,MAAI,KAAK,WAAY;AACrB,OAAK,aAAa;AAElB,SAAO,KAAK,MAAM,SAAS,GAAG;GAE5B,MAAM,YADM,KAAK,KAAK,GACE,KAAK;GAC7B,MAAM,aAAa,mBAAmB;AAEtC,OAAI,KAAK,kBAAkB,KAAK,YAAY,YAAY;IACtD,MAAM,SAAS,aAAa;AAC5B,YAAQ,MAAM,uBAAuB,KAAK,KAAK,SAAS,IAAK,CAAC,GAAG;AACjE,UAAM,IAAI,SAAS,YAAY,WAAW,SAAS,OAAO,CAAC;;GAG7D,MAAM,UAAU,KAAK,MAAM,OAAO;AAClC,OAAI,CAAC,QAAS;AAEd,QAAK,kBAAkB,KAAK,KAAK;AAEjC,OAAI;IACF,MAAM,SAAS,MAAM,QAAQ,SAAS;AACtC,YAAQ,QAAQ,OAAO;YAChB,OAAO;AACd,YAAQ,OAAO,MAAM;;;AAIzB,OAAK,aAAa;;CAGpB,IAAI,SAAiB;AACnB,SAAO,KAAK,MAAM;;;AAItB,MAAM,eAAe,IAAI,cAAc;;;;;AAMvC,eAAsB,qBACpB,SACA,SACY;AAEZ,KAAIC,QAAM,qBAAqB,OAC7B,QAAO,SAAS;AAGlB,QAAO,aAAa,QAAQ,SAASA,QAAM,iBAAiB;;;;;AChF9D,MAAM,eAAe;CACnB,kBAAkB,OAAO;CACzB,mBAAmB,OAAO;CAC1B,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACzB;AAUD,MAAM,gCAAgB,IAAI,KAAsB;;;;AAKhD,MAAM,4BACJ,WACA,SACA,cACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,YAAY,WAAW;AAChC,YAAU,UAAU;AACpB,YAAU,QAAQ,OAAO,KAAK,UAAU,SAAS,CAAC,CAAC;;AAErD,WAAU,UAAU;AACpB,QAAO;;;;;AAMT,MAAM,+BACJ,cACA,YACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,SAAS,YAGhB,WAAU,QAAQ,OAAO,KAAK,UAAU,IAAI,CAAC,SAAS;UAC7C,KAAK,KACd,WAAU,QAAQ,OAAO,KAAK,KAAK,CAAC;AAGxC,QAAO;;;;;AAMT,MAAM,0BACJ,SACA,SACA,cACW;CAGX,MAAM,mBAAmB;CAEzB,MAAM,gBAAgB;CACtB,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,EAAE;AAClD,MAAI,OAAO,UAAU,SACnB,WAAU,QAAQ,OAAO,MAAM,CAAC;AAElC,MAAI,QAAQ,OACV,WAAU;AAEZ,MAAI,QAAQ,aACV,WAAU,yBACR,OACA,SACA,UACD;AAEH,MAAI,QAAQ,aAAa,MAAM,QAAQ,MAAM,CAC3C,WAAU,4BACR,OACA,QACD;;AAGL,QAAO;;;;;AAMT,MAAM,mBACJ,UACA,SACA,cACW;AACX,KAAI,SAAS,WAAW,EACtB,QAAO;CAET,IAAI,YAAY;AAChB,MAAK,MAAM,WAAW,SACpB,cAAa,uBAAuB,SAAS,SAAS,UAAU;AAGlE,cAAa;AACb,QAAO;;;;;AAMT,MAAM,wBAAwB,OAAO,aAAuC;AAC1E,KAAI,cAAc,IAAI,SAAS,EAAE;EAC/B,MAAM,SAAS,cAAc,IAAI,SAAS;AAC1C,MAAI,OACF,QAAO;;CAIX,MAAM,oBAAoB;AAC1B,KAAI,EAAE,qBAAqB,eAAe;EACxC,MAAM,iBAAkB,MAAM,aAAa,YAAY;AACvD,gBAAc,IAAI,UAAU,eAAe;AAC3C,SAAO;;CAGT,MAAM,iBAAkB,
MAAM,aAAa,oBAAoB;AAC/D,eAAc,IAAI,UAAU,eAAe;AAC3C,QAAO;;;;;AAMT,MAAa,yBAAyB,UAAyB;AAC7D,QAAO,MAAM,aAAa,aAAa;;;;;;;;;;;;AAazC,MAAM,qBAAqB,UAAiB;AAC1C,QAAO,MAAM,OAAO,mBAAmB,MAAM,OAAO,UAChD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV,GACD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV;;;;;AAMP,MAAM,4BACJ,KACA,MACA,YAIW;CACX,MAAM,EAAE,SAAS,cAAc;CAC/B,IAAI,SAAS,UAAU;AAGvB,KAAI,OAAO,SAAS,YAAY,SAAS,KACvC,QAAO;CAIT,MAAM,QAAQ;CAOd,MAAM,YAAY;CAClB,MAAM,YAAY,MAAM,QAAQ;CAChC,IAAI,YAAY,MAAM,eAAe;AAGrC,KAAI,MAAM,QAAQ,MAAM,QAAQ,MAAM,KAAK,EAAE;AAC3C,YAAU,UAAU;AACpB,OAAK,MAAM,QAAQ,MAAM,MAAM;AAC7B,aAAU,UAAU;AACpB,aAAU,QAAQ,OAAO,OAAO,KAAK,CAAC,CAAC;;;AAK3C,KAAI,UAAU,SAAS,IAAI,CACzB,aAAY,UAAU,MAAM,GAAG,GAAG;CAIpC,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG;AAC1C,WAAU,QAAQ,OAAO,KAAK,CAAC;CAG/B,MAAM,eAAe,IAAI,IAAI;EAAC;EAAQ;EAAe;EAAO,CAAC;AAC7D,MAAK,MAAM,gBAAgB,OAAO,KAAK,MAAM,CAC3C,KAAI,CAAC,aAAa,IAAI,aAAa,EAAE;EACnC,MAAM,gBAAgB,MAAM;EAC5B,MAAM,eACJ,OAAO,kBAAkB,WAAW,gBAClC,KAAK,UAAU,cAAc;AAEjC,YAAU,QAAQ,OAAO,GAAG,aAAa,GAAG,eAAe,CAAC;;AAIhE,QAAO;;;;;AAMT,MAAM,6BACJ,YACA,SACA,cACW;AACX,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;CAGT,MAAM,SAAS;CACf,IAAI,SAAS;AAEb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,QAAQ,cAAc;EACxB,MAAM,aAAa;AACnB,MAAI,OAAO,KAAK,WAAW,CAAC,SAAS,GAAG;AACtC,aAAU,UAAU;AACpB,QAAK,MAAM,WAAW,OAAO,KAAK,WAAW,CAC3C,WAAU,yBAAyB,SAAS,WAAW,UAAU;IAC/D;IACA;IACD,CAAC;;QAGD;EACL,MAAM,YACJ,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,MAAM;AAC3D,YAAU,QAAQ,OAAO,GAAG,IAAI,GAAG,YAAY,CAAC;;AAIpD,QAAO;;;;;AAMT,MAAM,uBACJ,MACA,SACA,cACW;CACX,IAAI,SAAS,UAAU;CACvB,MAAM,OAAO,KAAK;CAClB,MAAM,QAAQ,KAAK;CACnB,IAAI,QAAQ,KAAK,eAAe;AAChC,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,MAAM,GAAG,GAAG;CAE5B,MAAM,OAAO,QAAQ,MAAM;AAC3B,WAAU,QAAQ,OAAO,KAAK,CAAC;AAC/B,KACE,OAAO,KAAK,eAAe,YACxB,KAAK,eAAe,KAEvB,WAAU,0BAA0B,KAAK,YAAY,SAAS,UAAU;AAE1E,QAAO;;;;;AAMT,MAAa,qBACX,OACA,SACA,cACW;CACX,IAAI,iBAAiB;AACrB,MAAK,MAAM,QAAQ,MACjB,mBAAkB,oBAAoB,MAAM,SAAS,UAAU;AAEjE,mBAAkB,UAAU;AAC5B,QAAO;;;;;AAMT,MAAa,gBAAgB,OAC3B,SACA,UAC+C;CAE/C,MAAM,YAAY,sBAAsB,MAAM;CAG9C,MAAM,UAAU,MAAM,sBAAsB,UAAU;CAEtD,MAAM,qBAAqB,QAAQ;CACnC,MAAM,gBAAgB,mBAAmB,QACtC,QAAQ,IAAI,SAAS,YACvB;CACD,MAAM,iBAAiB,mBAAmB,QACvC,QAAQ,IAAI,SAAS,YACvB;CAED,MAAM,YAAY,kBAAkB,MAAM;CAC1C,IAAI,cAAc,gBAAgB,eAAe,SAAS,UAAU;AACpE,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,EAC1C,gBAAe,kBAAkB,QAAQ,OAAO,SAAS,UAAU;CAErE,MAAM,eAAe,gBAAgB,gBAAgB,SAAS,UAAU;AAExE,QAAO;EACL,OAAO;EACP,QAAQ;EACT;;;;;AC/VH,MAAa,wBAAwB,OACnC,YACG;AACH,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,QAAQ,SAAS,MACnC,MACC,OAAO,EAAE,YAAY,YAClB,EAAE,SAAS,MAAM,QAAMC,IAAE,SAAS,YAAY,CACpD;CAID,MAAM,cAAc,QAAQ,SAAS,MAAM,QACzC,CAAC,aAAa,OAAO,CAAC,SAAS,IAAI,KAAK,CACzC;CAGD,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,oBAAoB;EACxE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,qCAAqC,SAAS;AAC5D,QAAM,MAAM,UAAU,aAAa,qCAAqC,SAAS;;AAGnF,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;ACvB/B,eAAsBC,mBAAiB,GAAY;CACjD,MAAM,YAAY,KAAK,KAAK;CAC5B,IAAI,UAAU,MAAM,EAAE,IAAI,MAA8B;AACxD,SAAQ,MAAM,oBAAoB,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CAAC;CAGtE,MAAM,YAAY,cAAc,UAAU;EACxC,OAAO,QAAQ;EACf,UAAU,sBAAsB,QAAQ,SAAS;EACjD,QAAQ,QAAQ,UAAU;EAC1B,OAAO,QAAQ,OAAO,KAAK,OAAO;GAChC,MAAM,EAAE,SAAS;GACjB,aAAa,EAAE,SAAS;GACzB,EAAE;EACH,YAAY,QAAQ,cAAc;EAClC,aAAa,QAAQ,eAAe;EACrC,CAAC;CAGF,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,QAAQ,MACjC;AAGD,KAAI;AACF,MAAI,eAAe;GACjB,MAAM,aAAa,MAAM,cAAc,SAAS,cAAc;AAC9D,WAAQ,KAAK,wBAAwB,WAAW;QAEhD,SAAQ,KAAK,s
DAAsD;UAE9D,OAAO;AACd,UAAQ,KAAK,oCAAoC,MAAM;;AAGzD,KAAI,MAAM,cAAe,OAAM,eAAe;AAE9C,KAAI,UAAU,QAAQ,WAAW,EAAE;AACjC,YAAU;GACR,GAAG;GACH,YAAY,eAAe,aAAa,OAAO;GAChD;AACD,UAAQ,MAAM,sBAAsB,KAAK,UAAU,QAAQ,WAAW,CAAC;;AAGzE,KAAI;EAEF,MAAM,WAAW,MAAM,qBAAqB,aAC1C,sBAAsB,QAAQ,CAC/B;AAED,MAAIC,iBAAe,SAAS,EAAE;AAC5B,WAAQ,MAAM,2BAA2B,KAAK,UAAU,SAAS,CAAC;GAGlE,MAAM,SAAS,SAAS,QAAQ;AAChC,kBACE,WACA;IACE,SAAS;IACT,OAAO,SAAS;IAChB,OAAO;KACL,cAAc,SAAS,OAAO,iBAAiB;KAC/C,eAAe,SAAS,OAAO,qBAAqB;KACrD;IACD,aAAa,QAAQ,iBAAiB;IACtC,SACE,QAAQ,UACN;KACE,MAAM,OAAO,QAAQ;KACrB,SACE,OAAO,OAAO,QAAQ,YAAY,WAChC,OAAO,QAAQ,UACf,KAAK,UAAU,OAAO,QAAQ,QAAQ;KAC1C,YAAY,OAAO,QAAQ,YAAY,KAAK,QAAQ;MAClD,IAAI,GAAG;MACP,MAAM,GAAG;MACT,UAAU;OACR,MAAM,GAAG,SAAS;OAClB,WAAW,GAAG,SAAS;OACxB;MACF,EAAE;KACJ,GACD;IACJ,WAAW,QAAQ,SAAS,YAAY,KAAK,QAAQ;KACnD,IAAI,GAAG;KACP,MAAM,GAAG,SAAS;KAClB,OAAO,GAAG,SAAS;KACpB,EAAE;IACJ,EACD,KAAK,KAAK,GAAG,UACd;AAED,UAAO,EAAE,KAAK,SAAS;;AAGzB,UAAQ,MAAM,qBAAqB;AACnC,SAAO,UAAU,GAAG,OAAO,WAAW;GAEpC,IAAI,cAAc;GAClB,IAAI,oBAAoB;GACxB,IAAI,qBAAqB;GACzB,IAAI,qBAAqB;GACzB,IAAI,gBAAgB;GACpB,MAAMC,kBAID,EAAE;GACP,MAAMC,uCAGF,IAAI,KAAK;AAEb,OAAI;AACF,eAAW,MAAM,SAAS,UAAU;AAClC,aAAQ,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AAGxD,SAAI,MAAM,QAAQ,MAAM,SAAS,SAC/B,KAAI;MACF,MAAM,SAAS,KAAK,MAAM,MAAM,KAAK;AACrC,UAAI,OAAO,SAAS,CAAC,YACnB,eAAc,OAAO;AAEvB,UAAI,OAAO,OAAO;AAChB,2BAAoB,OAAO,MAAM;AACjC,4BAAqB,OAAO,MAAM;;MAEpC,MAAM,SAAS,OAAO,QAAQ;AAC9B,UAAI,QAAQ,OAAO,QACjB,kBAAiB,OAAO,MAAM;AAEhC,UAAI,QAAQ,OAAO,WACjB,MAAK,MAAM,MAAM,OAAO,MAAM,YAAY;OACxC,MAAM,MAAM,GAAG;AACf,WAAI,CAAC,qBAAqB,IAAI,IAAI,CAChC,sBAAqB,IAAI,KAAK;QAC5B,IAAI,GAAG,MAAM;QACb,MAAM,GAAG,UAAU,QAAQ;QAC3B,WAAW;QACZ,CAAC;OAEJ,MAAM,MAAM,qBAAqB,IAAI,IAAI;AACzC,WAAI,KAAK;AACP,YAAI,GAAG,GAAI,KAAI,KAAK,GAAG;AACvB,YAAI,GAAG,UAAU,KAAM,KAAI,OAAO,GAAG,SAAS;AAC9C,YAAI,GAAG,UAAU,UACf,KAAI,aAAa,GAAG,SAAS;;;AAIrC,UAAI,QAAQ,cACV,sBAAqB,OAAO;aAExB;AAKV,WAAM,OAAO,SAAS,MAAoB;;AAI5C,SAAK,MAAM,MAAM,qBAAqB,QAAQ,CAC5C,KAAI,GAAG,MAAM,GAAG,KACd,iBAAgB,KAAK;KACnB,IAAI,GAAG;KACP,MAAM,GAAG;KACT,WAAW,GAAG;KACf,CAAC;IAKN,MAAM,sBAAsB,gBAAgB,KAAK,QAAQ;KACvD,IAAI,GAAG;KACP,MAAM;KACN,UAAU;MAAE,MAAM,GAAG;MAAM,WAAW,GAAG;MAAW;KACrD,EAAE;AAGH,mBACE,WACA;KACE,SAAS;KACT,OAAO,eAAe,QAAQ;KAC9B,OAAO;MACL,cAAc;MACd,eAAe;MAChB;KACD,aAAa,sBAAsB;KACnC,SAAS;MACP,MAAM;MACN,SAAS,iBAAiB;MAC1B,YACE,oBAAoB,SAAS,IAC3B,sBACA;MACL;KACD,WACE,gBAAgB,SAAS,IACvB,gBAAgB,KAAK,QAAQ;MAC3B,IAAI,GAAG;MACP,MAAM,GAAG;MACT,OAAO,GAAG;MACX,EAAE,GACH;KACL,EACD,KAAK,KAAK,GAAG,UACd;YACM,OAAO;AAEd,mBACE,WACA;KACE,SAAS;KACT,OAAO,eAAe,QAAQ;KAC9B,OAAO;MAAE,cAAc;MAAG,eAAe;MAAG;KAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;KAChD,SAAS;KACV,EACD,KAAK,KAAK,GAAG,UACd;AACD,UAAM;;IAER;UACK,OAAO;AAEd,iBACE,WACA;GACE,SAAS;GACT,OAAO,QAAQ;GACf,OAAO;IAAE,cAAc;IAAG,eAAe;IAAG;GAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;GAChD,SAAS;GACV,EACD,KAAK,KAAK,GAAG,UACd;AACD,QAAM;;;AAIV,MAAMF,oBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;AAG3E,SAAS,sBACP,UACuB;AACvB,QAAO,SAAS,KAAK,QAAQ;EAC3B,MAAMG,SAAyB;GAC7B,MAAM,IAAI;GACV,SACE,OAAO,IAAI,YAAY,WACrB,IAAI,UACJ,KAAK,UAAU,IAAI,QAAQ;GAChC;AAGD,MAAI,gBAAgB,OAAO,IAAI,WAC7B,QAAO,aAAa,IAAI,WAAW,KAAK,QAAQ;GAC9C,IAAI,GAAG;GACP,MAAM,GAAG;GACT,UAAU;IACR,MAAM,GAAG,SAAS;IAClB,WAAW,GAAG,SAAS;IACxB;GACF,EAAE;AAIL,MAAI,kBAAkB,OAAO,IAAI,aAC/B,QAAO,eAAe,IAAI;AAI5B,MAAI,UAAU,OAAO,IAAI,KACvB,QAAO,OAAO,IAAI;AAGpB,SAAO;GACP;;;;;AC5SJ,MAAa,mBAAmB,IAAI,MAAM;AAE1C,iBAAiB,KAAK,KAAK,OAAO,MAAM;AACtC,KAAI;AACF,SAAO,MAAMC,mBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,mBAAmB,OAAO,YAA8B;AACnE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,cAAc;
EAClE,QAAQ;EACR,SAAS,eAAe,MAAM;EAC9B,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAE7F,QAAQ,MAAM,SAAS,MAAM;;;;;ACP/B,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;EACF,MAAM,UAAU,MAAM,EAAE,IAAI,MAAwB;EACpD,MAAM,WAAW,MAAM,iBAAiB,QAAQ;AAEhD,SAAO,EAAE,KAAK,SAAS;UAChB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,qBAAqB,IAAI,MAAM;AAI5C,mBAAmB,KAAK,WAAW,MAAM;AACvC,QAAO,EAAE,KAAK,MAAM,IAAI;EACxB;;;;ACQF,SAAgB,iBAAiB,GAAY;AAC3C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,QAAQ,EAAE,IAAI,OAAO;CAC3B,MAAMC,UAAwB;EAC5B,MAAM,MAAM,OAAO,OAAO,SAAS,MAAM,MAAM,GAAG,GAAG;EACrD,OAAO,MAAM,QAAQ,OAAO,SAAS,MAAM,OAAO,GAAG,GAAG;EACxD,OAAO,MAAM,SAAS;EACtB,UAAU,MAAM;EAChB,SAAS,MAAM,UAAU,MAAM,YAAY,SAAS;EACpD,MAAM,MAAM,OAAO,OAAO,SAAS,MAAM,MAAM,GAAG,GAAG;EACrD,IAAI,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,GAAG,GAAG;EAC/C,QAAQ,MAAM,UAAU;EACxB,WAAW,MAAM,aAAa;EAC/B;CAED,MAAM,SAAS,WAAW,QAAQ;AAClC,QAAO,EAAE,KAAK,OAAO;;AAGvB,SAAgB,eAAe,GAAY;AACzC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;CAC5B,MAAM,QAAQ,SAAS,GAAG;AAE1B,KAAI,CAAC,MACH,QAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,EAAE,IAAI;AAGlD,QAAO,EAAE,KAAK,MAAM;;AAGtB,SAAgB,oBAAoB,GAAY;AAC9C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;AAGnE,eAAc;AACd,QAAO,EAAE,KAAK;EAAE,SAAS;EAAM,SAAS;EAAmB,CAAC;;AAG9D,SAAgB,eAAe,GAAY;AACzC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,QAAQ,UAAU;AACxB,QAAO,EAAE,KAAK,MAAM;;AAGtB,SAAgB,aAAa,GAAY;AACvC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,SAAU,EAAE,IAAI,MAAM,SAAS,IAAI;CACzC,MAAM,OAAO,cAAc,OAAO;AAElC,KAAI,WAAW,OAAO;AACpB,IAAE,OAAO,gBAAgB,WAAW;AACpC,IAAE,OAAO,uBAAuB,mCAAmC;QAC9D;AACL,IAAE,OAAO,gBAAgB,mBAAmB;AAC5C,IAAE,OAAO,uBAAuB,oCAAoC;;AAGtE,QAAO,EAAE,KAAK,KAAK;;AAIrB,SAAgB,kBAAkB,GAAY;AAC5C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,SAAS,aAAa;AAC5B,QAAO,EAAE,KAAK,OAAO;;AAGvB,SAAgB,iBAAiB,GAAY;AAC3C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;CAC5B,MAAM,UAAU,WAAW,GAAG;AAE9B,KAAI,CAAC,QACH,QAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,EAAE,IAAI;CAIpD,MAAM,UAAU,kBAAkB,GAAG;AAErC,QAAO,EAAE,KAAK;EACZ,GAAG;EACH;EACD,CAAC;;AAGJ,SAAgB,oBAAoB,GAAY;AAC9C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;AAG5B,KAAI,CAFY,cAAc,GAAG,CAG/B,QAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,EAAE,IAAI;AAGpD,QAAO,EAAE,KAAK;EAAE,SAAS;EAAM,SAAS;EAAmB,CAAC;;;;;ACpI9D,MAAa,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAtB,MAAa,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAtB,MAAa,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACMxB,SAAgB,eAAuB;AACrC,QAAO;;;;;;WAME,OAAO;;;IAGd,SAAS;YACD,OAAO;;;;;;;ACJnB
,MAAa,gBAAgB,IAAI,MAAM;AAGvC,cAAc,IAAI,gBAAgB,iBAAiB;AACnD,cAAc,IAAI,oBAAoB,eAAe;AACrD,cAAc,OAAO,gBAAgB,oBAAoB;AACzD,cAAc,IAAI,cAAc,eAAe;AAC/C,cAAc,IAAI,eAAe,aAAa;AAG9C,cAAc,IAAI,iBAAiB,kBAAkB;AACrD,cAAc,IAAI,qBAAqB,iBAAiB;AACxD,cAAc,OAAO,qBAAqB,oBAAoB;AAG9D,cAAc,IAAI,MAAM,MAAM;AAC5B,QAAO,EAAE,KAAK,cAAc,CAAC;EAC7B;;;;AC7BF,SAAgB,+BACd,cACkC;AAClC,KAAI,iBAAiB,KACnB,QAAO;AAQT,QANsB;EACpB,MAAM;EACN,QAAQ;EACR,YAAY;EACZ,gBAAgB;EACjB,CACoB;;;;;ACevB,MAAM,yBAAyB;AAW/B,SAAS,mBAAmB,UAA0C;CACpE,MAAMC,gBAAgC,EAAE;AAExC,MAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;EACxC,MAAM,UAAU,SAAS;AACzB,gBAAc,KAAK,QAAQ;AAE3B,MACE,QAAQ,SAAS,eACd,QAAQ,cACR,QAAQ,WAAW,SAAS,GAC/B;GAEA,MAAM,qCAAqB,IAAI,KAAa;GAG5C,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,SAAS,UAAU,SAAS,GAAG,SAAS,QAAQ;IACzD,MAAM,cAAc,SAAS;AAC7B,QAAI,YAAY,aACd,oBAAmB,IAAI,YAAY,aAAa;AAElD;;AAIF,QAAK,MAAM,YAAY,QAAQ,WAC7B,KAAI,CAAC,mBAAmB,IAAI,SAAS,GAAG,EAAE;AACxC,YAAQ,MAAM,sCAAsC,SAAS,KAAK;AAClE,kBAAc,KAAK;KACjB,MAAM;KACN,cAAc,SAAS;KACvB,SAAS;KACV,CAAC;;;;AAMV,QAAO;;AAUT,SAAgB,kBACd,SACmB;CAEnB,MAAMC,kBAAmC;EACvC,qCAAqB,IAAI,KAAK;EAC9B,qCAAqB,IAAI,KAAK;EAC/B;CAED,MAAM,WAAW,mCACf,QAAQ,UACR,QAAQ,QACR,gBACD;AAED,QAAO;EACL,SAAS;GACP,OAAO,mBAAmB,QAAQ,MAAM;GAExC,UAAU,mBAAmB,SAAS;GACtC,YAAY,QAAQ;GACpB,MAAM,QAAQ;GACd,QAAQ,QAAQ;GAChB,aAAa,QAAQ;GACrB,OAAO,QAAQ;GACf,MAAM,QAAQ,UAAU;GACxB,OAAO,gCAAgC,QAAQ,OAAO,gBAAgB;GACtE,aAAa,qCACX,QAAQ,aACR,gBACD;GACF;EACD;EACD;;AAGH,SAAS,mBAAmB,OAAuB;CAGjD,MAAMC,eAAuC;EAC3C,MAAM;EACN,QAAQ;EACR,OAAO;EACR;AAED,KAAI,aAAa,OACf,QAAO,aAAa;AAOtB,KAAI,MAAM,MAAM,0BAA0B,CACxC,QAAO;AAGT,KAAI,MAAM,MAAM,wBAAwB,CACtC,QAAO;AAIT,KAAI,MAAM,MAAM,wBAAwB,CACtC,QAAO;AAGT,KAAI,MAAM,MAAM,sBAAsB,CACpC,QAAO;AAIT,KAAI,MAAM,MAAM,yBAAyB,CACvC,QAAO;AAGT,KAAI,MAAM,MAAM,yBAAyB,CACvC,QAAO;AAGT,QAAO;;AAGT,SAAS,mCACP,mBACA,QACA,iBACgB;CAChB,MAAM,iBAAiB,mBAAmB,OAAO;CAEjD,MAAM,gBAAgB,kBAAkB,SAAS,YAC/C,QAAQ,SAAS,SACf,kBAAkB,QAAQ,GAC1B,uBAAuB,SAAS,gBAAgB,CACnD;AAED,QAAO,CAAC,GAAG,gBAAgB,GAAG,cAAc;;AAG9C,SAAS,mBACP,QACgB;AAChB,KAAI,CAAC,OACH,QAAO,EAAE;AAGX,KAAI,OAAO,WAAW,SACpB,QAAO,CAAC;EAAE,MAAM;EAAU,SAAS;EAAQ,CAAC;KAG5C,QAAO,CAAC;EAAE,MAAM;EAAU,SADP,OAAO,KAAK,UAAU,MAAM,KAAK,CAAC,KAAK,OAAO;EAClB,CAAC;;AAIpD,SAAS,kBAAkB,SAA+C;CACxE,MAAMC,cAA8B,EAAE;AAEtC,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAAE;EAClC,MAAM,mBAAmB,QAAQ,QAAQ,QACtC,UACC,MAAM,SAAS,cAClB;EACD,MAAM,cAAc,QAAQ,QAAQ,QACjC,UAAU,MAAM,SAAS,cAC3B;AAGD,OAAK,MAAM,SAAS,iBAClB,aAAY,KAAK;GACf,MAAM;GACN,cAAc,MAAM;GACpB,SAAS,WAAW,MAAM,QAAQ;GACnC,CAAC;AAGJ,MAAI,YAAY,SAAS,EACvB,aAAY,KAAK;GACf,MAAM;GACN,SAAS,WAAW,YAAY;GACjC,CAAC;OAGJ,aAAY,KAAK;EACf,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CAAC;AAGJ,QAAO;;AAGT,SAAS,uBACP,SACA,iBACgB;AAChB,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,CACL;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;CAGH,MAAM,gBAAgB,QAAQ,QAAQ,QACnC,UAA0C,MAAM,SAAS,WAC3D;CAED,MAAM,aAAa,QAAQ,QAAQ,QAChC,UAAuC,MAAM,SAAS,OACxD;CAED,MAAM,iBAAiB,QAAQ,QAAQ,QACpC,UAA2C,MAAM,SAAS,WAC5D;CAGD,MAAM,iBAAiB,CACrB,GAAG,WAAW,KAAK,MAAM,EAAE,KAAK,EAChC,GAAG,eAAe,KAAK,MAAM,EAAE,SAAS,CACzC,CAAC,KAAK,OAAO;AAEd,QAAO,cAAc,SAAS,IAC1B,CACE;EACE,MAAM;EACN,SAAS,kBAAkB;EAC3B,YAAY,cAAc,KAAK,aAAa;GAC1C,IAAI,QAAQ;GACZ,MAAM;GACN,UAAU;IACR,MAAM,qBAAqB,QAAQ,MAAM,gBAAgB;IACzD,WAAW,KAAK,UAAU,QAAQ,MAAM;IACzC;GACF,EAAE;EACJ,CACF,GACD,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;;AAGP,SAAS,WACP,SAGoC;AACpC,KAAI,OAAO,YAAY,SACrB,QAAO;AAET,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;AAIT,KAAI,CADa,QAAQ,MAAM,UAAU,MAAM,SAAS,QAAQ,CAE9D,QAAO,QACJ,QACE,UACC,MAAM,SAAS,UAAU,MAAM,SAAS,WAC3C,CACA,KAAK,UAAW,MAAM,SAAS,SAAS,MAAM,OAAO,MAAM,SAAU,CACrE,KAAK,OAAO;CAGjB,MAAMC,eAAmC,EAAE;AAC3C,MAAK,MAAM,SAAS,QA
ClB,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM,CAAC;AAErD;EAEF,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAU,CAAC;AAEzD;EAEF,KAAK;AACH,gBAAa,KAAK;IAChB,MAAM;IACN,WAAW,EACT,KAAK,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO,QAC7D;IACF,CAAC;AAEF;;AAKN,QAAO;;AAKT,SAAS,qBACP,cACA,iBACQ;AAER,KAAI,aAAa,UAAU,uBACzB,QAAO;CAIT,MAAM,oBAAoB,gBAAgB,oBAAoB,IAAI,aAAa;AAC/E,KAAI,kBACF,QAAO;CAKT,IAAI,OAAO;AACX,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;EAC5C,MAAM,OAAO,aAAa,WAAW,EAAE;AACvC,UAAS,QAAQ,KAAK,OAAQ;AAC9B,SAAO,OAAO;;CAEhB,MAAM,aAAa,KAAK,IAAI,KAAK,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;CAG1D,MAAM,gBACJ,aAAa,MAAM,GAAG,yBAAyB,EAAE,GAAG,MAAM;AAG5D,iBAAgB,oBAAoB,IAAI,eAAe,aAAa;AACpE,iBAAgB,oBAAoB,IAAI,cAAc,cAAc;AAEpE,SAAQ,MACN,yBAAyB,aAAa,QAAQ,cAAc,GAC7D;AAED,QAAO;;AAGT,SAAS,gCACP,gBACA,iBACyB;AACzB,KAAI,CAAC,eACH;AAEF,QAAO,eAAe,KAAK,UAAU;EACnC,MAAM;EACN,UAAU;GACR,MAAM,qBAAqB,KAAK,MAAM,gBAAgB;GACtD,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB;EACF,EAAE;;AAGL,SAAS,qCACP,qBACA,iBACuC;AACvC,KAAI,CAAC,oBACH;AAGF,SAAQ,oBAAoB,MAA5B;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK;AACH,OAAI,oBAAoB,KACtB,QAAO;IACL,MAAM;IACN,UAAU,EACR,MAAM,qBACJ,oBAAoB,MACpB,gBACD,EACF;IACF;AAEH;EAEF,KAAK,OACH,QAAO;EAET,QACE;;;AAON,SAAgB,qBACd,UACA,iBACmB;AAEnB,KAAI,SAAS,QAAQ,WAAW,EAC9B,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,EAAE;EACX,aAAa;EACb,eAAe;EACf,OAAO;GACL,cAAc,SAAS,OAAO,iBAAiB;GAC/C,eAAe,SAAS,OAAO,qBAAqB;GACrD;EACF;CAIH,MAAMC,gBAA2C,EAAE;CACnD,MAAMC,mBAAiD,EAAE;CACzD,IAAIC,aACF;AACF,cAAa,SAAS,QAAQ,IAAI,iBAAiB;AAGnD,MAAK,MAAM,UAAU,SAAS,SAAS;EACrC,MAAM,aAAa,uBAAuB,OAAO,QAAQ,QAAQ;EACjE,MAAM,gBAAgB,0BACpB,OAAO,QAAQ,YACf,gBACD;AAED,gBAAc,KAAK,GAAG,WAAW;AACjC,mBAAiB,KAAK,GAAG,cAAc;AAGvC,MAAI,OAAO,kBAAkB,gBAAgB,eAAe,OAC1D,cAAa,OAAO;;AAMxB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,CAAC,GAAG,eAAe,GAAG,iBAAiB;EAChD,aAAa,+BAA+B,WAAW;EACvD,eAAe;EACf,OAAO;GACL,eACG,SAAS,OAAO,iBAAiB,MAC/B,SAAS,OAAO,uBAAuB,iBAAiB;GAC7D,eAAe,SAAS,OAAO,qBAAqB;GACpD,GAAI,SAAS,OAAO,uBAAuB,kBACrC,UAAa,EACjB,yBACE,SAAS,MAAM,sBAAsB,eACxC;GACF;EACF;;AAGH,SAAS,uBACP,gBAC2B;AAC3B,KAAI,OAAO,mBAAmB,SAC5B,QAAO,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAgB,CAAC;AAGjD,KAAI,MAAM,QAAQ,eAAe,CAC/B,QAAO,eACJ,QAAQ,SAA2B,KAAK,SAAS,OAAO,CACxD,KAAK,UAAU;EAAE,MAAM;EAAQ,MAAM,KAAK;EAAM,EAAE;AAGvD,QAAO,EAAE;;AAGX,SAAS,0BACP,WACA,iBAC8B;AAC9B,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,UAAU,KAAK,aAAa;EACjC,IAAIC,QAAiC,EAAE;AACvC,MAAI;AACF,WAAQ,KAAK,MAAM,SAAS,SAAS,UAAU;WACxC,OAAO;AACd,WAAQ,KACN,2CAA2C,SAAS,SAAS,KAAK,IAClE,MACD;;EAIH,MAAM,eACJ,iBAAiB,oBAAoB,IAAI,SAAS,SAAS,KAAK,IAC7D,SAAS,SAAS;AAEvB,SAAO;GACL,MAAM;GACN,IAAI,SAAS;GACb,MAAM;GACN;GACD;GACD;;;;;;;;ACjiBJ,eAAsB,kBAAkB,GAAY;AAClD,KAAI;EACF,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;EAEpD,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;EAErE,MAAM,EAAE,SAAS,kBAAkB,kBAAkB,iBAAiB;EAEtE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,iBAAiB,MAC1C;AAED,MAAI,CAAC,eAAe;AAClB,WAAQ,KAAK,iDAAiD;AAC9D,UAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;EAGJ,MAAM,aAAa,MAAM,cAAc,eAAe,cAAc;AAEpE,MAAI,iBAAiB,SAAS,iBAAiB,MAAM,SAAS,GAAG;GAC/D,IAAI,eAAe;AACnB,OAAI,eAAe,WAAW,cAAc,CAC1C,gBAAe,iBAAiB,MAAM,MAAM,SAC1C,KAAK,KAAK,WAAW,QAAQ,CAC9B;AAEH,OAAI,CAAC,cACH;QAAI,iBAAiB,MAAM,WAAW,SAAS,CAG7C,YAAW,QAAQ,WAAW,QAAQ;aAC7B,iBAAiB,MAAM,WAAW,OAAO,CAElD,YAAW,QAAQ,WAAW,QAAQ;;;EAK5C,IAAI,kBAAkB,WAAW,QAAQ,WAAW;AACpD,MAAI,iBAAiB,MAAM,WAAW,SAAS,CAG7C,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;WAC3C,iBAAiB,MAAM,WAAW,OAAO,CAElD,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;AAGtD,UAAQ,KAAK,gBAAgB,gBAAgB;AAE7C,SAAO,EAAE,KAAK,EACZ,cAAc,iBACf,CAAC;UACK,OAAO;AACd,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,SAAO,EAAE,
KAAK,EACZ,cAAc,GACf,CAAC;;;;;;AC/DN,SAAS,gBAAgB,SAAsC;AAC7D,KAAI,CAACC,QAAM,iBACT,QAAO;AAGT,QAAO,OAAO,OAAOA,QAAM,UAAU,CAAC,MACnC,OAAO,GAAG,wBAAwBA,QAAM,kBAC1C;;AAIH,SAAgB,gCACd,OACA,SACA,iBACiC;CACjC,MAAMC,WAA0C,EAAE;AAGlD,KAAI,MAAM,QAAQ,WAAW,GAAG;AAE9B,MAAI,MAAM,SAAS,CAACD,QAAM,MACxB,SAAM,QAAQ,MAAM;AAEtB,SAAOE;;CAGT,MAAM,SAAS,MAAM,QAAQ;CAC7B,MAAM,EAAE,UAAU;AAElB,KAAI,CAACF,QAAM,kBAAkB;EAE3B,MAAM,QAAQ,MAAM,SAASA,QAAM,SAAS;AAC5C,WAAO,KAAK;GACV,MAAM;GACN,SAAS;IACP,IAAI,MAAM,MAAM,OAAO,KAAK,KAAK;IACjC,MAAM;IACN,MAAM;IACN,SAAS,EAAE;IACX;IACA,aAAa;IACb,eAAe;IACf,OAAO;KACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;KAC1D,eAAe;KACf,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;KACF;IACF;GACF,CAAC;AACF,UAAM,mBAAmB;;AAG3B,KAAI,MAAM,SAAS;AACjB,MAAI,gBAAgBA,QAAM,EAAE;AAE1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM;AACN,WAAM,mBAAmB;;AAG3B,MAAI,CAACA,QAAM,kBAAkB;AAC3B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,MAAM;KACP;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,MAAM,MAAM;IACb;GACF,CAAC;;AAGJ,KAAI,MAAM,WACR,MAAK,MAAM,YAAY,MAAM,YAAY;AACvC,MAAI,SAAS,MAAM,SAAS,UAAU,MAAM;AAE1C,OAAIA,QAAM,kBAAkB;AAE1B,aAAO,KAAK;KACV,MAAM;KACN,OAAOA,QAAM;KACd,CAAC;AACF,YAAM;AACN,YAAM,mBAAmB;;GAI3B,MAAM,eACJ,iBAAiB,oBAAoB,IAAI,SAAS,SAAS,KAAK,IAC7D,SAAS,SAAS;GAEvB,MAAM,sBAAsBA,QAAM;AAClC,WAAM,UAAU,SAAS,SAAS;IAChC,IAAI,SAAS;IACb,MAAM;IACN;IACD;AAED,YAAO,KAAK;IACV,MAAM;IACN,OAAO;IACP,eAAe;KACb,MAAM;KACN,IAAI,SAAS;KACb,MAAM;KACN,OAAO,EAAE;KACV;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,MAAI,SAAS,UAAU,WAAW;GAChC,MAAM,eAAeA,QAAM,UAAU,SAAS;AAG9C,OAAI,aACF,UAAO,KAAK;IACV,MAAM;IACN,OAAO,aAAa;IACpB,OAAO;KACL,MAAM;KACN,cAAc,SAAS,SAAS;KACjC;IACF,CAAC;;;AAMV,KAAI,OAAO,eAAe;AACxB,MAAIA,QAAM,kBAAkB;AAC1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KACL;GACE,MAAM;GACN,OAAO;IACL,aAAa,+BAA+B,OAAO,cAAc;IACjE,eAAe;IAChB;GACD,OAAO;IACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;IAC1D,eAAe,MAAM,OAAO,qBAAqB;IACjD,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;IACF;GACF,EACD,EACE,MAAM,gBACP,CACF;;AAGH,QAAOE;;AAGT,SAAgB,sCAAgE;AAC9E,QAAO;EACL,MAAM;EACN,OAAO;GACL,MAAM;GACN,SAAS;GACV;EACF;;;;;AC1KH,eAAsB,iBAAiB,GAAY;CACjD,MAAM,YAAY,KAAK,KAAK;CAC5B,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;AACrE,SAAQ,MAAM,8BAA8B,KAAK,UAAU,iBAAiB,CAAC;CAG7E,MAAM,YAAY,cAAc,aAAa;EAC3C,OAAO,iBAAiB;EACxB,UAAU,yBAAyB,iBAAiB,SAAS;EAC7D,QAAQ,iBAAiB,UAAU;EACnC,OAAO,iBAAiB,OAAO,KAAK,OAAO;GACzC,MAAM,EAAE;GACR,aAAa,EAAE;GAChB,EAAE;EACH,YAAY,iBAAiB;EAC7B,aAAa,iBAAiB;EAC9B,QAAQ,oBAAoB,iBAAiB,OAAO;EACrD,CAAC;CAEF,MAAM,EAAE,SAAS,eAAe,oBAC9B,kBAAkB,iBAAiB;AACrC,SAAQ,MACN,sCACA,KAAK,UAAU,cAAc,CAC9B;AAED,KAAI,MAAM,cACR,OAAM,eAAe;AAGvB,KAAI;EAEF,MAAM,WAAW,MAAM,qBAAqB,aAC1C,sBAAsB,cAAc,CACrC;AAED,MAAI,eAAe,SAAS,EAAE;AAC5B,WAAQ,MACN,wCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;GACD,MAAM,oBAAoB,qBAAqB,UAAU,gBAAgB;AACzE,WAAQ,MACN,kCACA,KAAK,UAAU,kBAAkB,CAClC;AAGD,kBACE,WACA;IACE,SAAS;IACT,OAAO,kBAAkB;IACzB,OAAO,kBAAkB;IACzB,aAAa,kBAAkB,eAAe;IAC9C,SAAS;KACP,MAAM;KACN,SAAS,kBAAkB,QAAQ,KAAK,UAAU;AAChD,UAAI,MAAM,SAAS,OACjB,QAAO;OAAE,MAAM;OAAQ,MAAM,MAAM;OAAM;AAE3C,UAAI,MAAM,SAAS,WACjB,QAAO;OACL,MAAM;OACN,IAAI,MAAM;OACV,MAAM,MAAM;OACZ,OAAO,KAAK,UAAU,MAAM,MAAM;OACnC;AAEH,aAAO,EAAE,MAAM,MAAM,MAAM;OAC3B;KACH;IACD,WAAW,4BAA4B,kBAAkB,QAAQ;IAClE,EACD,KAAK,KAAK,GAAG,UACd;AAED,UAAO,EAAE,KAAK,kBAAkB;;AAGlC,UAAQ,MAAM,kCAAkC;AAChD,SAAO,UAAU,GAAG,OAAO,WAAW;GACpC,MAAMC,cAAoC;IACxC,kBAAkB;IAClB,mBAAmB;IACnB,kBAAkB;IAClB,WAAW,EAAE;IACd;GAGD,IAAI,cAAc;GAClB,IAAI,
oBAAoB;GACxB,IAAI,qBAAqB;GACzB,IAAI,mBAAmB;GACvB,IAAI,gBAAgB;GACpB,MAAMC,kBAID,EAAE;GACP,IAAIC,kBACF;AAEF,OAAI;AACF,eAAW,MAAM,YAAY,UAAU;AACrC,aAAQ,MAAM,6BAA6B,KAAK,UAAU,SAAS,CAAC;AACpE,SAAI,SAAS,SAAS,SACpB;AAGF,SAAI,CAAC,SAAS,KACZ;KAGF,IAAIC;AACJ,SAAI;AACF,cAAQ,KAAK,MAAM,SAAS,KAAK;cAC1B,YAAY;AACnB,cAAQ,MACN,iCACA,YACA,SAAS,KACV;AACD;;AAIF,SAAI,MAAM,SAAS,CAAC,YAClB,eAAc,MAAM;KAGtB,MAAMC,WAAS,gCACb,OACA,aACA,gBACD;AAED,UAAK,MAAM,SAASA,UAAQ;AAC1B,cAAQ,MAAM,+BAA+B,KAAK,UAAU,MAAM,CAAC;AAGnE,cAAQ,MAAM,MAAd;OACE,KAAK;AACH,YAAI,UAAU,MAAM,MAClB,kBAAiB,MAAM,MAAM;iBACpB,kBAAkB,MAAM,SAAS,gBAC1C,iBAAgB,SAAS,MAAM,MAAM;AAGvC;OAEF,KAAK;AACH,YAAI,MAAM,cAAc,SAAS,WAC/B,mBAAkB;SAChB,IAAI,MAAM,cAAc;SACxB,MAAM,MAAM,cAAc;SAC1B,OAAO;SACR;AAGH;OAEF,KAAK;AACH,YAAI,iBAAiB;AACnB,yBAAgB,KAAK,gBAAgB;AACrC,2BAAkB;;AAGpB;OAEF,KAAK;AACH,YAAI,MAAM,MAAM,YACd,oBAAmB,MAAM,MAAM;AAEjC,YAAI,MAAM,OAAO;AACf,6BAAoB,MAAM,MAAM,gBAAgB;AAChD,8BAAqB,MAAM,MAAM;;AAGnC;;AAKJ,YAAM,OAAO,SAAS;OACpB,OAAO,MAAM;OACb,MAAM,KAAK,UAAU,MAAM;OAC5B,CAAC;;;IAKN,MAAMC,gBAAwD,EAAE;AAChE,QAAI,cACF,eAAc,KAAK;KAAE,MAAM;KAAQ,MAAM;KAAe,CAAC;AAE3D,SAAK,MAAM,MAAM,gBACf,eAAc,KAAK;KACjB,MAAM;KACN,GAAG;KACJ,CAAC;AAGJ,mBACE,WACA;KACE,SAAS;KACT,OAAO,eAAe,iBAAiB;KACvC,OAAO;MACL,cAAc;MACd,eAAe;MAChB;KACD,aAAa,oBAAoB;KACjC,SACE,cAAc,SAAS,IACrB;MAAE,MAAM;MAAa,SAAS;MAAe,GAC7C;KACJ,WACE,gBAAgB,SAAS,IACvB,gBAAgB,KAAK,QAAQ;MAC3B,IAAI,GAAG;MACP,MAAM,GAAG;MACT,OAAO,GAAG;MACX,EAAE,GACH;KACL,EACD,KAAK,KAAK,GAAG,UACd;YACM,OAAO;AACd,YAAQ,MAAM,iBAAiB,MAAM;AAGrC,mBACE,WACA;KACE,SAAS;KACT,OAAO,eAAe,iBAAiB;KACvC,OAAO;MAAE,cAAc;MAAG,eAAe;MAAG;KAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;KAChD,SAAS;KACV,EACD,KAAK,KAAK,GAAG,UACd;IAED,MAAM,aAAa,qCAAqC;AACxD,UAAM,OAAO,SAAS;KACpB,OAAO,WAAW;KAClB,MAAM,KAAK,UAAU,WAAW;KACjC,CAAC;;IAEJ;UACK,OAAO;AAEd,iBACE,WACA;GACE,SAAS;GACT,OAAO,iBAAiB;GACxB,OAAO;IAAE,cAAc;IAAG,eAAe;IAAG;GAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;GAChD,SAAS;GACV,EACD,KAAK,KAAK,GAAG,UACd;AACD,QAAM;;;AAKV,SAAS,yBACP,UACuB;AACvB,QAAO,SAAS,KAAK,QAAQ;AAC3B,MAAI,OAAO,IAAI,YAAY,SACzB,QAAO;GAAE,MAAM,IAAI;GAAM,SAAS,IAAI;GAAS;EAIjD,MAAM,UAAU,IAAI,QAAQ,KAAK,UAAU;AACzC,OAAI,MAAM,SAAS,OACjB,QAAO;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM;AAE3C,OAAI,MAAM,SAAS,WACjB,QAAO;IACL,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,OAAO,KAAK,UAAU,MAAM,MAAM;IACnC;AAEH,OAAI,MAAM,SAAS,eAAe;IAChC,MAAM,gBACJ,OAAO,MAAM,YAAY,WACvB,MAAM,UACN,MAAM,QACH,KAAK,MAAO,EAAE,SAAS,SAAS,EAAE,OAAO,IAAI,EAAE,KAAK,GAAI,CACxD,KAAK,KAAK;AACjB,WAAO;KACL,MAAM;KACN,aAAa,MAAM;KACnB,SAAS;KACV;;AAEH,UAAO,EAAE,MAAM,MAAM,MAAM;IAC3B;AAEF,SAAO;GAAE,MAAM,IAAI;GAAM;GAAS;GAClC;;AAIJ,SAAS,oBACP,QACoB;AACpB,KAAI,CAAC,OAAQ,QAAO;AACpB,KAAI,OAAO,WAAW,SAAU,QAAO;AACvC,QAAO,OAAO,KAAK,UAAU,MAAM,KAAK,CAAC,KAAK,KAAK;;AAIrD,SAAS,4BACP,SACgE;CAChE,MAAMC,QAA4D,EAAE;AACpE,MAAK,MAAM,SAAS,QAClB,KACE,OAAO,UAAU,YACd,UAAU,QACV,UAAU,SACV,MAAM,SAAS,cACf,QAAQ,SACR,UAAU,SACV,WAAW,MAEd,OAAM,KAAK;EACT,IAAI,OAAO,MAAM,GAAG;EACpB,MAAM,OAAO,MAAM,KAAK;EACxB,OAAO,KAAK,UAAU,MAAM,MAAM;EACnC,CAAC;AAGN,QAAO,MAAM,SAAS,IAAI,QAAQ;;AAGpC,MAAM,kBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;;;;ACnX3E,MAAa,gBAAgB,IAAI,MAAM;AAEvC,cAAc,KAAK,KAAK,OAAO,MAAM;AACnC,KAAI;AACF,SAAO,MAAM,iBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;AAEF,cAAc,KAAK,iBAAiB,OAAO,MAAM;AAC/C,KAAI;AACF,SAAO,MAAM,kBAAkB,EAAE;UAC1B,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,cAAc,IAAI,MAAM;AAErC,YAAY,IAAI,KAAK,OAAO,MAAM;AAChC,KAAI;AACF,MAAI,CAAC,MAAM,OAET,OAAM,aAAa;EAGrB,MAAM,SAAS,MAAM,QAAQ,KAAK,KAAK,WAAW;GAChD,IAAI,MAAM;GACV,QAAQ;GACR,MAAM;GACN,SAAS;GACT,6BAAY,IAAI,KAAK,EAAE,EAAC,aAAa;GACrC,UAAU,MAAM;GAChB,cAAc,MAAM;GACrB,EAAE;AAEH,SAAO,EAAE,
KAAK;GACZ,QAAQ;GACR,MAAM;GACN,UAAU;GACX,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;AC5BF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;AACF,SAAO,EAAE,KAAK,EACZ,OAAO,MAAM,cACd,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;EACF,MAAM,QAAQ,MAAM,iBAAiB;AACrC,SAAO,EAAE,KAAK,MAAM;UACb,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACAF,MAAa,SAAS,IAAI,MAAM;AAEhC,OAAO,IAAI,QAAQ,CAAC;AACpB,OAAO,IAAI,MAAM,CAAC;AAElB,OAAO,IAAI,MAAM,MAAM,EAAE,KAAK,iBAAiB,CAAC;AAGhD,OAAO,IAAI,YAAY,MAAM;CAC3B,MAAM,UAAU,QAAQ,MAAM,gBAAgB,MAAM,YAAY;AAChE,QAAO,EAAE,KACP;EACE,QAAQ,UAAU,YAAY;EAC9B,QAAQ;GACN,cAAc,QAAQ,MAAM,aAAa;GACzC,aAAa,QAAQ,MAAM,YAAY;GACvC,QAAQ,QAAQ,MAAM,OAAO;GAC9B;EACF,EACD,UAAU,MAAM,IACjB;EACD;AAEF,OAAO,MAAM,qBAAqB,iBAAiB;AACnD,OAAO,MAAM,WAAW,YAAY;AACpC,OAAO,MAAM,eAAe,gBAAgB;AAC5C,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,UAAU,WAAW;AAGlC,OAAO,MAAM,wBAAwB,iBAAiB;AACtD,OAAO,MAAM,cAAc,YAAY;AACvC,OAAO,MAAM,kBAAkB,gBAAgB;AAG/C,OAAO,MAAM,gBAAgB,cAAc;AAC3C,OAAO,MAAM,sBAAsB,mBAAmB;AAGtD,OAAO,MAAM,YAAY,cAAc;;;;ACpBvC,eAAsB,UAAU,SAA0C;AACxE,KAAI,QAAQ,SACV,mBAAkB;AAGpB,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,cAAc,QAAQ;AAC5B,KAAI,QAAQ,gBAAgB,aAC1B,SAAQ,KAAK,SAAS,QAAQ,YAAY,sBAAsB;AAGlE,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,mBAAmB,QAAQ;AACjC,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,YAAY,QAAQ;AAG1B,aAAY,QAAQ,SAAS,QAAQ,aAAa;AAClD,KAAI,QAAQ,QACV,SAAQ,KAAK,kCAAkC,QAAQ,aAAa,WAAW;AAGjF,OAAM,aAAa;AACnB,OAAM,oBAAoB;AAE1B,KAAI,QAAQ,aAAa;AACvB,QAAM,cAAc,QAAQ;AAC5B,UAAQ,KAAK,8BAA8B;OAE3C,OAAM,kBAAkB;AAG1B,OAAM,mBAAmB;AACzB,OAAM,aAAa;AAEnB,SAAQ,KACN,uBAAuB,MAAM,QAAQ,KAAK,KAAK,UAAU,KAAK,MAAM,KAAK,CAAC,KAAK,KAAK,GACrF;CAGD,MAAM,YAAY,UADE,QAAQ,QAAQ,YACI,GAAG,QAAQ;AAEnD,KAAI,QAAQ,YAAY;AACtB,YAAU,MAAM,QAAQ,iCAAiC;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,OAClC,0CACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;GACpD,CACF;EAED,MAAM,qBAAqB,MAAM,QAAQ,OACvC,gDACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;GACpD,CACF;EAED,MAAM,UAAU,kBACd;GACE,oBAAoB;GACpB,sBAAsB;GACtB,iBAAiB;GACjB,gCAAgC;GAChC,4BAA4B;GAC5B,+BAA+B;GAC/B,mCAAmC;GACnC,0CAA0C;GAC3C,EACD,SACD;AAED,MAAI;AACF,aAAU,UAAU,QAAQ;AAC5B,WAAQ,QAAQ,2CAA2C;UACrD;AACN,WAAQ,KACN,gEACD;AACD,WAAQ,IAAI,QAAQ;;;AAIxB,SAAQ,IACN,oEAAoE,UAAU,QAAQ,QAAQ,UAAU,oBAAoB,UAAU,YAAY,KACnJ;AAED,OAAM;EACJ,OAAO,OAAO;EACd,MAAM,QAAQ;EACd,UAAU,QAAQ;EACnB,CAAC;;AAGJ,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,MAAM;GACJ,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,OAAO;GACP,MAAM;GACN,aAAa;GACd;EACD,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,eAAe;GACb,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,aAAa;GACX,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,SAAS;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,iBAAiB;GACf,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;EACZ,MAAM,eAAe,KAAK;EAC1B,MAAM,YAEJ,iBAAiB,SAAY,SAAY,OAAO,SAAS,cAAc,GAAG;AAE5E,SAAO,UAAU;GACf,MAAM,OAAO,SAAS,KAAK,MAAM,GAAG;GACpC,MAAM,KAAK;GACX,SAAS,KAAK;GACd,aAAa,KAAK;GAClB,QAAQ,KAAK;GACb;GACA,eAAe,KAAK;GACpB,aAAa,KAAK;GAClB,YAAY,KAAK;GACjB,WAAW,KAAK;GAChB,UAAU,KAAK;GACf,SAAS,KAAK;GACd,cAAc,OAAO,SAAS,KAAK,kBAAkB,GAAG;GACzD,CAAC;;CAEL,CAAC;;;;ACjOF,QAAQ,QAAQ,cAAc,OAAO;AAErC,MAAM,OAAO,cAAc;CACzB,MAAM;EACJ,MAAM;EACN,aACE;EACH;CACD,aAAa;EAA
E;EAAM;EAAQ;EAAO,eAAe;EAAY;EAAO;CACvE,CAAC;AAEF,MAAM,QAAQ,KAAK"}
1
+ {"version":3,"file":"main.js","names":["state: State","state","headers: Record<string, string>","errorJson: unknown","token","generateId","historyState: HistoryState","entry: HistoryEntry","start","modelDist: Record<string, number>","endpointDist: Record<string, number>","hourlyActivity: Record<string, number>","process","env","commandBlock: string","formatDuration","formatNumber","formatTokens","pc","tuiState: TuiState","listeners: Array<StateListener>","tabs: Array<{ key: TabType; label: string; count: number }>","request: TrackedRequest","path","DEFAULT_CONFIG: AutoCompactConfig","systemMessages: Array<Message>","newPayload: ChatCompletionsPayload","state","x","headers: Record<string, string>","handleCompletion","ctx: ResponseContext","buildFinalPayload","isNonStreaming","handleNonStreamingResponse","handleStreamingResponse","updateTrackerModel","updateTrackerStatus","recordErrorResponse","choice","markerChunk: ChatCompletionChunk","completeTracking","failTracking","result: MessageContent","handleCompletion","options: QueryOptions","fixedMessages: Array<Message>","toolNameMapping: ToolNameMapping","shortNameMap: Record<string, string>","newMessages: Array<Message>","contentParts: Array<ContentPart>","allTextBlocks: Array<AnthropicTextBlock>","allToolUseBlocks: Array<AnthropicToolUseBlock>","stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null","input: Record<string, unknown>","state","events: Array<AnthropicStreamEventData>","events","ctx: ResponseContext","streamState: AnthropicStreamState","chunk: ChatCompletionChunk","events","contentBlocks: Array<{ type: string; text?: string }>","tools: Array<{ id: string; name: string; input: string }>"],"sources":["../src/lib/paths.ts","../src/lib/state.ts","../src/lib/api-config.ts","../src/lib/error.ts","../src/services/github/get-copilot-token.ts","../src/services/github/get-device-code.ts","../src/services/github/get-user.ts","../src/services/copilot/get-models.ts","../src/services/get-vscode-version.ts","../src/lib/utils.ts","../src/services/github/poll-access-token.ts","../src/lib/token.ts","../src/auth.ts","../src/services/github/get-copilot-usage.ts","../src/check-usage.ts","../src/debug.ts","../src/logout.ts","../src/lib/history.ts","../src/lib/proxy.ts","../src/lib/shell.ts","../node_modules/picocolors/picocolors.js","../src/lib/tui/console-renderer.ts","../src/lib/tui/fullscreen-renderer.tsx","../src/lib/tui/tracker.ts","../src/lib/tui/middleware.ts","../src/lib/tui/index.ts","../src/lib/approval.ts","../src/lib/tokenizer.ts","../src/lib/auto-compact.ts","../src/lib/queue.ts","../src/services/copilot/create-chat-completions.ts","../src/routes/chat-completions/handler.ts","../src/routes/chat-completions/route.ts","../src/services/copilot/create-embeddings.ts","../src/routes/embeddings/route.ts","../src/routes/event-logging/route.ts","../src/routes/history/api.ts","../src/routes/history/ui/script.ts","../src/routes/history/ui/styles.ts","../src/routes/history/ui/template.ts","../src/routes/history/ui.ts","../src/routes/history/route.ts","../src/routes/messages/utils.ts","../src/routes/messages/non-stream-translation.ts","../src/routes/messages/count-tokens-handler.ts","../src/routes/messages/stream-translation.ts","../src/routes/messages/handler.ts","../src/routes/messages/route.ts","../src/routes/models/route.ts","../src/routes/token/route.ts","../src/routes/usage/route.ts","../src/server.ts","../src/start.ts","../src/main.ts"],"sourcesContent":["import fs from \"node:fs/promises\"\nimport os from 
\"node:os\"\nimport path from \"node:path\"\n\nconst APP_DIR = path.join(os.homedir(), \".local\", \"share\", \"copilot-api\")\n\nconst GITHUB_TOKEN_PATH = path.join(APP_DIR, \"github_token\")\n\nexport const PATHS = {\n APP_DIR,\n GITHUB_TOKEN_PATH,\n}\n\nexport async function ensurePaths(): Promise<void> {\n await fs.mkdir(PATHS.APP_DIR, { recursive: true })\n await ensureFile(PATHS.GITHUB_TOKEN_PATH)\n}\n\nasync function ensureFile(filePath: string): Promise<void> {\n try {\n await fs.access(filePath, fs.constants.W_OK)\n // File exists, ensure it has secure permissions (owner read/write only)\n const stats = await fs.stat(filePath)\n const currentMode = stats.mode & 0o777\n if (currentMode !== 0o600) {\n await fs.chmod(filePath, 0o600)\n }\n } catch {\n await fs.writeFile(filePath, \"\")\n await fs.chmod(filePath, 0o600)\n }\n}\n","import type { ModelsResponse } from \"~/services/copilot/get-models\"\n\nexport interface State {\n githubToken?: string\n copilotToken?: string\n\n accountType: string\n models?: ModelsResponse\n vsCodeVersion?: string\n\n manualApprove: boolean\n rateLimitWait: boolean\n showToken: boolean\n\n // Rate limiting configuration\n rateLimitSeconds?: number\n lastRequestTimestamp?: number\n\n // Auto-compact configuration\n autoCompact: boolean\n}\n\nexport const state: State = {\n accountType: \"individual\",\n manualApprove: false,\n rateLimitWait: false,\n showToken: false,\n autoCompact: false,\n}\n","import { randomUUID } from \"node:crypto\"\n\nimport type { State } from \"./state\"\n\nexport const standardHeaders = () => ({\n \"content-type\": \"application/json\",\n accept: \"application/json\",\n})\n\nconst COPILOT_VERSION = \"0.26.7\"\nconst EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`\nconst USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`\n\nconst API_VERSION = \"2025-04-01\"\n\nexport const copilotBaseUrl = (state: State) =>\n state.accountType === \"individual\" ?\n \"https://api.githubcopilot.com\"\n : `https://api.${state.accountType}.githubcopilot.com`\nexport const copilotHeaders = (state: State, vision: boolean = false) => {\n const headers: Record<string, string> = {\n Authorization: `Bearer ${state.copilotToken}`,\n \"content-type\": standardHeaders()[\"content-type\"],\n \"copilot-integration-id\": \"vscode-chat\",\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"openai-intent\": \"conversation-panel\",\n \"x-github-api-version\": API_VERSION,\n \"x-request-id\": randomUUID(),\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n }\n\n if (vision) headers[\"copilot-vision-request\"] = \"true\"\n\n return headers\n}\n\nexport const GITHUB_API_BASE_URL = \"https://api.github.com\"\nexport const githubHeaders = (state: State) => ({\n ...standardHeaders(),\n authorization: `token ${state.githubToken}`,\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"x-github-api-version\": API_VERSION,\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n})\n\nexport const GITHUB_BASE_URL = \"https://github.com\"\nexport const GITHUB_CLIENT_ID = \"Iv1.b507a08c87ecfe98\"\nexport const GITHUB_APP_SCOPES = [\"read:user\"].join(\" \")\n","import type { Context } from \"hono\"\nimport type { ContentfulStatusCode } from \"hono/utils/http-status\"\n\nimport consola from \"consola\"\n\nexport class HTTPError extends Error {\n status: number\n 
responseText: string\n\n constructor(message: string, status: number, responseText: string) {\n super(message)\n this.status = status\n this.responseText = responseText\n }\n\n static async fromResponse(\n message: string,\n response: Response,\n ): Promise<HTTPError> {\n const text = await response.text()\n return new HTTPError(message, response.status, text)\n }\n}\n\n/** Copilot error structure */\ninterface CopilotError {\n error?: {\n message?: string\n code?: string\n }\n}\n\n/** Parse token limit info from error message */\nfunction parseTokenLimitError(message: string): {\n current: number\n limit: number\n} | null {\n // Match: \"prompt token count of 135355 exceeds the limit of 128000\"\n const match = message.match(\n /prompt token count of (\\d+) exceeds the limit of (\\d+)/,\n )\n if (match) {\n return {\n current: Number.parseInt(match[1], 10),\n limit: Number.parseInt(match[2], 10),\n }\n }\n return null\n}\n\n/** Format Anthropic-compatible error for token limit exceeded */\nfunction formatTokenLimitError(current: number, limit: number) {\n const excess = current - limit\n const percentage = Math.round((excess / limit) * 100)\n\n // Return Anthropic-compatible error that clients can recognize and handle\n // The \"prompt_too_long\" type is what Anthropic's API returns for context limit errors\n // This should trigger Claude Code's auto-compact behavior\n return {\n type: \"error\",\n error: {\n type: \"invalid_request_error\",\n message:\n `prompt is too long: ${current} tokens > ${limit} maximum `\n + `(${excess} tokens over, ${percentage}% excess)`,\n },\n }\n}\n\n// eslint-disable-next-line @typescript-eslint/require-await\nexport async function forwardError(c: Context, error: unknown) {\n consola.error(\"Error occurred:\", error)\n\n if (error instanceof HTTPError) {\n let errorJson: unknown\n try {\n errorJson = JSON.parse(error.responseText)\n } catch {\n errorJson = error.responseText\n }\n consola.error(\"HTTP error:\", errorJson)\n\n // Check for token limit exceeded error from Copilot\n const copilotError = errorJson as CopilotError\n if (copilotError.error?.code === \"model_max_prompt_tokens_exceeded\") {\n const tokenInfo = parseTokenLimitError(copilotError.error.message ?? 
\"\")\n if (tokenInfo) {\n const formattedError = formatTokenLimitError(\n tokenInfo.current,\n tokenInfo.limit,\n )\n consola.debug(\"Returning formatted token limit error:\", formattedError)\n return c.json(formattedError, 400 as ContentfulStatusCode)\n }\n }\n\n return c.json(\n {\n error: {\n message: error.responseText,\n type: \"error\",\n },\n },\n error.status as ContentfulStatusCode,\n )\n }\n\n return c.json(\n {\n error: {\n message: (error as Error).message,\n type: \"error\",\n },\n },\n 500,\n )\n}\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotToken = async () => {\n const response = await fetch(\n `${GITHUB_API_BASE_URL}/copilot_internal/v2/token`,\n {\n headers: githubHeaders(state),\n },\n )\n\n if (!response.ok)\n throw await HTTPError.fromResponse(\"Failed to get Copilot token\", response)\n\n return (await response.json()) as GetCopilotTokenResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GetCopilotTokenResponse {\n expires_at: number\n refresh_in: number\n token: string\n}\n","import {\n GITHUB_APP_SCOPES,\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\n\nexport async function getDeviceCode(): Promise<DeviceCodeResponse> {\n const response = await fetch(`${GITHUB_BASE_URL}/login/device/code`, {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n scope: GITHUB_APP_SCOPES,\n }),\n })\n\n if (!response.ok)\n throw await HTTPError.fromResponse(\"Failed to get device code\", response)\n\n return (await response.json()) as DeviceCodeResponse\n}\n\nexport interface DeviceCodeResponse {\n device_code: string\n user_code: string\n verification_uri: string\n expires_in: number\n interval: number\n}\n","import { GITHUB_API_BASE_URL, standardHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport async function getGitHubUser() {\n const response = await fetch(`${GITHUB_API_BASE_URL}/user`, {\n headers: {\n authorization: `token ${state.githubToken}`,\n ...standardHeaders(),\n },\n })\n\n if (!response.ok)\n throw await HTTPError.fromResponse(\"Failed to get GitHub user\", response)\n\n return (await response.json()) as GithubUserResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GithubUserResponse {\n login: string\n}\n","import { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getModels = async () => {\n const response = await fetch(`${copilotBaseUrl(state)}/models`, {\n headers: copilotHeaders(state),\n })\n\n if (!response.ok)\n throw await HTTPError.fromResponse(\"Failed to get models\", response)\n\n return (await response.json()) as ModelsResponse\n}\n\nexport interface ModelsResponse {\n data: Array<Model>\n object: string\n}\n\ninterface ModelLimits {\n max_context_window_tokens?: number\n max_output_tokens?: number\n max_prompt_tokens?: number\n max_inputs?: number\n}\n\ninterface ModelSupports {\n tool_calls?: boolean\n parallel_tool_calls?: boolean\n dimensions?: boolean\n}\n\ninterface ModelCapabilities {\n family: string\n limits: ModelLimits\n object: string\n supports: ModelSupports\n tokenizer: string\n type: string\n}\n\nexport interface Model {\n capabilities: ModelCapabilities\n id: 
string\n model_picker_enabled: boolean\n name: string\n object: string\n preview: boolean\n vendor: string\n version: string\n policy?: {\n state: string\n terms: string\n }\n}\n","const FALLBACK = \"1.104.3\"\n\n// GitHub API endpoint for latest VSCode release\nconst GITHUB_API_URL =\n \"https://api.github.com/repos/microsoft/vscode/releases/latest\"\n\ninterface GitHubRelease {\n tag_name: string\n}\n\nexport async function getVSCodeVersion() {\n const controller = new AbortController()\n const timeout = setTimeout(() => {\n controller.abort()\n }, 5000)\n\n try {\n const response = await fetch(GITHUB_API_URL, {\n signal: controller.signal,\n headers: {\n Accept: \"application/vnd.github.v3+json\",\n \"User-Agent\": \"copilot-api\",\n },\n })\n\n if (!response.ok) {\n return FALLBACK\n }\n\n const release = (await response.json()) as GitHubRelease\n // tag_name is in format \"1.107.1\"\n const version = release.tag_name\n if (version && /^\\d+\\.\\d+\\.\\d+$/.test(version)) {\n return version\n }\n\n return FALLBACK\n } catch {\n return FALLBACK\n } finally {\n clearTimeout(timeout)\n }\n}\n","import consola from \"consola\"\n\nimport { getModels } from \"~/services/copilot/get-models\"\nimport { getVSCodeVersion } from \"~/services/get-vscode-version\"\n\nimport { state } from \"./state\"\n\nexport const sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms)\n })\n\nexport const isNullish = (value: unknown): value is null | undefined =>\n value === null || value === undefined\n\nexport async function cacheModels(): Promise<void> {\n const models = await getModels()\n state.models = models\n}\n\nexport const cacheVSCodeVersion = async () => {\n const response = await getVSCodeVersion()\n state.vsCodeVersion = response\n\n consola.info(`Using VSCode version: ${response}`)\n}\n","import consola from \"consola\"\n\nimport {\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { sleep } from \"~/lib/utils\"\n\nimport type { DeviceCodeResponse } from \"./get-device-code\"\n\nexport async function pollAccessToken(\n deviceCode: DeviceCodeResponse,\n): Promise<string> {\n // Interval is in seconds, we need to multiply by 1000 to get milliseconds\n // I'm also adding another second, just to be safe\n const sleepDuration = (deviceCode.interval + 1) * 1000\n consola.debug(`Polling access token with interval of ${sleepDuration}ms`)\n\n // Calculate expiration time based on expires_in from device code response\n const expiresAt = Date.now() + deviceCode.expires_in * 1000\n\n while (Date.now() < expiresAt) {\n const response = await fetch(\n `${GITHUB_BASE_URL}/login/oauth/access_token`,\n {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n device_code: deviceCode.device_code,\n grant_type: \"urn:ietf:params:oauth:grant-type:device_code\",\n }),\n },\n )\n\n if (!response.ok) {\n await sleep(sleepDuration)\n consola.error(\"Failed to poll access token:\", await response.text())\n\n continue\n }\n\n const json = (await response.json()) as AccessTokenResponse\n consola.debug(\"Polling access token response:\", json)\n\n const { access_token } = json\n\n if (access_token) {\n return access_token\n } else {\n await sleep(sleepDuration)\n }\n }\n\n throw new Error(\n \"Device code expired. 
Please run the authentication flow again.\",\n )\n}\n\ninterface AccessTokenResponse {\n access_token: string\n token_type: string\n scope: string\n}\n","import consola from \"consola\"\nimport fs from \"node:fs/promises\"\n\nimport { PATHS } from \"~/lib/paths\"\nimport { getCopilotToken } from \"~/services/github/get-copilot-token\"\nimport { getDeviceCode } from \"~/services/github/get-device-code\"\nimport { getGitHubUser } from \"~/services/github/get-user\"\nimport { pollAccessToken } from \"~/services/github/poll-access-token\"\n\nimport { HTTPError } from \"./error\"\nimport { state } from \"./state\"\n\nconst readGithubToken = () => fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n\nconst writeGithubToken = (token: string) =>\n fs.writeFile(PATHS.GITHUB_TOKEN_PATH, token)\n\nexport const setupCopilotToken = async () => {\n const { token, refresh_in } = await getCopilotToken()\n state.copilotToken = token\n\n // Display the Copilot token to the screen\n consola.debug(\"GitHub Copilot Token fetched successfully!\")\n if (state.showToken) {\n consola.info(\"Copilot token:\", token)\n }\n\n const refreshInterval = (refresh_in - 60) * 1000\n setInterval(async () => {\n consola.debug(\"Refreshing Copilot token\")\n try {\n const { token } = await getCopilotToken()\n state.copilotToken = token\n consola.debug(\"Copilot token refreshed\")\n if (state.showToken) {\n consola.info(\"Refreshed Copilot token:\", token)\n }\n } catch (error) {\n // Log error but don't throw - throwing in setInterval crashes the process\n // The existing token will continue to work until it expires\n // Next refresh attempt will try again\n consola.error(\n \"Failed to refresh Copilot token (will retry on next interval):\",\n error,\n )\n }\n }, refreshInterval)\n}\n\ninterface SetupGitHubTokenOptions {\n force?: boolean\n}\n\nexport async function setupGitHubToken(\n options?: SetupGitHubTokenOptions,\n): Promise<void> {\n try {\n const githubToken = await readGithubToken()\n\n if (githubToken && !options?.force) {\n state.githubToken = githubToken\n if (state.showToken) {\n consola.info(\"GitHub token:\", githubToken)\n }\n await logUser()\n\n return\n }\n\n consola.info(\"Not logged in, getting new access token\")\n const response = await getDeviceCode()\n consola.debug(\"Device code response:\", response)\n\n consola.info(\n `Please enter the code \"${response.user_code}\" in ${response.verification_uri}`,\n )\n\n const token = await pollAccessToken(response)\n await writeGithubToken(token)\n state.githubToken = token\n\n if (state.showToken) {\n consola.info(\"GitHub token:\", token)\n }\n await logUser()\n } catch (error) {\n if (error instanceof HTTPError) {\n consola.error(\"Failed to get GitHub token:\", error.responseText)\n throw error\n }\n\n consola.error(\"Failed to get GitHub token:\", error)\n throw error\n }\n}\n\nasync function logUser() {\n const user = await getGitHubUser()\n consola.info(`Logged in as ${user.login}`)\n}\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { PATHS, ensurePaths } from \"./lib/paths\"\nimport { state } from \"./lib/state\"\nimport { setupGitHubToken } from \"./lib/token\"\n\ninterface RunAuthOptions {\n verbose: boolean\n showToken: boolean\n}\n\nexport async function runAuth(options: RunAuthOptions): Promise<void> {\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.showToken = options.showToken\n\n await ensurePaths()\n await setupGitHubToken({ force: 
true })\n consola.success(\"GitHub token written to\", PATHS.GITHUB_TOKEN_PATH)\n}\n\nexport const auth = defineCommand({\n meta: {\n name: \"auth\",\n description: \"Run GitHub auth flow without running the server\",\n },\n args: {\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub token on auth\",\n },\n },\n run({ args }) {\n return runAuth({\n verbose: args.verbose,\n showToken: args[\"show-token\"],\n })\n },\n})\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotUsage = async (): Promise<CopilotUsageResponse> => {\n const response = await fetch(`${GITHUB_API_BASE_URL}/copilot_internal/user`, {\n headers: githubHeaders(state),\n })\n\n if (!response.ok) {\n throw await HTTPError.fromResponse(\"Failed to get Copilot usage\", response)\n }\n\n return (await response.json()) as CopilotUsageResponse\n}\n\nexport interface QuotaDetail {\n entitlement: number\n overage_count: number\n overage_permitted: boolean\n percent_remaining: number\n quota_id: string\n quota_remaining: number\n remaining: number\n unlimited: boolean\n}\n\ninterface QuotaSnapshots {\n chat: QuotaDetail\n completions: QuotaDetail\n premium_interactions: QuotaDetail\n}\n\ninterface CopilotUsageResponse {\n access_type_sku: string\n analytics_tracking_id: string\n assigned_date: string\n can_signup_for_limited: boolean\n chat_enabled: boolean\n copilot_plan: string\n organization_login_list: Array<unknown>\n organization_list: Array<unknown>\n quota_reset_date: string\n quota_snapshots: QuotaSnapshots\n}\n","import { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { ensurePaths } from \"./lib/paths\"\nimport { setupGitHubToken } from \"./lib/token\"\nimport {\n getCopilotUsage,\n type QuotaDetail,\n} from \"./services/github/get-copilot-usage\"\n\nexport const checkUsage = defineCommand({\n meta: {\n name: \"check-usage\",\n description: \"Show current GitHub Copilot usage/quota information\",\n },\n async run() {\n await ensurePaths()\n await setupGitHubToken()\n try {\n const usage = await getCopilotUsage()\n const premium = usage.quota_snapshots.premium_interactions\n const premiumTotal = premium.entitlement\n const premiumUsed = premiumTotal - premium.remaining\n const premiumPercentUsed =\n premiumTotal > 0 ? (premiumUsed / premiumTotal) * 100 : 0\n const premiumPercentRemaining = premium.percent_remaining\n\n // Helper to summarize a quota snapshot\n function summarizeQuota(name: string, snap: QuotaDetail | undefined) {\n if (!snap) return `${name}: N/A`\n const total = snap.entitlement\n const used = total - snap.remaining\n const percentUsed = total > 0 ? 
(used / total) * 100 : 0\n const percentRemaining = snap.percent_remaining\n return `${name}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`\n }\n\n const premiumLine = `Premium: ${premiumUsed}/${premiumTotal} used (${premiumPercentUsed.toFixed(1)}% used, ${premiumPercentRemaining.toFixed(1)}% remaining)`\n const chatLine = summarizeQuota(\"Chat\", usage.quota_snapshots.chat)\n const completionsLine = summarizeQuota(\n \"Completions\",\n usage.quota_snapshots.completions,\n )\n\n consola.box(\n `Copilot Usage (plan: ${usage.copilot_plan})\\n`\n + `Quota resets: ${usage.quota_reset_date}\\n`\n + `\\nQuotas:\\n`\n + ` ${premiumLine}\\n`\n + ` ${chatLine}\\n`\n + ` ${completionsLine}`,\n )\n } catch (err) {\n consola.error(\"Failed to fetch Copilot usage:\", err)\n process.exit(1)\n }\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\nimport fs from \"node:fs/promises\"\nimport os from \"node:os\"\n\nimport { PATHS } from \"./lib/paths\"\n\ninterface DebugInfo {\n version: string\n runtime: {\n name: string\n version: string\n platform: string\n arch: string\n }\n paths: {\n APP_DIR: string\n GITHUB_TOKEN_PATH: string\n }\n tokenExists: boolean\n}\n\ninterface RunDebugOptions {\n json: boolean\n}\n\nasync function getPackageVersion(): Promise<string> {\n try {\n const packageJsonPath = new URL(\"../package.json\", import.meta.url).pathname\n // @ts-expect-error https://github.com/sindresorhus/eslint-plugin-unicorn/blob/v59.0.1/docs/rules/prefer-json-parse-buffer.md\n // JSON.parse() can actually parse buffers\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath)) as {\n version: string\n }\n return packageJson.version\n } catch {\n return \"unknown\"\n }\n}\n\nfunction getRuntimeInfo() {\n const isBun = typeof Bun !== \"undefined\"\n\n return {\n name: isBun ? \"bun\" : \"node\",\n version: isBun ? Bun.version : process.version.slice(1),\n platform: os.platform(),\n arch: os.arch(),\n }\n}\n\nasync function checkTokenExists(): Promise<boolean> {\n try {\n const stats = await fs.stat(PATHS.GITHUB_TOKEN_PATH)\n if (!stats.isFile()) return false\n\n const content = await fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n return content.trim().length > 0\n } catch {\n return false\n }\n}\n\nasync function getDebugInfo(): Promise<DebugInfo> {\n const [version, tokenExists] = await Promise.all([\n getPackageVersion(),\n checkTokenExists(),\n ])\n\n return {\n version,\n runtime: getRuntimeInfo(),\n paths: {\n APP_DIR: PATHS.APP_DIR,\n GITHUB_TOKEN_PATH: PATHS.GITHUB_TOKEN_PATH,\n },\n tokenExists,\n }\n}\n\nfunction printDebugInfoPlain(info: DebugInfo): void {\n consola.info(`copilot-api debug\n\nVersion: ${info.version}\nRuntime: ${info.runtime.name} ${info.runtime.version} (${info.runtime.platform} ${info.runtime.arch})\n\nPaths:\n- APP_DIR: ${info.paths.APP_DIR}\n- GITHUB_TOKEN_PATH: ${info.paths.GITHUB_TOKEN_PATH}\n\nToken exists: ${info.tokenExists ? 
\"Yes\" : \"No\"}`)\n}\n\nfunction printDebugInfoJson(info: DebugInfo): void {\n console.log(JSON.stringify(info, null, 2))\n}\n\nexport async function runDebug(options: RunDebugOptions): Promise<void> {\n const debugInfo = await getDebugInfo()\n\n if (options.json) {\n printDebugInfoJson(debugInfo)\n } else {\n printDebugInfoPlain(debugInfo)\n }\n}\n\nexport const debug = defineCommand({\n meta: {\n name: \"debug\",\n description: \"Print debug information about the application\",\n },\n args: {\n json: {\n type: \"boolean\",\n default: false,\n description: \"Output debug information as JSON\",\n },\n },\n run({ args }) {\n return runDebug({\n json: args.json,\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\nimport fs from \"node:fs/promises\"\n\nimport { PATHS } from \"./lib/paths\"\n\nexport async function runLogout(): Promise<void> {\n try {\n await fs.unlink(PATHS.GITHUB_TOKEN_PATH)\n consola.success(\"Logged out successfully. GitHub token removed.\")\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === \"ENOENT\") {\n consola.info(\"No token found. Already logged out.\")\n } else {\n consola.error(\"Failed to remove token:\", error)\n throw error\n }\n }\n}\n\nexport const logout = defineCommand({\n meta: {\n name: \"logout\",\n description: \"Remove stored GitHub token and log out\",\n },\n run() {\n return runLogout()\n },\n})\n","// History recording module for API requests/responses\n// Supports full message content, session grouping, and rich querying\n\n// Simple ID generator (no external deps)\nfunction generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 9)\n}\n\n// Message types for full content storage\nexport interface MessageContent {\n role: string\n content:\n | string\n | Array<{ type: string; text?: string; [key: string]: unknown }>\n tool_calls?: Array<{\n id: string\n type: string\n function: { name: string; arguments: string }\n }>\n tool_call_id?: string\n name?: string\n}\n\nexport interface ToolDefinition {\n name: string\n description?: string\n}\n\nexport interface HistoryEntry {\n id: string\n sessionId: string // Group related requests together\n timestamp: number\n endpoint: \"anthropic\" | \"openai\"\n\n request: {\n model: string\n messages: Array<MessageContent> // Full message history\n stream: boolean\n tools?: Array<ToolDefinition>\n max_tokens?: number\n temperature?: number\n system?: string // System prompt (for Anthropic)\n }\n\n response?: {\n success: boolean\n model: string\n usage: {\n input_tokens: number\n output_tokens: number\n cache_read_input_tokens?: number\n }\n stop_reason?: string\n error?: string\n content: MessageContent | null // Full response content\n toolCalls?: Array<{\n id: string\n name: string\n input: string\n }>\n }\n\n durationMs?: number\n}\n\nexport interface Session {\n id: string\n startTime: number\n lastActivity: number\n requestCount: number\n totalInputTokens: number\n totalOutputTokens: number\n models: Array<string>\n endpoint: \"anthropic\" | \"openai\"\n toolsUsed?: Array<string> // Tool names used in this session\n}\n\nexport interface HistoryState {\n enabled: boolean\n entries: Array<HistoryEntry>\n sessions: Map<string, Session>\n currentSessionId: string\n maxEntries: number\n sessionTimeoutMs: number // New session after this idle time\n}\n\nexport interface QueryOptions {\n page?: number\n limit?: number\n model?: string\n endpoint?: \"anthropic\" | \"openai\"\n success?: boolean\n 
from?: number\n to?: number\n search?: string\n sessionId?: string\n}\n\nexport interface HistoryResult {\n entries: Array<HistoryEntry>\n total: number\n page: number\n limit: number\n totalPages: number\n}\n\nexport interface SessionResult {\n sessions: Array<Session>\n total: number\n}\n\nexport interface HistoryStats {\n totalRequests: number\n successfulRequests: number\n failedRequests: number\n totalInputTokens: number\n totalOutputTokens: number\n averageDurationMs: number\n modelDistribution: Record<string, number>\n endpointDistribution: Record<string, number>\n recentActivity: Array<{ hour: string; count: number }>\n activeSessions: number\n}\n\n// Global history state\nexport const historyState: HistoryState = {\n enabled: false,\n entries: [],\n sessions: new Map(),\n currentSessionId: \"\",\n maxEntries: 1000,\n sessionTimeoutMs: 30 * 60 * 1000, // 30 minutes\n}\n\nexport function initHistory(enabled: boolean, maxEntries: number): void {\n historyState.enabled = enabled\n historyState.maxEntries = maxEntries\n historyState.entries = []\n historyState.sessions = new Map()\n historyState.currentSessionId = enabled ? generateId() : \"\"\n}\n\nexport function isHistoryEnabled(): boolean {\n return historyState.enabled\n}\n\n// Get or create current session\nfunction getCurrentSession(endpoint: \"anthropic\" | \"openai\"): string {\n const now = Date.now()\n\n // Check if current session is still active\n if (historyState.currentSessionId) {\n const session = historyState.sessions.get(historyState.currentSessionId)\n if (session && now - session.lastActivity < historyState.sessionTimeoutMs) {\n session.lastActivity = now\n return historyState.currentSessionId\n }\n }\n\n // Create new session\n const sessionId = generateId()\n historyState.currentSessionId = sessionId\n historyState.sessions.set(sessionId, {\n id: sessionId,\n startTime: now,\n lastActivity: now,\n requestCount: 0,\n totalInputTokens: 0,\n totalOutputTokens: 0,\n models: [],\n endpoint,\n })\n\n return sessionId\n}\n\nexport interface RecordRequestParams {\n model: string\n messages: Array<MessageContent>\n stream: boolean\n tools?: Array<ToolDefinition>\n max_tokens?: number\n temperature?: number\n system?: string\n}\n\nexport function recordRequest(\n endpoint: \"anthropic\" | \"openai\",\n request: RecordRequestParams,\n): string {\n if (!historyState.enabled) {\n return \"\"\n }\n\n const sessionId = getCurrentSession(endpoint)\n const session = historyState.sessions.get(sessionId)\n if (!session) {\n return \"\"\n }\n\n const entry: HistoryEntry = {\n id: generateId(),\n sessionId,\n timestamp: Date.now(),\n endpoint,\n request: {\n model: request.model,\n messages: request.messages,\n stream: request.stream,\n tools: request.tools,\n max_tokens: request.max_tokens,\n temperature: request.temperature,\n system: request.system,\n },\n }\n\n historyState.entries.push(entry)\n session.requestCount++\n\n if (!session.models.includes(request.model)) {\n session.models.push(request.model)\n }\n\n // Track tools used\n if (request.tools && request.tools.length > 0) {\n if (!session.toolsUsed) {\n session.toolsUsed = []\n }\n for (const tool of request.tools) {\n if (!session.toolsUsed.includes(tool.name)) {\n session.toolsUsed.push(tool.name)\n }\n }\n }\n\n // Enforce max entries limit (FIFO), skip if maxEntries is 0 (unlimited)\n while (\n historyState.maxEntries > 0\n && historyState.entries.length > historyState.maxEntries\n ) {\n const removed = historyState.entries.shift()\n // Clean up empty sessions\n if 
(removed) {\n const sessionEntries = historyState.entries.filter(\n (e) => e.sessionId === removed.sessionId,\n )\n if (sessionEntries.length === 0) {\n historyState.sessions.delete(removed.sessionId)\n }\n }\n }\n\n return entry.id\n}\n\nexport interface RecordResponseParams {\n success: boolean\n model: string\n usage: {\n input_tokens: number\n output_tokens: number\n cache_read_input_tokens?: number\n }\n stop_reason?: string\n error?: string\n content: MessageContent | null\n toolCalls?: Array<{\n id: string\n name: string\n input: string\n }>\n}\n\nexport function recordResponse(\n id: string,\n response: RecordResponseParams,\n durationMs: number,\n): void {\n if (!historyState.enabled || !id) {\n return\n }\n\n const entry = historyState.entries.find((e) => e.id === id)\n if (entry) {\n entry.response = response\n entry.durationMs = durationMs\n\n // Update session stats\n const session = historyState.sessions.get(entry.sessionId)\n if (session) {\n session.totalInputTokens += response.usage.input_tokens\n session.totalOutputTokens += response.usage.output_tokens\n session.lastActivity = Date.now()\n }\n }\n}\n\nexport function getHistory(options: QueryOptions = {}): HistoryResult {\n const {\n page = 1,\n limit = 50,\n model,\n endpoint,\n success,\n from,\n to,\n search,\n sessionId,\n } = options\n\n let filtered = [...historyState.entries]\n\n // Apply filters\n if (sessionId) {\n filtered = filtered.filter((e) => e.sessionId === sessionId)\n }\n\n if (model) {\n const modelLower = model.toLowerCase()\n filtered = filtered.filter(\n (e) =>\n e.request.model.toLowerCase().includes(modelLower)\n || e.response?.model.toLowerCase().includes(modelLower),\n )\n }\n\n if (endpoint) {\n filtered = filtered.filter((e) => e.endpoint === endpoint)\n }\n\n if (success !== undefined) {\n filtered = filtered.filter((e) => e.response?.success === success)\n }\n\n if (from) {\n filtered = filtered.filter((e) => e.timestamp >= from)\n }\n\n if (to) {\n filtered = filtered.filter((e) => e.timestamp <= to)\n }\n\n if (search) {\n const searchLower = search.toLowerCase()\n filtered = filtered.filter((e) => {\n // Search in messages\n const msgMatch = e.request.messages.some((m) => {\n if (typeof m.content === \"string\") {\n return m.content.toLowerCase().includes(searchLower)\n }\n if (Array.isArray(m.content)) {\n return m.content.some(\n (c) => c.text && c.text.toLowerCase().includes(searchLower),\n )\n }\n return false\n })\n\n // Search in response content\n const respMatch =\n e.response?.content\n && typeof e.response.content.content === \"string\"\n && e.response.content.content.toLowerCase().includes(searchLower)\n\n // Search in tool names\n const toolMatch = e.response?.toolCalls?.some((t) =>\n t.name.toLowerCase().includes(searchLower),\n )\n\n // Search in system prompt\n const sysMatch = e.request.system?.toLowerCase().includes(searchLower)\n\n return msgMatch || respMatch || toolMatch || sysMatch\n })\n }\n\n // Sort by timestamp descending (newest first)\n filtered.sort((a, b) => b.timestamp - a.timestamp)\n\n const total = filtered.length\n const totalPages = Math.ceil(total / limit)\n const start = (page - 1) * limit\n const entries = filtered.slice(start, start + limit)\n\n return {\n entries,\n total,\n page,\n limit,\n totalPages,\n }\n}\n\nexport function getEntry(id: string): HistoryEntry | undefined {\n return historyState.entries.find((e) => e.id === id)\n}\n\nexport function getSessions(): SessionResult {\n const sessions = 
Array.from(historyState.sessions.values()).sort(\n (a, b) => b.lastActivity - a.lastActivity,\n )\n\n return {\n sessions,\n total: sessions.length,\n }\n}\n\nexport function getSession(id: string): Session | undefined {\n return historyState.sessions.get(id)\n}\n\nexport function getSessionEntries(sessionId: string): Array<HistoryEntry> {\n return historyState.entries\n .filter((e) => e.sessionId === sessionId)\n .sort((a, b) => a.timestamp - b.timestamp) // Chronological order for sessions\n}\n\nexport function clearHistory(): void {\n historyState.entries = []\n historyState.sessions = new Map()\n historyState.currentSessionId = generateId()\n}\n\nexport function deleteSession(sessionId: string): boolean {\n if (!historyState.sessions.has(sessionId)) {\n return false\n }\n\n historyState.entries = historyState.entries.filter(\n (e) => e.sessionId !== sessionId,\n )\n historyState.sessions.delete(sessionId)\n\n if (historyState.currentSessionId === sessionId) {\n historyState.currentSessionId = generateId()\n }\n\n return true\n}\n\nexport function getStats(): HistoryStats {\n const entries = historyState.entries\n\n const modelDist: Record<string, number> = {}\n const endpointDist: Record<string, number> = {}\n const hourlyActivity: Record<string, number> = {}\n\n let totalInput = 0\n let totalOutput = 0\n let totalDuration = 0\n let durationCount = 0\n let successCount = 0\n let failCount = 0\n\n for (const entry of entries) {\n // Model distribution\n const model = entry.response?.model || entry.request.model\n modelDist[model] = (modelDist[model] || 0) + 1\n\n // Endpoint distribution\n endpointDist[entry.endpoint] = (endpointDist[entry.endpoint] || 0) + 1\n\n // Hourly activity (last 24 hours)\n const hour = new Date(entry.timestamp).toISOString().slice(0, 13)\n hourlyActivity[hour] = (hourlyActivity[hour] || 0) + 1\n\n if (entry.response) {\n if (entry.response.success) {\n successCount++\n } else {\n failCount++\n }\n\n totalInput += entry.response.usage.input_tokens\n totalOutput += entry.response.usage.output_tokens\n }\n\n if (entry.durationMs) {\n totalDuration += entry.durationMs\n durationCount++\n }\n }\n\n // Convert hourly activity to sorted array (last 24 entries)\n const recentActivity = Object.entries(hourlyActivity)\n .sort(([a], [b]) => a.localeCompare(b))\n .slice(-24)\n .map(([hour, count]) => ({ hour, count }))\n\n // Count active sessions (activity within timeout period)\n const now = Date.now()\n let activeSessions = 0\n for (const session of historyState.sessions.values()) {\n if (now - session.lastActivity < historyState.sessionTimeoutMs) {\n activeSessions++\n }\n }\n\n return {\n totalRequests: entries.length,\n successfulRequests: successCount,\n failedRequests: failCount,\n totalInputTokens: totalInput,\n totalOutputTokens: totalOutput,\n averageDurationMs: durationCount > 0 ? 
totalDuration / durationCount : 0,\n modelDistribution: modelDist,\n endpointDistribution: endpointDist,\n recentActivity,\n activeSessions,\n }\n}\n\nexport function exportHistory(format: \"json\" | \"csv\" = \"json\"): string {\n if (format === \"json\") {\n return JSON.stringify(\n {\n sessions: Array.from(historyState.sessions.values()),\n entries: historyState.entries,\n },\n null,\n 2,\n )\n }\n\n // CSV format - simplified view\n const headers = [\n \"id\",\n \"session_id\",\n \"timestamp\",\n \"endpoint\",\n \"request_model\",\n \"message_count\",\n \"stream\",\n \"success\",\n \"response_model\",\n \"input_tokens\",\n \"output_tokens\",\n \"duration_ms\",\n \"stop_reason\",\n \"error\",\n ]\n\n const rows = historyState.entries.map((e) => [\n e.id,\n e.sessionId,\n new Date(e.timestamp).toISOString(),\n e.endpoint,\n e.request.model,\n e.request.messages.length,\n e.request.stream,\n e.response?.success ?? \"\",\n e.response?.model ?? \"\",\n e.response?.usage.input_tokens ?? \"\",\n e.response?.usage.output_tokens ?? \"\",\n e.durationMs ?? \"\",\n e.response?.stop_reason ?? \"\",\n e.response?.error ?? \"\",\n ])\n\n return [headers.join(\",\"), ...rows.map((r) => r.join(\",\"))].join(\"\\n\")\n}\n","import consola from \"consola\"\nimport { getProxyForUrl } from \"proxy-from-env\"\nimport { Agent, ProxyAgent, setGlobalDispatcher, type Dispatcher } from \"undici\"\n\nexport function initProxyFromEnv(): void {\n if (typeof Bun !== \"undefined\") return\n\n try {\n const direct = new Agent()\n const proxies = new Map<string, ProxyAgent>()\n\n // We only need a minimal dispatcher that implements `dispatch` at runtime.\n // Typing the object as `Dispatcher` forces TypeScript to require many\n // additional methods. Instead, keep a plain object and cast when passing\n // to `setGlobalDispatcher`.\n const dispatcher = {\n dispatch(\n options: Dispatcher.DispatchOptions,\n handler: Dispatcher.DispatchHandler,\n ) {\n try {\n const origin =\n typeof options.origin === \"string\" ?\n new URL(options.origin)\n : (options.origin as URL)\n const get = getProxyForUrl as unknown as (\n u: string,\n ) => string | undefined\n const raw = get(origin.toString())\n const proxyUrl = raw && raw.length > 0 ? 
raw : undefined\n if (!proxyUrl) {\n consola.debug(`HTTP proxy bypass: ${origin.hostname}`)\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n let agent = proxies.get(proxyUrl)\n if (!agent) {\n agent = new ProxyAgent(proxyUrl)\n proxies.set(proxyUrl, agent)\n }\n let label = proxyUrl\n try {\n const u = new URL(proxyUrl)\n label = `${u.protocol}//${u.host}`\n } catch {\n /* noop */\n }\n consola.debug(`HTTP proxy route: ${origin.hostname} via ${label}`)\n return (agent as unknown as Dispatcher).dispatch(options, handler)\n } catch {\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n },\n close() {\n return direct.close()\n },\n destroy() {\n return direct.destroy()\n },\n }\n\n setGlobalDispatcher(dispatcher as unknown as Dispatcher)\n consola.debug(\"HTTP proxy configured from environment (per-URL)\")\n } catch (err) {\n consola.debug(\"Proxy setup skipped:\", err)\n }\n}\n","import { execSync } from \"node:child_process\"\nimport process from \"node:process\"\n\ntype ShellName = \"bash\" | \"zsh\" | \"fish\" | \"powershell\" | \"cmd\" | \"sh\"\ntype EnvVars = Record<string, string | undefined>\n\nfunction getShell(): ShellName {\n const { platform, ppid, env } = process\n\n if (platform === \"win32\") {\n try {\n const command = `wmic process get ParentProcessId,Name | findstr \"${ppid}\"`\n const parentProcess = execSync(command, { stdio: \"pipe\" }).toString()\n\n if (parentProcess.toLowerCase().includes(\"powershell.exe\")) {\n return \"powershell\"\n }\n } catch {\n return \"cmd\"\n }\n\n return \"cmd\"\n } else {\n const shellPath = env.SHELL\n if (shellPath) {\n if (shellPath.endsWith(\"zsh\")) return \"zsh\"\n if (shellPath.endsWith(\"fish\")) return \"fish\"\n if (shellPath.endsWith(\"bash\")) return \"bash\"\n }\n\n return \"sh\"\n }\n}\n\n/**\n * Generates a copy-pasteable script to set multiple environment variables\n * and run a subsequent command.\n * @param {EnvVars} envVars - An object of environment variables to set.\n * @param {string} commandToRun - The command to run after setting the variables.\n * @returns {string} The formatted script string.\n */\nexport function generateEnvScript(\n envVars: EnvVars,\n commandToRun: string = \"\",\n): string {\n const shell = getShell()\n const filteredEnvVars = Object.entries(envVars).filter(\n ([, value]) => value !== undefined,\n ) as Array<[string, string]>\n\n let commandBlock: string\n\n switch (shell) {\n case \"powershell\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `$env:${key} = \"${value.replaceAll('\"', '`\"')}\"`)\n .join(\"; \")\n break\n }\n case \"cmd\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set ${key}=${value}`)\n .join(\" & \")\n break\n }\n case \"fish\": {\n commandBlock = filteredEnvVars\n .map(\n ([key, value]) =>\n `set -gx ${key} \"${value.replaceAll('\"', String.raw`\\\"`)}\"`,\n )\n .join(\"; \")\n break\n }\n default: {\n // bash, zsh, sh\n const assignments = filteredEnvVars\n .map(\n ([key, value]) => `${key}=\"${value.replaceAll('\"', String.raw`\\\"`)}\"`,\n )\n .join(\" \")\n commandBlock = filteredEnvVars.length > 0 ? `export ${assignments}` : \"\"\n break\n }\n }\n\n if (commandBlock && commandToRun) {\n const separator = shell === \"cmd\" ? 
\" & \" : \" && \"\n return `${commandBlock}${separator}${commandToRun}`\n }\n\n return commandBlock || commandToRun\n}\n","let p = process || {}, argv = p.argv || [], env = p.env || {}\nlet isColorSupported =\n\t!(!!env.NO_COLOR || argv.includes(\"--no-color\")) &&\n\t(!!env.FORCE_COLOR || argv.includes(\"--color\") || p.platform === \"win32\" || ((p.stdout || {}).isTTY && env.TERM !== \"dumb\") || !!env.CI)\n\nlet formatter = (open, close, replace = open) =>\n\tinput => {\n\t\tlet string = \"\" + input, index = string.indexOf(close, open.length)\n\t\treturn ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close\n\t}\n\nlet replaceClose = (string, close, replace, index) => {\n\tlet result = \"\", cursor = 0\n\tdo {\n\t\tresult += string.substring(cursor, index) + replace\n\t\tcursor = index + close.length\n\t\tindex = string.indexOf(close, cursor)\n\t} while (~index)\n\treturn result + string.substring(cursor)\n}\n\nlet createColors = (enabled = isColorSupported) => {\n\tlet f = enabled ? formatter : () => String\n\treturn {\n\t\tisColorSupported: enabled,\n\t\treset: f(\"\\x1b[0m\", \"\\x1b[0m\"),\n\t\tbold: f(\"\\x1b[1m\", \"\\x1b[22m\", \"\\x1b[22m\\x1b[1m\"),\n\t\tdim: f(\"\\x1b[2m\", \"\\x1b[22m\", \"\\x1b[22m\\x1b[2m\"),\n\t\titalic: f(\"\\x1b[3m\", \"\\x1b[23m\"),\n\t\tunderline: f(\"\\x1b[4m\", \"\\x1b[24m\"),\n\t\tinverse: f(\"\\x1b[7m\", \"\\x1b[27m\"),\n\t\thidden: f(\"\\x1b[8m\", \"\\x1b[28m\"),\n\t\tstrikethrough: f(\"\\x1b[9m\", \"\\x1b[29m\"),\n\n\t\tblack: f(\"\\x1b[30m\", \"\\x1b[39m\"),\n\t\tred: f(\"\\x1b[31m\", \"\\x1b[39m\"),\n\t\tgreen: f(\"\\x1b[32m\", \"\\x1b[39m\"),\n\t\tyellow: f(\"\\x1b[33m\", \"\\x1b[39m\"),\n\t\tblue: f(\"\\x1b[34m\", \"\\x1b[39m\"),\n\t\tmagenta: f(\"\\x1b[35m\", \"\\x1b[39m\"),\n\t\tcyan: f(\"\\x1b[36m\", \"\\x1b[39m\"),\n\t\twhite: f(\"\\x1b[37m\", \"\\x1b[39m\"),\n\t\tgray: f(\"\\x1b[90m\", \"\\x1b[39m\"),\n\n\t\tbgBlack: f(\"\\x1b[40m\", \"\\x1b[49m\"),\n\t\tbgRed: f(\"\\x1b[41m\", \"\\x1b[49m\"),\n\t\tbgGreen: f(\"\\x1b[42m\", \"\\x1b[49m\"),\n\t\tbgYellow: f(\"\\x1b[43m\", \"\\x1b[49m\"),\n\t\tbgBlue: f(\"\\x1b[44m\", \"\\x1b[49m\"),\n\t\tbgMagenta: f(\"\\x1b[45m\", \"\\x1b[49m\"),\n\t\tbgCyan: f(\"\\x1b[46m\", \"\\x1b[49m\"),\n\t\tbgWhite: f(\"\\x1b[47m\", \"\\x1b[49m\"),\n\n\t\tblackBright: f(\"\\x1b[90m\", \"\\x1b[39m\"),\n\t\tredBright: f(\"\\x1b[91m\", \"\\x1b[39m\"),\n\t\tgreenBright: f(\"\\x1b[92m\", \"\\x1b[39m\"),\n\t\tyellowBright: f(\"\\x1b[93m\", \"\\x1b[39m\"),\n\t\tblueBright: f(\"\\x1b[94m\", \"\\x1b[39m\"),\n\t\tmagentaBright: f(\"\\x1b[95m\", \"\\x1b[39m\"),\n\t\tcyanBright: f(\"\\x1b[96m\", \"\\x1b[39m\"),\n\t\twhiteBright: f(\"\\x1b[97m\", \"\\x1b[39m\"),\n\n\t\tbgBlackBright: f(\"\\x1b[100m\", \"\\x1b[49m\"),\n\t\tbgRedBright: f(\"\\x1b[101m\", \"\\x1b[49m\"),\n\t\tbgGreenBright: f(\"\\x1b[102m\", \"\\x1b[49m\"),\n\t\tbgYellowBright: f(\"\\x1b[103m\", \"\\x1b[49m\"),\n\t\tbgBlueBright: f(\"\\x1b[104m\", \"\\x1b[49m\"),\n\t\tbgMagentaBright: f(\"\\x1b[105m\", \"\\x1b[49m\"),\n\t\tbgCyanBright: f(\"\\x1b[106m\", \"\\x1b[49m\"),\n\t\tbgWhiteBright: f(\"\\x1b[107m\", \"\\x1b[49m\"),\n\t}\n}\n\nmodule.exports = createColors()\nmodule.exports.createColors = createColors\n","// Console renderer - simple single-line output for each completed request\n// Replaces Hono's default logger with cleaner, more informative output\n\nimport consola from \"consola\"\nimport pc from \"picocolors\"\n\nimport type { RequestUpdate, TrackedRequest, TuiRenderer } from \"./types\"\n\n// ANSI escape codes 
for cursor control\nconst CLEAR_LINE = \"\\x1b[2K\\r\"\n\nfunction formatDuration(ms: number): string {\n if (ms < 1000) return `${ms}ms`\n return `${(ms / 1000).toFixed(1)}s`\n}\n\nfunction formatNumber(n: number): string {\n if (n >= 1000000) return `${(n / 1000000).toFixed(1)}M`\n if (n >= 1000) return `${(n / 1000).toFixed(1)}K`\n return String(n)\n}\n\nfunction formatTokens(input?: number, output?: number): string {\n if (input === undefined || output === undefined) return \"-\"\n return `${formatNumber(input)}/${formatNumber(output)}`\n}\n\n/**\n * Console renderer that shows request lifecycle with apt-get style footer\n *\n * Log format:\n * - Start: [....] METHOD /path model-name\n * - Streaming: [<-->] METHOD /path model-name streaming...\n * - Complete: [ OK ] METHOD /path 200 1.2s 1.5K/500 model-name\n *\n * Features:\n * - /history API requests are displayed in gray (dim)\n * - Sticky footer shows active request count, updated in-place on the last line\n * - Footer disappears when all requests complete\n */\nexport class ConsoleRenderer implements TuiRenderer {\n private activeRequests: Map<string, TrackedRequest> = new Map()\n private showActive: boolean\n private footerVisible = false\n private isTTY: boolean\n\n constructor(options?: { showActive?: boolean }) {\n this.showActive = options?.showActive ?? true\n\n this.isTTY = process.stdout.isTTY\n }\n\n /**\n * Get footer text based on active request count\n */\n private getFooterText(): string {\n const activeCount = this.activeRequests.size\n if (activeCount === 0) return \"\"\n const plural = activeCount === 1 ? \"\" : \"s\"\n return pc.dim(`[....] ${activeCount} request${plural} in progress...`)\n }\n\n /**\n * Render footer in-place on current line (no newline)\n * Only works on TTY terminals\n */\n private renderFooter(): void {\n if (!this.isTTY) return\n\n const footerText = this.getFooterText()\n if (footerText) {\n process.stdout.write(CLEAR_LINE + footerText)\n this.footerVisible = true\n } else if (this.footerVisible) {\n process.stdout.write(CLEAR_LINE)\n this.footerVisible = false\n }\n }\n\n /**\n * Clear footer and prepare for log output\n */\n private clearFooterForLog(): void {\n if (this.footerVisible && this.isTTY) {\n process.stdout.write(CLEAR_LINE)\n this.footerVisible = false\n }\n }\n\n /**\n * Print a log line with proper footer handling\n * 1. Clear footer if visible\n * 2. Print log with newline\n * 3. Re-render footer on new line (no newline after footer)\n */\n private printLog(message: string, isGray = false): void {\n this.clearFooterForLog()\n\n // Print the log message\n if (isGray) {\n consola.log(pc.dim(message))\n } else {\n consola.log(message)\n }\n\n // Re-render footer after log (stays on its own line without newline)\n this.renderFooter()\n }\n\n onRequestStart(request: TrackedRequest): void {\n this.activeRequests.set(request.id, request)\n\n if (this.showActive) {\n const modelInfo = request.model ? ` ${request.model}` : \"\"\n const queueInfo =\n request.queuePosition !== undefined && request.queuePosition > 0 ?\n ` [q#${request.queuePosition}]`\n : \"\"\n const message = `[....] 
${request.method} ${request.path}${modelInfo}${queueInfo}`\n this.printLog(message, request.isHistoryAccess)\n }\n }\n\n onRequestUpdate(id: string, update: RequestUpdate): void {\n const request = this.activeRequests.get(id)\n if (!request) return\n\n // Apply updates\n Object.assign(request, update)\n\n // Show streaming status\n if (this.showActive && update.status === \"streaming\") {\n const modelInfo = request.model ? ` ${request.model}` : \"\"\n const message = `[<-->] ${request.method} ${request.path}${modelInfo} streaming...`\n this.printLog(message, request.isHistoryAccess)\n }\n }\n\n onRequestComplete(request: TrackedRequest): void {\n this.activeRequests.delete(request.id)\n\n const status = request.statusCode ?? 0\n const duration = formatDuration(request.durationMs ?? 0)\n const tokens =\n request.model ?\n formatTokens(request.inputTokens, request.outputTokens)\n : \"\"\n const modelInfo = request.model ? ` ${request.model}` : \"\"\n\n const isError = request.status === \"error\" || status >= 400\n const prefix = isError ? \"[FAIL]\" : \"[ OK ]\"\n const tokensPart = tokens ? ` ${tokens}` : \"\"\n let content = `${prefix} ${request.method} ${request.path} ${status} ${duration}${tokensPart}${modelInfo}`\n\n if (isError) {\n const errorInfo = request.error ? `: ${request.error}` : \"\"\n content += errorInfo\n }\n\n this.printLog(content, request.isHistoryAccess)\n }\n\n destroy(): void {\n if (this.footerVisible && this.isTTY) {\n process.stdout.write(CLEAR_LINE)\n this.footerVisible = false\n }\n this.activeRequests.clear()\n }\n}\n","// Fullscreen TUI renderer using Ink\n// Provides an interactive terminal interface with tabs for Active/Completed/Errors\n\nimport { Box, render, Text, useInput, useStdout } from \"ink\"\nimport React, { useEffect, useState } from \"react\"\n\nimport type { RequestUpdate, TrackedRequest, TuiRenderer } from \"./types\"\n\ntype TabType = \"active\" | \"completed\" | \"errors\"\n\ninterface TuiState {\n activeRequests: Map<string, TrackedRequest>\n completedRequests: Array<TrackedRequest>\n errorRequests: Array<TrackedRequest>\n}\n\n// Shared state that the renderer updates\nconst tuiState: TuiState = {\n activeRequests: new Map(),\n completedRequests: [],\n errorRequests: [],\n}\n\n// Event emitter for state changes\ntype StateListener = () => void\nconst listeners: Array<StateListener> = []\nfunction notifyListeners(): void {\n for (const listener of listeners) {\n listener()\n }\n}\n\nfunction formatDuration(ms: number): string {\n if (ms < 1000) return `${ms}ms`\n return `${(ms / 1000).toFixed(1)}s`\n}\n\nfunction formatNumber(n: number): string {\n if (n >= 1000000) return `${(n / 1000000).toFixed(1)}M`\n if (n >= 1000) return `${(n / 1000).toFixed(1)}K`\n return String(n)\n}\n\nfunction formatTokens(input?: number, output?: number): string {\n if (input === undefined || output === undefined) return \"-\"\n return `${formatNumber(input)}/${formatNumber(output)}`\n}\n\nfunction getElapsedTime(startTime: number): string {\n return formatDuration(Date.now() - startTime)\n}\n\n// Tab header component\nfunction TabHeader({\n currentTab,\n counts,\n}: {\n currentTab: TabType\n counts: { active: number; completed: number; errors: number }\n}): React.ReactElement {\n const tabs: Array<{ key: TabType; label: string; count: number }> = [\n { key: \"active\", label: \"Active\", count: counts.active },\n { key: \"completed\", label: \"Completed\", count: counts.completed },\n { key: \"errors\", label: \"Errors\", count: counts.errors },\n ]\n\n return 
(\n <Box borderStyle=\"single\" paddingX={1}>\n {tabs.map((tab, idx) => (\n <React.Fragment key={tab.key}>\n {idx > 0 && <Text> │ </Text>}\n <Text\n bold={currentTab === tab.key}\n color={currentTab === tab.key ? \"cyan\" : undefined}\n inverse={currentTab === tab.key}\n >\n {\" \"}\n [{idx + 1}] {tab.label} ({tab.count}){\" \"}\n </Text>\n </React.Fragment>\n ))}\n <Text dimColor> │ Press 1/2/3 to switch tabs, q to quit</Text>\n </Box>\n )\n}\n\nfunction getStatusColor(status: string): string {\n if (status === \"streaming\") return \"yellow\"\n if (status === \"queued\") return \"gray\"\n return \"blue\"\n}\n\nfunction getStatusIcon(status: string): string {\n if (status === \"streaming\") return \"⟳\"\n if (status === \"queued\") return \"◷\"\n return \"●\"\n}\n\n// Active request row\nfunction ActiveRequestRow({\n request,\n}: {\n request: TrackedRequest\n}): React.ReactElement {\n const [, setTick] = useState(0)\n\n // Update elapsed time every second\n useEffect(() => {\n const interval = setInterval(() => setTick((t) => t + 1), 1000)\n return () => clearInterval(interval)\n }, [])\n\n const statusColor = getStatusColor(request.status)\n const statusIcon = getStatusIcon(request.status)\n\n return (\n <Box>\n <Text color={statusColor}>{statusIcon} </Text>\n <Text bold>{request.method}</Text>\n <Text> {request.path} </Text>\n <Text dimColor>{getElapsedTime(request.startTime)} </Text>\n {request.queuePosition !== undefined && request.queuePosition > 0 && (\n <Text color=\"gray\">[queue #{request.queuePosition}] </Text>\n )}\n <Text color=\"magenta\">{request.model}</Text>\n </Box>\n )\n}\n\n// Completed request row\nfunction CompletedRequestRow({\n request,\n}: {\n request: TrackedRequest\n}): React.ReactElement {\n const isError = request.status === \"error\" || (request.statusCode ?? 0) >= 400\n\n return (\n <Box>\n <Text color={isError ? \"red\" : \"green\"}>{isError ? \"✗\" : \"✓\"} </Text>\n <Text bold>{request.method}</Text>\n <Text> {request.path} </Text>\n <Text color={isError ? \"red\" : \"green\"}>\n {request.statusCode ?? \"-\"}{\" \"}\n </Text>\n <Text dimColor>{formatDuration(request.durationMs ?? 0)} </Text>\n <Text>{formatTokens(request.inputTokens, request.outputTokens)} </Text>\n <Text color=\"magenta\">{request.model}</Text>\n </Box>\n )\n}\n\n// Error request row\nfunction ErrorRequestRow({\n request,\n}: {\n request: TrackedRequest\n}): React.ReactElement {\n return (\n <Box flexDirection=\"column\">\n <Box>\n <Text color=\"red\">✗ </Text>\n <Text bold>{request.method}</Text>\n <Text> {request.path} </Text>\n <Text color=\"red\">{request.statusCode ?? \"-\"} </Text>\n <Text dimColor>{formatDuration(request.durationMs ?? 
0)} </Text>\n <Text color=\"magenta\">{request.model}</Text>\n </Box>\n {request.error && (\n <Box marginLeft={2}>\n <Text color=\"red\" dimColor>\n └─ {request.error}\n </Text>\n </Box>\n )}\n </Box>\n )\n}\n\n// Content panel component\nfunction ContentPanel({\n currentTab,\n activeList,\n completedList,\n errorList,\n contentHeight,\n}: {\n currentTab: TabType\n activeList: Array<TrackedRequest>\n completedList: Array<TrackedRequest>\n errorList: Array<TrackedRequest>\n contentHeight: number\n}): React.ReactElement {\n if (currentTab === \"active\") {\n if (activeList.length === 0) {\n return <Text dimColor>No active requests</Text>\n }\n return (\n <>\n {activeList.slice(0, contentHeight).map((req) => (\n <ActiveRequestRow key={req.id} request={req} />\n ))}\n </>\n )\n }\n\n if (currentTab === \"completed\") {\n if (completedList.length === 0) {\n return <Text dimColor>No completed requests</Text>\n }\n return (\n <>\n {completedList\n .slice(-contentHeight)\n .reverse()\n .map((req) => (\n <CompletedRequestRow key={req.id} request={req} />\n ))}\n </>\n )\n }\n\n // errors tab\n if (errorList.length === 0) {\n return <Text dimColor>No errors</Text>\n }\n return (\n <>\n {errorList\n .slice(-contentHeight)\n .reverse()\n .map((req) => (\n <ErrorRequestRow key={req.id} request={req} />\n ))}\n </>\n )\n}\n\n// Main TUI App component\nfunction TuiApp(): React.ReactElement {\n const [currentTab, setCurrentTab] = useState<TabType>(\"active\")\n const [, forceUpdate] = useState(0)\n const { stdout } = useStdout()\n\n // Subscribe to state changes\n useEffect(() => {\n const listener = (): void => forceUpdate((n) => n + 1)\n listeners.push(listener)\n return () => {\n const idx = listeners.indexOf(listener)\n if (idx !== -1) listeners.splice(idx, 1)\n }\n }, [])\n\n // Handle keyboard input\n useInput((input, key) => {\n switch (input) {\n case \"1\": {\n setCurrentTab(\"active\")\n break\n }\n case \"2\": {\n setCurrentTab(\"completed\")\n break\n }\n case \"3\": {\n setCurrentTab(\"errors\")\n break\n }\n default: {\n if (input === \"q\" || (key.ctrl && input === \"c\")) {\n process.exit(0)\n }\n }\n }\n })\n\n const activeList = Array.from(tuiState.activeRequests.values())\n const completedList = tuiState.completedRequests\n const errorList = tuiState.errorRequests\n\n const counts = {\n active: activeList.length,\n completed: completedList.length,\n errors: errorList.length,\n }\n\n // Calculate available height\n const terminalHeight = stdout.rows || 24\n const headerHeight = 3 // Tab header\n const footerHeight = 1 // Footer\n const contentHeight = terminalHeight - headerHeight - footerHeight - 2\n\n return (\n <Box flexDirection=\"column\" height={terminalHeight}>\n <TabHeader currentTab={currentTab} counts={counts} />\n <Box\n flexDirection=\"column\"\n height={contentHeight}\n borderStyle=\"single\"\n paddingX={1}\n overflow=\"hidden\"\n >\n <ContentPanel\n currentTab={currentTab}\n activeList={activeList}\n completedList={completedList}\n errorList={errorList}\n contentHeight={contentHeight}\n />\n </Box>\n <Box paddingX={1}>\n <Text dimColor>\n copilot-api │ Active: {counts.active} │ Completed: {counts.completed}{\" \"}\n │ Errors: {counts.errors}\n </Text>\n </Box>\n </Box>\n )\n}\n\n/**\n * Fullscreen TUI renderer using Ink\n * Provides interactive terminal interface with tabs\n */\nexport class FullscreenRenderer implements TuiRenderer {\n private inkInstance: ReturnType<typeof render> | null = null\n private maxHistory = 100\n\n constructor(options?: { maxHistory?: number }) 
{\n if (options?.maxHistory !== undefined) {\n this.maxHistory = options.maxHistory\n }\n }\n\n start(): void {\n if (this.inkInstance) return\n\n this.inkInstance = render(<TuiApp />, {\n // Use full terminal\n })\n }\n\n onRequestStart(request: TrackedRequest): void {\n tuiState.activeRequests.set(request.id, { ...request })\n notifyListeners()\n }\n\n onRequestUpdate(id: string, update: RequestUpdate): void {\n const request = tuiState.activeRequests.get(id)\n if (!request) return\n\n Object.assign(request, update)\n notifyListeners()\n }\n\n onRequestComplete(request: TrackedRequest): void {\n tuiState.activeRequests.delete(request.id)\n\n const isError =\n request.status === \"error\" || (request.statusCode ?? 0) >= 400\n\n if (isError) {\n tuiState.errorRequests.push({ ...request })\n // Trim error history\n while (tuiState.errorRequests.length > this.maxHistory) {\n tuiState.errorRequests.shift()\n }\n }\n\n tuiState.completedRequests.push({ ...request })\n // Trim completed history\n while (tuiState.completedRequests.length > this.maxHistory) {\n tuiState.completedRequests.shift()\n }\n\n notifyListeners()\n }\n\n destroy(): void {\n if (this.inkInstance) {\n this.inkInstance.unmount()\n this.inkInstance = null\n }\n tuiState.activeRequests.clear()\n tuiState.completedRequests = []\n tuiState.errorRequests = []\n }\n}\n","// Request tracker - manages request state independently of rendering\n\nimport type { RequestUpdate, TrackedRequest, TuiRenderer } from \"./types\"\n\n// Simple ID generator\nfunction generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 6)\n}\n\ninterface StartRequestOptions {\n method: string\n path: string\n model: string\n isHistoryAccess?: boolean\n}\n\nclass RequestTracker {\n private requests: Map<string, TrackedRequest> = new Map()\n private renderer: TuiRenderer | null = null\n private completedQueue: Array<TrackedRequest> = []\n private historySize = 5\n private completedDisplayMs = 2000\n\n setRenderer(renderer: TuiRenderer | null): void {\n this.renderer = renderer\n }\n\n setOptions(options: {\n historySize?: number\n completedDisplayMs?: number\n }): void {\n if (options.historySize !== undefined) {\n this.historySize = options.historySize\n }\n if (options.completedDisplayMs !== undefined) {\n this.completedDisplayMs = options.completedDisplayMs\n }\n }\n\n /**\n * Start tracking a new request\n * Returns the tracking ID\n */\n startRequest(options: StartRequestOptions): string {\n const id = generateId()\n const request: TrackedRequest = {\n id,\n method: options.method,\n path: options.path,\n model: options.model,\n startTime: Date.now(),\n status: \"executing\",\n isHistoryAccess: options.isHistoryAccess,\n }\n\n this.requests.set(id, request)\n this.renderer?.onRequestStart(request)\n\n return id\n }\n\n /**\n * Update request status\n */\n updateRequest(id: string, update: RequestUpdate): void {\n const request = this.requests.get(id)\n if (!request) return\n\n if (update.status !== undefined) request.status = update.status\n if (update.statusCode !== undefined) request.statusCode = update.statusCode\n if (update.durationMs !== undefined) request.durationMs = update.durationMs\n if (update.inputTokens !== undefined)\n request.inputTokens = update.inputTokens\n if (update.outputTokens !== undefined)\n request.outputTokens = update.outputTokens\n if (update.error !== undefined) request.error = update.error\n if (update.queuePosition !== undefined)\n request.queuePosition = update.queuePosition\n\n 
this.renderer?.onRequestUpdate(id, update)\n }\n\n /**\n * Mark request as completed\n */\n completeRequest(\n id: string,\n statusCode: number,\n usage?: { inputTokens: number; outputTokens: number },\n ): void {\n const request = this.requests.get(id)\n if (!request) return\n\n request.status =\n statusCode >= 200 && statusCode < 400 ? \"completed\" : \"error\"\n request.statusCode = statusCode\n request.durationMs = Date.now() - request.startTime\n\n if (usage) {\n request.inputTokens = usage.inputTokens\n request.outputTokens = usage.outputTokens\n }\n\n this.renderer?.onRequestComplete(request)\n\n // Move to completed queue\n this.requests.delete(id)\n this.completedQueue.push(request)\n\n // Trim completed queue\n while (this.completedQueue.length > this.historySize) {\n this.completedQueue.shift()\n }\n\n // Schedule removal from display after delay\n setTimeout(() => {\n const idx = this.completedQueue.indexOf(request)\n if (idx !== -1) {\n this.completedQueue.splice(idx, 1)\n }\n }, this.completedDisplayMs)\n }\n\n /**\n * Mark request as failed with error\n */\n failRequest(id: string, error: string): void {\n const request = this.requests.get(id)\n if (!request) return\n\n request.status = \"error\"\n request.error = error\n request.durationMs = Date.now() - request.startTime\n\n this.renderer?.onRequestComplete(request)\n\n this.requests.delete(id)\n this.completedQueue.push(request)\n\n while (this.completedQueue.length > this.historySize) {\n this.completedQueue.shift()\n }\n }\n\n /**\n * Get all active requests\n */\n getActiveRequests(): Array<TrackedRequest> {\n return Array.from(this.requests.values())\n }\n\n /**\n * Get recently completed requests\n */\n getCompletedRequests(): Array<TrackedRequest> {\n return [...this.completedQueue]\n }\n\n /**\n * Get request by ID\n */\n getRequest(id: string): TrackedRequest | undefined {\n return this.requests.get(id)\n }\n\n /**\n * Clear all tracked requests\n */\n clear(): void {\n this.requests.clear()\n this.completedQueue = []\n }\n}\n\n// Singleton instance\nexport const requestTracker = new RequestTracker()\n","// Custom Hono logger middleware that integrates with TUI request tracker\n// Replaces the default hono/logger for cleaner, more informative output\n\nimport type { Context, MiddlewareHandler, Next } from \"hono\"\n\nimport { requestTracker } from \"./tracker\"\n\n/**\n * Custom logger middleware that tracks requests through the TUI system\n * Shows single-line output: METHOD /path 200 1.2s 1.5K/500 model-name\n *\n * For streaming responses (SSE), the handler is responsible for calling\n * completeRequest after the stream finishes.\n */\nexport function tuiLogger(): MiddlewareHandler {\n return async (c: Context, next: Next) => {\n const method = c.req.method\n const path = c.req.path\n\n // Detect /history API access for gray display\n const isHistoryAccess = path.startsWith(\"/history\")\n\n // Start tracking with empty model (will be updated by handler if available)\n const trackingId = requestTracker.startRequest({\n method,\n path,\n model: \"\",\n isHistoryAccess,\n })\n\n // Store tracking ID in context for handlers to update\n c.set(\"trackingId\", trackingId)\n\n try {\n await next()\n\n // Check if this is a streaming response (SSE)\n const contentType = c.res.headers.get(\"content-type\") ?? 
\"\"\n const isStreaming = contentType.includes(\"text/event-stream\")\n\n // For streaming responses, the handler will call completeRequest\n // after the stream finishes with the actual token counts\n if (isStreaming) {\n return\n }\n\n // Complete tracking with response info for non-streaming\n const status = c.res.status\n\n // Get usage and model from response headers (set by handler if available)\n const inputTokens = c.res.headers.get(\"x-input-tokens\")\n const outputTokens = c.res.headers.get(\"x-output-tokens\")\n const model = c.res.headers.get(\"x-model\")\n\n // Update model if available\n if (model) {\n const request = requestTracker.getRequest(trackingId)\n if (request) {\n request.model = model\n }\n }\n\n requestTracker.completeRequest(\n trackingId,\n status,\n inputTokens && outputTokens ?\n {\n inputTokens: Number.parseInt(inputTokens, 10),\n outputTokens: Number.parseInt(outputTokens, 10),\n }\n : undefined,\n )\n } catch (error) {\n requestTracker.failRequest(\n trackingId,\n error instanceof Error ? error.message : \"Unknown error\",\n )\n throw error\n }\n }\n}\n","// TUI module exports\n\nexport { ConsoleRenderer } from \"./console-renderer\"\nexport { FullscreenRenderer } from \"./fullscreen-renderer\"\nexport { tuiLogger } from \"./middleware\"\nexport { requestTracker } from \"./tracker\"\nexport type {\n RequestStatus,\n RequestUpdate,\n TrackedRequest,\n TuiOptions,\n TuiRenderer,\n} from \"./types\"\n\nimport type { TuiOptions } from \"./types\"\n\nimport { ConsoleRenderer } from \"./console-renderer\"\nimport { FullscreenRenderer } from \"./fullscreen-renderer\"\nimport { requestTracker } from \"./tracker\"\n\nexport type TuiMode = \"console\" | \"fullscreen\"\n\n/**\n * Initialize the TUI system\n * @param options.mode - \"console\" for simple log output (default), \"fullscreen\" for interactive TUI\n */\nexport function initTui(options?: TuiOptions & { mode?: TuiMode }): void {\n const enabled = options?.enabled ?? process.stdout.isTTY\n const mode = options?.mode ?? \"console\"\n\n if (enabled) {\n if (mode === \"fullscreen\") {\n const renderer = new FullscreenRenderer({\n maxHistory: options?.historySize ?? 
100,\n })\n requestTracker.setRenderer(renderer)\n renderer.start()\n } else {\n const renderer = new ConsoleRenderer()\n requestTracker.setRenderer(renderer)\n }\n }\n\n if (\n options?.historySize !== undefined\n || options?.completedDisplayMs !== undefined\n ) {\n requestTracker.setOptions({\n historySize: options.historySize,\n completedDisplayMs: options.completedDisplayMs,\n })\n }\n}\n","import consola from \"consola\"\n\nimport { HTTPError } from \"./error\"\n\nexport const awaitApproval = async () => {\n const response = await consola.prompt(`Accept incoming request?`, {\n type: \"confirm\",\n })\n\n if (!response)\n throw new HTTPError(\n \"Request rejected\",\n 403,\n JSON.stringify({ message: \"Request rejected\" }),\n )\n}\n","import type {\n ChatCompletionsPayload,\n ContentPart,\n Message,\n Tool,\n ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\nimport type { Model } from \"~/services/copilot/get-models\"\n\n// Encoder type mapping\nconst ENCODING_MAP = {\n o200k_base: () => import(\"gpt-tokenizer/encoding/o200k_base\"),\n cl100k_base: () => import(\"gpt-tokenizer/encoding/cl100k_base\"),\n p50k_base: () => import(\"gpt-tokenizer/encoding/p50k_base\"),\n p50k_edit: () => import(\"gpt-tokenizer/encoding/p50k_edit\"),\n r50k_base: () => import(\"gpt-tokenizer/encoding/r50k_base\"),\n} as const\n\ntype SupportedEncoding = keyof typeof ENCODING_MAP\n\n// Define encoder interface\ninterface Encoder {\n encode: (text: string) => Array<number>\n}\n\n// Cache loaded encoders to avoid repeated imports\nconst encodingCache = new Map<string, Encoder>()\n\n/**\n * Calculate tokens for tool calls\n */\nconst calculateToolCallsTokens = (\n toolCalls: Array<ToolCall>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = 0\n for (const toolCall of toolCalls) {\n tokens += constants.funcInit\n tokens += encoder.encode(JSON.stringify(toolCall)).length\n }\n tokens += constants.funcEnd\n return tokens\n}\n\n/**\n * Calculate tokens for content parts\n */\nconst calculateContentPartsTokens = (\n contentParts: Array<ContentPart>,\n encoder: Encoder,\n): number => {\n let tokens = 0\n for (const part of contentParts) {\n if (part.type === \"image_url\") {\n // Image URLs incur ~85 tokens overhead for the image processing metadata\n // This is an approximation based on OpenAI's image token calculation\n tokens += encoder.encode(part.image_url.url).length + 85\n } else if (part.text) {\n tokens += encoder.encode(part.text).length\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for a single message\n */\nconst calculateMessageTokens = (\n message: Message,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n // Each message incurs 3 tokens overhead for role/metadata framing\n // Based on OpenAI's token counting methodology\n const tokensPerMessage = 3\n // Additional token when a \"name\" field is present\n const tokensPerName = 1\n let tokens = tokensPerMessage\n for (const [key, value] of Object.entries(message)) {\n if (typeof value === \"string\") {\n tokens += encoder.encode(value).length\n }\n if (key === \"name\") {\n tokens += tokensPerName\n }\n if (key === \"tool_calls\") {\n tokens += calculateToolCallsTokens(\n value as Array<ToolCall>,\n encoder,\n constants,\n )\n }\n if (key === \"content\" && Array.isArray(value)) {\n tokens += calculateContentPartsTokens(\n value as Array<ContentPart>,\n encoder,\n )\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens using custom 
algorithm\n */\nconst calculateTokens = (\n messages: Array<Message>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (messages.length === 0) {\n return 0\n }\n let numTokens = 0\n for (const message of messages) {\n numTokens += calculateMessageTokens(message, encoder, constants)\n }\n // every reply is primed with <|start|>assistant<|message|> (3 tokens)\n numTokens += 3\n return numTokens\n}\n\n/**\n * Get the corresponding encoder module based on encoding type\n */\nconst getEncodeChatFunction = async (encoding: string): Promise<Encoder> => {\n if (encodingCache.has(encoding)) {\n const cached = encodingCache.get(encoding)\n if (cached) {\n return cached\n }\n }\n\n const supportedEncoding = encoding as SupportedEncoding\n if (!(supportedEncoding in ENCODING_MAP)) {\n const fallbackModule = (await ENCODING_MAP.o200k_base()) as Encoder\n encodingCache.set(encoding, fallbackModule)\n return fallbackModule\n }\n\n const encodingModule = (await ENCODING_MAP[supportedEncoding]()) as Encoder\n encodingCache.set(encoding, encodingModule)\n return encodingModule\n}\n\n/**\n * Get tokenizer type from model information\n */\nexport const getTokenizerFromModel = (model: Model): string => {\n return model.capabilities.tokenizer || \"o200k_base\"\n}\n\n/**\n * Get model-specific constants for token calculation.\n * These values are empirically determined based on OpenAI's function calling token overhead.\n * - funcInit: Tokens for initializing a function definition\n * - propInit: Tokens for initializing the properties section\n * - propKey: Tokens per property key\n * - enumInit: Token adjustment when enum is present (negative because type info is replaced)\n * - enumItem: Tokens per enum value\n * - funcEnd: Tokens for closing the function definition\n */\nconst getModelConstants = (model: Model) => {\n return model.id === \"gpt-3.5-turbo\" || model.id === \"gpt-4\" ?\n {\n funcInit: 10,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n : {\n funcInit: 7,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n}\n\n/**\n * Calculate tokens for a single parameter\n */\nconst calculateParameterTokens = (\n key: string,\n prop: unknown,\n context: {\n encoder: Encoder\n constants: ReturnType<typeof getModelConstants>\n },\n): number => {\n const { encoder, constants } = context\n let tokens = constants.propKey\n\n // Early return if prop is not an object\n if (typeof prop !== \"object\" || prop === null) {\n return tokens\n }\n\n // Type assertion for parameter properties\n const param = prop as {\n type?: string\n description?: string\n enum?: Array<unknown>\n [key: string]: unknown\n }\n\n const paramName = key\n const paramType = param.type || \"string\"\n let paramDesc = param.description || \"\"\n\n // Handle enum values\n if (param.enum && Array.isArray(param.enum)) {\n tokens += constants.enumInit\n for (const item of param.enum) {\n tokens += constants.enumItem\n tokens += encoder.encode(String(item)).length\n }\n }\n\n // Clean up description\n if (paramDesc.endsWith(\".\")) {\n paramDesc = paramDesc.slice(0, -1)\n }\n\n // Encode the main parameter line\n const line = `${paramName}:${paramType}:${paramDesc}`\n tokens += encoder.encode(line).length\n\n // Handle additional properties (excluding standard ones)\n const excludedKeys = new Set([\"type\", \"description\", \"enum\"])\n for (const propertyName of Object.keys(param)) {\n if (!excludedKeys.has(propertyName)) {\n const propertyValue = 
param[propertyName]\n const propertyText =\n typeof propertyValue === \"string\" ? propertyValue : (\n JSON.stringify(propertyValue)\n )\n tokens += encoder.encode(`${propertyName}:${propertyText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for function parameters\n */\nconst calculateParametersTokens = (\n parameters: unknown,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (!parameters || typeof parameters !== \"object\") {\n return 0\n }\n\n const params = parameters as Record<string, unknown>\n let tokens = 0\n\n for (const [key, value] of Object.entries(params)) {\n if (key === \"properties\") {\n const properties = value as Record<string, unknown>\n if (Object.keys(properties).length > 0) {\n tokens += constants.propInit\n for (const propKey of Object.keys(properties)) {\n tokens += calculateParameterTokens(propKey, properties[propKey], {\n encoder,\n constants,\n })\n }\n }\n } else {\n const paramText =\n typeof value === \"string\" ? value : JSON.stringify(value)\n tokens += encoder.encode(`${key}:${paramText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for a single tool\n */\nconst calculateToolTokens = (\n tool: Tool,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = constants.funcInit\n const func = tool.function\n const fName = func.name\n let fDesc = func.description || \"\"\n if (fDesc.endsWith(\".\")) {\n fDesc = fDesc.slice(0, -1)\n }\n const line = fName + \":\" + fDesc\n tokens += encoder.encode(line).length\n if (\n typeof func.parameters === \"object\" // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n && func.parameters !== null\n ) {\n tokens += calculateParametersTokens(func.parameters, encoder, constants)\n }\n return tokens\n}\n\n/**\n * Calculate token count for tools based on model\n */\nexport const numTokensForTools = (\n tools: Array<Tool>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let funcTokenCount = 0\n for (const tool of tools) {\n funcTokenCount += calculateToolTokens(tool, encoder, constants)\n }\n funcTokenCount += constants.funcEnd\n return funcTokenCount\n}\n\n/**\n * Calculate the token count of messages, supporting multiple GPT encoders\n */\nexport const getTokenCount = async (\n payload: ChatCompletionsPayload,\n model: Model,\n): Promise<{ input: number; output: number }> => {\n // Get tokenizer string\n const tokenizer = getTokenizerFromModel(model)\n\n // Get corresponding encoder module\n const encoder = await getEncodeChatFunction(tokenizer)\n\n const simplifiedMessages = payload.messages\n const inputMessages = simplifiedMessages.filter(\n (msg) => msg.role !== \"assistant\",\n )\n const outputMessages = simplifiedMessages.filter(\n (msg) => msg.role === \"assistant\",\n )\n\n const constants = getModelConstants(model)\n let inputTokens = calculateTokens(inputMessages, encoder, constants)\n if (payload.tools && payload.tools.length > 0) {\n inputTokens += numTokensForTools(payload.tools, encoder, constants)\n }\n const outputTokens = calculateTokens(outputMessages, encoder, constants)\n\n return {\n input: inputTokens,\n output: outputTokens,\n }\n}\n","import consola from \"consola\"\n\nimport type {\n ChatCompletionsPayload,\n Message,\n} from \"~/services/copilot/create-chat-completions\"\nimport type { Model } from \"~/services/copilot/get-models\"\n\nimport { getTokenCount } from \"~/lib/tokenizer\"\n\n/** Configuration for 
auto-compact behavior */\nexport interface AutoCompactConfig {\n /** Target tokens to preserve at the end (default: 100000 to leave room for output) */\n targetTokens: number\n /** Safety margin percentage to account for token counting differences (default: 10) */\n safetyMarginPercent: number\n}\n\nconst DEFAULT_CONFIG: AutoCompactConfig = {\n targetTokens: 100000, // Leave ~28k for output on 128k models\n safetyMarginPercent: 10,\n}\n\n/** Result of auto-compact operation */\nexport interface AutoCompactResult {\n /** The compacted payload (or original if no compaction needed) */\n payload: ChatCompletionsPayload\n /** Whether compaction was performed */\n wasCompacted: boolean\n /** Original token count */\n originalTokens: number\n /** Token count after compaction */\n compactedTokens: number\n /** Number of messages that were removed */\n removedMessageCount: number\n}\n\n/**\n * Check if payload needs compaction based on model limits.\n * Uses a safety margin to account for token counting differences.\n */\nexport async function checkNeedsCompaction(\n payload: ChatCompletionsPayload,\n model: Model,\n safetyMarginPercent = 10,\n): Promise<{ needed: boolean; currentTokens: number; limit: number }> {\n const tokenCount = await getTokenCount(payload, model)\n const currentTokens = tokenCount.input\n const rawLimit = model.capabilities.limits.max_prompt_tokens ?? 128000\n // Apply safety margin to trigger compaction earlier\n const limit = Math.floor(rawLimit * (1 - safetyMarginPercent / 100))\n\n return {\n needed: currentTokens > limit,\n currentTokens,\n limit,\n }\n}\n\n/**\n * Calculate approximate token count for a single message.\n * This is a fast estimation for splitting decisions.\n */\nfunction estimateMessageTokens(message: Message): number {\n let text = \"\"\n if (typeof message.content === \"string\") {\n text = message.content\n } else if (Array.isArray(message.content)) {\n for (const part of message.content) {\n if (part.type === \"text\") {\n text += part.text\n } else if (\"image_url\" in part) {\n // Images add significant tokens\n text += part.image_url.url\n }\n }\n }\n\n // Add tool calls if present\n if (message.tool_calls) {\n text += JSON.stringify(message.tool_calls)\n }\n\n // Rough estimation: ~4 characters per token + message overhead\n return Math.ceil(text.length / 4) + 10\n}\n\n/**\n * Extract system messages from the beginning of the message list.\n */\nfunction extractSystemMessages(messages: Array<Message>): {\n systemMessages: Array<Message>\n remainingMessages: Array<Message>\n} {\n const systemMessages: Array<Message> = []\n let i = 0\n\n while (i < messages.length) {\n const msg = messages[i]\n if (msg.role === \"system\" || msg.role === \"developer\") {\n systemMessages.push(msg)\n i++\n } else {\n break\n }\n }\n\n return {\n systemMessages,\n remainingMessages: messages.slice(i),\n }\n}\n\n/**\n * Find messages to keep from the end to stay under target tokens.\n * Returns the starting index of messages to preserve.\n */\nfunction findPreserveIndex(\n messages: Array<Message>,\n targetTokens: number,\n systemTokens: number,\n): number {\n const availableTokens = targetTokens - systemTokens - 500 // Reserve for truncation marker\n\n let accumulatedTokens = 0\n\n // Walk backwards from the end\n for (let i = messages.length - 1; i >= 0; i--) {\n const msgTokens = estimateMessageTokens(messages[i])\n if (accumulatedTokens + msgTokens > availableTokens) {\n // This message would put us over - start preserving from next message\n return i + 1\n }\n 
accumulatedTokens += msgTokens\n }\n\n // All messages fit\n return 0\n}\n\n/**\n * Calculate estimated tokens for system messages.\n */\nfunction estimateSystemTokens(systemMessages: Array<Message>): number {\n return systemMessages.reduce(\n (sum, msg) => sum + estimateMessageTokens(msg),\n 0,\n )\n}\n\n/**\n * Create a truncation marker message.\n */\nfunction createTruncationMarker(removedCount: number): Message {\n return {\n role: \"user\",\n content: `[CONTEXT TRUNCATED: ${removedCount} earlier messages were removed to fit context limits. The conversation continues below.]`,\n }\n}\n\n/**\n * Perform auto-compaction on a payload that exceeds token limits.\n * This uses simple truncation - no LLM calls required.\n */\nexport async function autoCompact(\n payload: ChatCompletionsPayload,\n model: Model,\n config: Partial<AutoCompactConfig> = {},\n): Promise<AutoCompactResult> {\n const cfg = { ...DEFAULT_CONFIG, ...config }\n\n // Check current token count\n const tokenCount = await getTokenCount(payload, model)\n const originalTokens = tokenCount.input\n const rawLimit = model.capabilities.limits.max_prompt_tokens ?? 128000\n const limit = Math.floor(rawLimit * (1 - cfg.safetyMarginPercent / 100))\n\n // If we're under the limit, no compaction needed\n if (originalTokens <= limit) {\n return {\n payload,\n wasCompacted: false,\n originalTokens,\n compactedTokens: originalTokens,\n removedMessageCount: 0,\n }\n }\n\n consola.info(\n `Auto-compact: ${originalTokens} tokens exceeds limit of ${limit}, truncating...`,\n )\n\n // Extract system messages (always preserve them)\n const { systemMessages, remainingMessages } = extractSystemMessages(\n payload.messages,\n )\n\n const systemTokens = estimateSystemTokens(systemMessages)\n consola.debug(\n `Auto-compact: ${systemMessages.length} system messages (~${systemTokens} tokens)`,\n )\n\n // Use the smaller of targetTokens or the actual limit\n const effectiveTarget = Math.min(cfg.targetTokens, limit)\n\n // Find where to start preserving messages\n const preserveIndex = findPreserveIndex(\n remainingMessages,\n effectiveTarget,\n systemTokens,\n )\n\n // If we need to keep all messages, we can't help\n if (preserveIndex === 0) {\n consola.warn(\n \"Auto-compact: Cannot truncate further without losing all conversation history\",\n )\n return {\n payload,\n wasCompacted: false,\n originalTokens,\n compactedTokens: originalTokens,\n removedMessageCount: 0,\n }\n }\n\n const removedMessages = remainingMessages.slice(0, preserveIndex)\n const preservedMessages = remainingMessages.slice(preserveIndex)\n\n consola.info(\n `Auto-compact: Removing ${removedMessages.length} messages, keeping ${preservedMessages.length}`,\n )\n\n // Build the truncation marker\n const truncationMarker = createTruncationMarker(removedMessages.length)\n\n // Build new payload\n const newPayload: ChatCompletionsPayload = {\n ...payload,\n messages: [...systemMessages, truncationMarker, ...preservedMessages],\n }\n\n // Verify the new token count\n const newTokenCount = await getTokenCount(newPayload, model)\n\n consola.info(\n `Auto-compact: Reduced from ${originalTokens} to ${newTokenCount.input} tokens`,\n )\n\n // If still over limit, try more aggressive truncation\n if (newTokenCount.input > limit) {\n consola.warn(\n `Auto-compact: Still over limit (${newTokenCount.input} > ${limit}), trying more aggressive truncation`,\n )\n\n // Recursively try with a smaller target\n const aggressiveTarget = Math.floor(effectiveTarget * 0.7)\n if (aggressiveTarget < 20000) {\n 
consola.error(\"Auto-compact: Cannot reduce further, target too low\")\n return {\n payload: newPayload,\n wasCompacted: true,\n originalTokens,\n compactedTokens: newTokenCount.input,\n removedMessageCount: removedMessages.length,\n }\n }\n\n return autoCompact(payload, model, {\n ...cfg,\n targetTokens: aggressiveTarget,\n })\n }\n\n return {\n payload: newPayload,\n wasCompacted: true,\n originalTokens,\n compactedTokens: newTokenCount.input,\n removedMessageCount: removedMessages.length,\n }\n}\n\n/**\n * Create a marker to append to responses indicating auto-compaction occurred.\n */\nexport function createCompactionMarker(result: AutoCompactResult): string {\n if (!result.wasCompacted) return \"\"\n\n const reduction = result.originalTokens - result.compactedTokens\n const percentage = Math.round((reduction / result.originalTokens) * 100)\n\n return (\n `\\n\\n---\\n[Auto-compacted: ${result.removedMessageCount} messages removed, `\n + `${result.originalTokens} → ${result.compactedTokens} tokens (${percentage}% reduction)]`\n )\n}\n","import consola from \"consola\"\n\nimport type { State } from \"./state\"\n\ninterface QueuedRequest<T> {\n execute: () => Promise<T>\n resolve: (value: T) => void\n reject: (error: unknown) => void\n}\n\n// Simple request queue for rate limiting\n// Instead of rejecting requests, queue them and process sequentially\nclass RequestQueue {\n private queue: Array<QueuedRequest<unknown>> = []\n private processing = false\n private lastRequestTime = 0\n\n async enqueue<T>(\n execute: () => Promise<T>,\n rateLimitSeconds: number,\n ): Promise<T> {\n return new Promise((resolve, reject) => {\n this.queue.push({\n execute: execute as () => Promise<unknown>,\n resolve: resolve as (value: unknown) => void,\n reject,\n })\n\n if (this.queue.length > 1) {\n const position = this.queue.length\n const waitTime = Math.ceil((position - 1) * rateLimitSeconds)\n const log = waitTime > 10 ? consola.warn : consola.info\n log(\n `Rate limit: request queued (position ${position}, ~${waitTime}s wait)`,\n )\n }\n\n void this.processQueue(rateLimitSeconds)\n })\n }\n\n private async processQueue(rateLimitSeconds: number): Promise<void> {\n if (this.processing) return\n this.processing = true\n\n while (this.queue.length > 0) {\n const now = Date.now()\n const elapsedMs = now - this.lastRequestTime\n const requiredMs = rateLimitSeconds * 1000\n\n if (this.lastRequestTime > 0 && elapsedMs < requiredMs) {\n const waitMs = requiredMs - elapsedMs\n const waitSec = Math.ceil(waitMs / 1000)\n const log = waitSec > 10 ? 
consola.warn : consola.info\n log(`Rate limit: waiting ${waitSec}s before next request...`)\n await new Promise((resolve) => setTimeout(resolve, waitMs))\n }\n\n const request = this.queue.shift()\n if (!request) break\n\n this.lastRequestTime = Date.now()\n\n try {\n const result = await request.execute()\n request.resolve(result)\n } catch (error) {\n request.reject(error)\n }\n }\n\n this.processing = false\n }\n\n get length(): number {\n return this.queue.length\n }\n}\n\nconst requestQueue = new RequestQueue()\n\n/**\n * Execute a request with rate limiting via queue.\n * Requests are queued and processed sequentially at the configured rate.\n */\nexport async function executeWithRateLimit<T>(\n state: State,\n execute: () => Promise<T>,\n): Promise<T> {\n // If no rate limit configured, execute immediately\n if (state.rateLimitSeconds === undefined) {\n return execute()\n }\n\n return requestQueue.enqueue(execute, state.rateLimitSeconds)\n}\n\nexport { requestQueue }\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createChatCompletions = async (\n payload: ChatCompletionsPayload,\n) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = payload.messages.some(\n (x) =>\n typeof x.content !== \"string\"\n && x.content?.some((x) => x.type === \"image_url\"),\n )\n\n // Agent/user check for X-Initiator header\n // Determine if any message is from an agent (\"assistant\" or \"tool\")\n const isAgentCall = payload.messages.some((msg) =>\n [\"assistant\", \"tool\"].includes(msg.role),\n )\n\n // Build headers and add X-Initiator\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isAgentCall ? 
\"agent\" : \"user\",\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create chat completions\", response)\n throw await HTTPError.fromResponse(\n \"Failed to create chat completions\",\n response,\n )\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ChatCompletionResponse\n}\n\n// Streaming types\n\nexport interface ChatCompletionChunk {\n id: string\n object: \"chat.completion.chunk\"\n created: number\n model: string\n choices: Array<Choice>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n completion_tokens_details?: {\n accepted_prediction_tokens: number\n rejected_prediction_tokens: number\n }\n }\n}\n\ninterface Delta {\n content?: string | null\n role?: \"user\" | \"assistant\" | \"system\" | \"tool\"\n tool_calls?: Array<{\n index: number\n id?: string\n type?: \"function\"\n function?: {\n name?: string\n arguments?: string\n }\n }>\n}\n\ninterface Choice {\n index: number\n delta: Delta\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null\n logprobs: object | null\n}\n\n// Non-streaming types\n\nexport interface ChatCompletionResponse {\n id: string\n object: \"chat.completion\"\n created: number\n model: string\n choices: Array<ChoiceNonStreaming>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n }\n}\n\ninterface ResponseMessage {\n role: \"assistant\"\n content: string | null\n tool_calls?: Array<ToolCall>\n}\n\ninterface ChoiceNonStreaming {\n index: number\n message: ResponseMessage\n logprobs: object | null\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\"\n}\n\n// Payload types\n\nexport interface ChatCompletionsPayload {\n messages: Array<Message>\n model: string\n temperature?: number | null\n top_p?: number | null\n max_tokens?: number | null\n stop?: string | Array<string> | null\n n?: number | null\n stream?: boolean | null\n\n frequency_penalty?: number | null\n presence_penalty?: number | null\n logit_bias?: Record<string, number> | null\n logprobs?: boolean | null\n response_format?: { type: \"json_object\" } | null\n seed?: number | null\n tools?: Array<Tool> | null\n tool_choice?:\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } }\n | null\n user?: string | null\n}\n\nexport interface Tool {\n type: \"function\"\n function: {\n name: string\n description?: string\n parameters: Record<string, unknown>\n }\n}\n\nexport interface Message {\n role: \"user\" | \"assistant\" | \"system\" | \"tool\" | \"developer\"\n content: string | Array<ContentPart> | null\n\n name?: string\n tool_calls?: Array<ToolCall>\n tool_call_id?: string\n}\n\nexport interface ToolCall {\n id: string\n type: \"function\"\n function: {\n name: string\n arguments: string\n }\n}\n\nexport type ContentPart = TextPart | ImagePart\n\nexport interface TextPart {\n type: \"text\"\n text: string\n}\n\nexport interface ImagePart {\n type: \"image_url\"\n image_url: {\n url: string\n detail?: \"low\" | \"high\" | \"auto\"\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE, type SSEMessage } from 
\"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport {\n autoCompact,\n checkNeedsCompaction,\n createCompactionMarker,\n type AutoCompactResult,\n} from \"~/lib/auto-compact\"\nimport {\n type MessageContent,\n recordRequest,\n recordResponse,\n} from \"~/lib/history\"\nimport { executeWithRateLimit } from \"~/lib/queue\"\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\nimport { requestTracker } from \"~/lib/tui\"\nimport { isNullish } from \"~/lib/utils\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n} from \"~/services/copilot/create-chat-completions\"\n\n/** Context for recording responses and tracking */\ninterface ResponseContext {\n historyId: string\n trackingId: string | undefined\n startTime: number\n compactResult?: AutoCompactResult\n}\n\nexport async function handleCompletion(c: Context) {\n const startTime = Date.now()\n const originalPayload = await c.req.json<ChatCompletionsPayload>()\n consola.debug(\"Request payload:\", JSON.stringify(originalPayload).slice(-400))\n\n // Update TUI tracker with model info\n const trackingId = c.get(\"trackingId\") as string | undefined\n updateTrackerModel(trackingId, originalPayload.model)\n\n // Record request to history with full messages\n const historyId = recordRequest(\"openai\", {\n model: originalPayload.model,\n messages: convertOpenAIMessages(originalPayload.messages),\n stream: originalPayload.stream ?? false,\n tools: originalPayload.tools?.map((t) => ({\n name: t.function.name,\n description: t.function.description,\n })),\n max_tokens: originalPayload.max_tokens ?? undefined,\n temperature: originalPayload.temperature ?? undefined,\n })\n\n const ctx: ResponseContext = { historyId, trackingId, startTime }\n\n // Find the selected model\n const selectedModel = state.models?.data.find(\n (model) => model.id === originalPayload.model,\n )\n\n // Calculate and display token count\n await logTokenCount(originalPayload, selectedModel)\n\n // Build the final payload with potential auto-compact and max_tokens\n const { finalPayload, compactResult } = await buildFinalPayload(\n originalPayload,\n selectedModel,\n )\n if (compactResult) {\n ctx.compactResult = compactResult\n }\n\n const payload =\n isNullish(finalPayload.max_tokens) ?\n {\n ...finalPayload,\n max_tokens: selectedModel?.capabilities.limits.max_output_tokens,\n }\n : finalPayload\n\n if (isNullish(originalPayload.max_tokens)) {\n consola.debug(\"Set max_tokens to:\", JSON.stringify(payload.max_tokens))\n }\n\n if (state.manualApprove) await awaitApproval()\n\n try {\n const response = await executeWithRateLimit(state, () =>\n createChatCompletions(payload),\n )\n\n if (isNonStreaming(response)) {\n return handleNonStreamingResponse(c, response, ctx)\n }\n\n consola.debug(\"Streaming response\")\n updateTrackerStatus(trackingId, \"streaming\")\n\n return streamSSE(c, async (stream) => {\n await handleStreamingResponse({ stream, response, payload, ctx })\n })\n } catch (error) {\n recordErrorResponse(ctx, payload.model, error)\n throw error\n }\n}\n\n// Build final payload with auto-compact if needed\nasync function buildFinalPayload(\n payload: ChatCompletionsPayload,\n model: Parameters<typeof checkNeedsCompaction>[1] | undefined,\n): Promise<{\n finalPayload: ChatCompletionsPayload\n compactResult: AutoCompactResult | null\n}> {\n if (!state.autoCompact || !model) {\n if (state.autoCompact && !model) {\n consola.warn(\n 
`Auto-compact: Model '${payload.model}' not found in cached models, skipping`,\n )\n }\n return { finalPayload: payload, compactResult: null }\n }\n\n try {\n const check = await checkNeedsCompaction(payload, model)\n consola.debug(\n `Auto-compact check: ${check.currentTokens} tokens, limit ${check.limit}, needed: ${check.needed}`,\n )\n if (!check.needed) {\n return { finalPayload: payload, compactResult: null }\n }\n\n consola.info(\n `Auto-compact triggered: ${check.currentTokens} tokens > ${check.limit} limit`,\n )\n const compactResult = await autoCompact(payload, model)\n return { finalPayload: compactResult.payload, compactResult }\n } catch (error) {\n consola.warn(\n \"Auto-compact failed, proceeding with original payload:\",\n error,\n )\n return { finalPayload: payload, compactResult: null }\n }\n}\n\n// Log token count for debugging\nasync function logTokenCount(\n payload: ChatCompletionsPayload,\n selectedModel: { id: string } | undefined,\n) {\n try {\n if (selectedModel) {\n const tokenCount = await getTokenCount(\n payload,\n selectedModel as Parameters<typeof getTokenCount>[1],\n )\n consola.debug(\"Current token count:\", tokenCount)\n } else {\n consola.debug(\"No model selected, skipping token count calculation\")\n }\n } catch (error) {\n consola.debug(\"Failed to calculate token count:\", error)\n }\n}\n\n// Helper to update tracker model\nfunction updateTrackerModel(trackingId: string | undefined, model: string) {\n if (!trackingId) return\n const request = requestTracker.getRequest(trackingId)\n if (request) request.model = model\n}\n\n// Helper to update tracker status\nfunction updateTrackerStatus(\n trackingId: string | undefined,\n status: \"executing\" | \"streaming\",\n) {\n if (!trackingId) return\n requestTracker.updateRequest(trackingId, { status })\n}\n\n// Record error response to history\nfunction recordErrorResponse(\n ctx: ResponseContext,\n model: string,\n error: unknown,\n) {\n recordResponse(\n ctx.historyId,\n {\n success: false,\n model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Unknown error\",\n content: null,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n// Handle non-streaming response\nfunction handleNonStreamingResponse(\n c: Context,\n originalResponse: ChatCompletionResponse,\n ctx: ResponseContext,\n) {\n consola.debug(\"Non-streaming response:\", JSON.stringify(originalResponse))\n\n // Append compaction marker if auto-compact was performed\n let response = originalResponse\n if (ctx.compactResult?.wasCompacted && response.choices[0]?.message.content) {\n const marker = createCompactionMarker(ctx.compactResult)\n response = {\n ...response,\n choices: response.choices.map((choice, i) =>\n i === 0 ?\n {\n ...choice,\n message: {\n ...choice.message,\n content: (choice.message.content ?? \"\") + marker,\n },\n }\n : choice,\n ),\n }\n }\n\n const choice = response.choices[0]\n const usage = response.usage\n\n recordResponse(\n ctx.historyId,\n {\n success: true,\n model: response.model,\n usage: {\n input_tokens: usage?.prompt_tokens ?? 0,\n output_tokens: usage?.completion_tokens ?? 
0,\n },\n stop_reason: choice.finish_reason,\n content: buildResponseContent(choice),\n toolCalls: extractToolCalls(choice),\n },\n Date.now() - ctx.startTime,\n )\n\n if (ctx.trackingId && usage) {\n requestTracker.updateRequest(ctx.trackingId, {\n inputTokens: usage.prompt_tokens,\n outputTokens: usage.completion_tokens,\n })\n }\n\n return c.json(response)\n}\n\n// Build response content for history\nfunction buildResponseContent(choice: ChatCompletionResponse[\"choices\"][0]) {\n return {\n role: choice.message.role,\n content:\n typeof choice.message.content === \"string\" ?\n choice.message.content\n : JSON.stringify(choice.message.content),\n tool_calls: choice.message.tool_calls?.map((tc) => ({\n id: tc.id,\n type: tc.type,\n function: { name: tc.function.name, arguments: tc.function.arguments },\n })),\n }\n}\n\n// Extract tool calls for history\nfunction extractToolCalls(choice: ChatCompletionResponse[\"choices\"][0]) {\n return choice.message.tool_calls?.map((tc) => ({\n id: tc.id,\n name: tc.function.name,\n input: tc.function.arguments,\n }))\n}\n\n/** Stream accumulator for collecting streaming response data */\ninterface StreamAccumulator {\n model: string\n inputTokens: number\n outputTokens: number\n finishReason: string\n content: string\n toolCalls: Array<{ id: string; name: string; arguments: string }>\n toolCallMap: Map<number, { id: string; name: string; arguments: string }>\n}\n\nfunction createStreamAccumulator(): StreamAccumulator {\n return {\n model: \"\",\n inputTokens: 0,\n outputTokens: 0,\n finishReason: \"\",\n content: \"\",\n toolCalls: [],\n toolCallMap: new Map(),\n }\n}\n\n/** Options for handleStreamingResponse */\ninterface StreamingOptions {\n stream: { writeSSE: (msg: SSEMessage) => Promise<void> }\n response: AsyncIterable<{ data?: string; event?: string }>\n payload: ChatCompletionsPayload\n ctx: ResponseContext\n}\n\n// Handle streaming response\nasync function handleStreamingResponse(opts: StreamingOptions) {\n const { stream, response, payload, ctx } = opts\n const acc = createStreamAccumulator()\n\n try {\n for await (const chunk of response) {\n consola.debug(\"Streaming chunk:\", JSON.stringify(chunk))\n parseStreamChunk(chunk, acc)\n await stream.writeSSE(chunk as SSEMessage)\n }\n\n // Append compaction marker as final chunk if auto-compact was performed\n if (ctx.compactResult?.wasCompacted) {\n const marker = createCompactionMarker(ctx.compactResult)\n const markerChunk: ChatCompletionChunk = {\n id: `compact-marker-${Date.now()}`,\n object: \"chat.completion.chunk\",\n created: Math.floor(Date.now() / 1000),\n model: acc.model || payload.model,\n choices: [\n {\n index: 0,\n delta: { content: marker },\n finish_reason: null,\n logprobs: null,\n },\n ],\n }\n await stream.writeSSE({\n data: JSON.stringify(markerChunk),\n event: \"message\",\n })\n acc.content += marker\n }\n\n recordStreamSuccess(acc, payload.model, ctx)\n completeTracking(ctx.trackingId, acc.inputTokens, acc.outputTokens)\n } catch (error) {\n recordStreamError({ acc, fallbackModel: payload.model, ctx, error })\n failTracking(ctx.trackingId, error)\n throw error\n }\n}\n\n// Parse a single stream chunk and accumulate data\nfunction parseStreamChunk(chunk: { data?: string }, acc: StreamAccumulator) {\n if (!chunk.data || chunk.data === \"[DONE]\") return\n\n try {\n const parsed = JSON.parse(chunk.data) as ChatCompletionChunk\n accumulateModel(parsed, acc)\n accumulateUsage(parsed, acc)\n accumulateChoice(parsed.choices[0], acc)\n } catch {\n // Ignore parse errors\n 
}\n}\n\nfunction accumulateModel(parsed: ChatCompletionChunk, acc: StreamAccumulator) {\n if (parsed.model && !acc.model) acc.model = parsed.model\n}\n\nfunction accumulateUsage(parsed: ChatCompletionChunk, acc: StreamAccumulator) {\n if (parsed.usage) {\n acc.inputTokens = parsed.usage.prompt_tokens\n acc.outputTokens = parsed.usage.completion_tokens\n }\n}\n\nfunction accumulateChoice(\n choice: ChatCompletionChunk[\"choices\"][0] | undefined,\n acc: StreamAccumulator,\n) {\n if (!choice) return\n if (choice.delta.content) acc.content += choice.delta.content\n if (choice.delta.tool_calls) accumulateToolCalls(choice.delta.tool_calls, acc)\n if (choice.finish_reason) acc.finishReason = choice.finish_reason\n}\n\nfunction accumulateToolCalls(\n toolCalls: NonNullable<\n ChatCompletionChunk[\"choices\"][0][\"delta\"]\n >[\"tool_calls\"],\n acc: StreamAccumulator,\n) {\n if (!toolCalls) return\n for (const tc of toolCalls) {\n const idx = tc.index\n if (!acc.toolCallMap.has(idx)) {\n acc.toolCallMap.set(idx, {\n id: tc.id ?? \"\",\n name: tc.function?.name ?? \"\",\n arguments: \"\",\n })\n }\n const item = acc.toolCallMap.get(idx)\n if (item) {\n if (tc.id) item.id = tc.id\n if (tc.function?.name) item.name = tc.function.name\n if (tc.function?.arguments) item.arguments += tc.function.arguments\n }\n }\n}\n\n// Record successful streaming response\nfunction recordStreamSuccess(\n acc: StreamAccumulator,\n fallbackModel: string,\n ctx: ResponseContext,\n) {\n // Collect tool calls from map\n for (const tc of acc.toolCallMap.values()) {\n if (tc.id && tc.name) acc.toolCalls.push(tc)\n }\n\n const toolCalls = acc.toolCalls.map((tc) => ({\n id: tc.id,\n type: \"function\" as const,\n function: { name: tc.name, arguments: tc.arguments },\n }))\n\n recordResponse(\n ctx.historyId,\n {\n success: true,\n model: acc.model || fallbackModel,\n usage: { input_tokens: acc.inputTokens, output_tokens: acc.outputTokens },\n stop_reason: acc.finishReason || undefined,\n content: {\n role: \"assistant\",\n content: acc.content,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n },\n toolCalls:\n acc.toolCalls.length > 0 ?\n acc.toolCalls.map((tc) => ({\n id: tc.id,\n name: tc.name,\n input: tc.arguments,\n }))\n : undefined,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n// Record streaming error\nfunction recordStreamError(opts: {\n acc: StreamAccumulator\n fallbackModel: string\n ctx: ResponseContext\n error: unknown\n}) {\n const { acc, fallbackModel, ctx, error } = opts\n recordResponse(\n ctx.historyId,\n {\n success: false,\n model: acc.model || fallbackModel,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Stream error\",\n content: null,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n// Complete TUI tracking\nfunction completeTracking(\n trackingId: string | undefined,\n inputTokens: number,\n outputTokens: number,\n) {\n if (!trackingId) return\n requestTracker.updateRequest(trackingId, { inputTokens, outputTokens })\n requestTracker.completeRequest(trackingId, 200, { inputTokens, outputTokens })\n}\n\n// Fail TUI tracking\nfunction failTracking(trackingId: string | undefined, error: unknown) {\n if (!trackingId) return\n requestTracker.failRequest(\n trackingId,\n error instanceof Error ? 
error.message : \"Stream error\",\n )\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n\n// Convert OpenAI messages to history MessageContent format\nfunction convertOpenAIMessages(\n messages: ChatCompletionsPayload[\"messages\"],\n): Array<MessageContent> {\n return messages.map((msg) => {\n const result: MessageContent = {\n role: msg.role,\n content:\n typeof msg.content === \"string\" ?\n msg.content\n : JSON.stringify(msg.content),\n }\n\n // Handle tool calls in assistant messages\n if (\"tool_calls\" in msg && msg.tool_calls) {\n result.tool_calls = msg.tool_calls.map((tc) => ({\n id: tc.id,\n type: tc.type,\n function: {\n name: tc.function.name,\n arguments: tc.function.arguments,\n },\n }))\n }\n\n // Handle tool result messages\n if (\"tool_call_id\" in msg && msg.tool_call_id) {\n result.tool_call_id = msg.tool_call_id\n }\n\n // Handle function name\n if (\"name\" in msg && msg.name) {\n result.name = msg.name\n }\n\n return result\n })\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCompletion } from \"./handler\"\n\nexport const completionRoutes = new Hono()\n\ncompletionRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createEmbeddings = async (payload: EmbeddingRequest) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const response = await fetch(`${copilotBaseUrl(state)}/embeddings`, {\n method: \"POST\",\n headers: copilotHeaders(state),\n body: JSON.stringify(payload),\n })\n\n if (!response.ok)\n throw await HTTPError.fromResponse(\"Failed to create embeddings\", response)\n\n return (await response.json()) as EmbeddingResponse\n}\n\nexport interface EmbeddingRequest {\n input: string | Array<string>\n model: string\n}\n\nexport interface Embedding {\n object: string\n embedding: Array<number>\n index: number\n}\n\nexport interface EmbeddingResponse {\n object: string\n data: Array<Embedding>\n model: string\n usage: {\n prompt_tokens: number\n total_tokens: number\n }\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport {\n createEmbeddings,\n type EmbeddingRequest,\n} from \"~/services/copilot/create-embeddings\"\n\nexport const embeddingRoutes = new Hono()\n\nembeddingRoutes.post(\"/\", async (c) => {\n try {\n const payload = await c.req.json<EmbeddingRequest>()\n const response = await createEmbeddings(payload)\n\n return c.json(response)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nexport const eventLoggingRoutes = new Hono()\n\n// Anthropic SDK sends telemetry to this endpoint\n// Return 200 OK to prevent errors in the SDK\neventLoggingRoutes.post(\"/batch\", (c) => {\n return c.text(\"OK\", 200)\n})\n","import type { Context } from \"hono\"\n\nimport {\n clearHistory,\n deleteSession,\n exportHistory,\n getEntry,\n getHistory,\n getSession,\n getSessionEntries,\n getSessions,\n getStats,\n isHistoryEnabled,\n type QueryOptions,\n} from \"~/lib/history\"\n\nexport function handleGetEntries(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 
400)\n }\n\n const query = c.req.query()\n const options: QueryOptions = {\n page: query.page ? Number.parseInt(query.page, 10) : undefined,\n limit: query.limit ? Number.parseInt(query.limit, 10) : undefined,\n model: query.model || undefined,\n endpoint: query.endpoint as \"anthropic\" | \"openai\" | undefined,\n success: query.success ? query.success === \"true\" : undefined,\n from: query.from ? Number.parseInt(query.from, 10) : undefined,\n to: query.to ? Number.parseInt(query.to, 10) : undefined,\n search: query.search || undefined,\n sessionId: query.sessionId || undefined,\n }\n\n const result = getHistory(options)\n return c.json(result)\n}\n\nexport function handleGetEntry(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const entry = getEntry(id)\n\n if (!entry) {\n return c.json({ error: \"Entry not found\" }, 404)\n }\n\n return c.json(entry)\n}\n\nexport function handleDeleteEntries(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n clearHistory()\n return c.json({ success: true, message: \"History cleared\" })\n}\n\nexport function handleGetStats(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const stats = getStats()\n return c.json(stats)\n}\n\nexport function handleExport(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const format = (c.req.query(\"format\") || \"json\") as \"json\" | \"csv\"\n const data = exportHistory(format)\n\n if (format === \"csv\") {\n c.header(\"Content-Type\", \"text/csv\")\n c.header(\"Content-Disposition\", \"attachment; filename=history.csv\")\n } else {\n c.header(\"Content-Type\", \"application/json\")\n c.header(\"Content-Disposition\", \"attachment; filename=history.json\")\n }\n\n return c.body(data)\n}\n\n// Session management endpoints\nexport function handleGetSessions(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const result = getSessions()\n return c.json(result)\n}\n\nexport function handleGetSession(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const session = getSession(id)\n\n if (!session) {\n return c.json({ error: \"Session not found\" }, 404)\n }\n\n // Include entries in the session response\n const entries = getSessionEntries(id)\n\n return c.json({\n ...session,\n entries,\n })\n}\n\nexport function handleDeleteSession(c: Context) {\n if (!isHistoryEnabled()) {\n return c.json({ error: \"History recording is not enabled\" }, 400)\n }\n\n const id = c.req.param(\"id\")\n const success = deleteSession(id)\n\n if (!success) {\n return c.json({ error: \"Session not found\" }, 404)\n }\n\n return c.json({ success: true, message: \"Session deleted\" })\n}\n","// JavaScript for history viewer\nexport const script = `\nlet currentSessionId = null;\nlet currentEntryId = null;\nlet debounceTimer = null;\n\nfunction formatTime(ts) {\n const d = new Date(ts);\n return d.toLocaleTimeString([], {hour:'2-digit',minute:'2-digit',second:'2-digit'});\n}\n\nfunction formatDate(ts) {\n const d = new Date(ts);\n return d.toLocaleDateString([], {month:'short',day:'numeric'}) + ' ' + formatTime(ts);\n}\n\nfunction formatNumber(n) {\n if (n >= 
1000000) return (n / 1000000).toFixed(1) + 'M';\n if (n >= 1000) return (n / 1000).toFixed(1) + 'K';\n return n.toString();\n}\n\nfunction formatDuration(ms) {\n if (!ms) return '-';\n if (ms < 1000) return ms + 'ms';\n return (ms / 1000).toFixed(1) + 's';\n}\n\nfunction getContentText(content) {\n if (!content) return '';\n if (typeof content === 'string') return content;\n if (Array.isArray(content)) {\n return content.map(c => {\n if (c.type === 'text') return c.text || '';\n if (c.type === 'tool_use') return '[tool_use: ' + c.name + ']';\n if (c.type === 'tool_result') return '[tool_result: ' + (c.tool_use_id || '').slice(0,8) + ']';\n if (c.type === 'image' || c.type === 'image_url') return '[image]';\n return c.text || '[' + (c.type || 'unknown') + ']';\n }).join('\\\\n');\n }\n return JSON.stringify(content, null, 2);\n}\n\n// Extract real user text, skipping system tags like <system-reminder>, <ide_opened_file>, etc.\nfunction extractRealUserText(content) {\n if (!content) return '';\n let text = '';\n if (typeof content === 'string') {\n text = content;\n } else if (Array.isArray(content)) {\n text = content\n .filter(c => c.type === 'text' && c.text)\n .map(c => c.text)\n .join('\\\\n');\n }\n if (!text) return '';\n\n // Remove system tags and their content\n const systemTags = [\n 'system-reminder',\n 'ide_opened_file',\n 'ide_selection',\n 'ide_visible_files',\n 'ide_diagnostics',\n 'ide_cursor_position',\n 'user-prompt-submit-hook',\n 'antml:function_calls',\n 'antml:invoke',\n 'antml:parameter'\n ];\n\n let cleaned = text;\n for (const tag of systemTags) {\n // Remove <tag>...</tag> blocks (including multiline)\n const regex = new RegExp('<' + tag + '[^>]*>[\\\\\\\\s\\\\\\\\S]*?</' + tag + '>', 'gi');\n cleaned = cleaned.replace(regex, '');\n // Remove self-closing <tag ... /> or <tag ...>content without closing\n const selfClosingRegex = new RegExp('<' + tag + '[^>]*/>', 'gi');\n cleaned = cleaned.replace(selfClosingRegex, '');\n }\n\n // Trim whitespace and return\n return cleaned.trim();\n}\n\n// Get preview text from assistant message content\nfunction getAssistantPreview(content) {\n if (!content) return '';\n if (typeof content === 'string') {\n const text = content.trim();\n if (text.length > 0) {\n return text.length > 80 ? text.slice(0, 80) + '...' : text;\n }\n return '';\n }\n if (Array.isArray(content)) {\n // First try to get text content\n const textParts = content.filter(c => c.type === 'text' && c.text).map(c => c.text);\n if (textParts.length > 0) {\n const text = textParts.join('\\\\n').trim();\n if (text.length > 0) {\n return text.length > 80 ? text.slice(0, 80) + '...' : text;\n }\n }\n // If no text, show tool_use info\n const toolUses = content.filter(c => c.type === 'tool_use');\n if (toolUses.length === 1) {\n return '[tool_use: ' + toolUses[0].name + ']';\n } else if (toolUses.length > 1) {\n return '[' + toolUses.length + ' tool_uses]';\n }\n }\n return '';\n}\n\nfunction formatContentForDisplay(content) {\n if (!content) return { summary: '', raw: 'null' };\n if (typeof content === 'string') return { summary: content, raw: JSON.stringify(content) };\n if (Array.isArray(content)) {\n const parts = [];\n for (const c of content) {\n if (c.type === 'text') {\n parts.push(c.text || '');\n } else if (c.type === 'tool_use') {\n parts.push('--- tool_use: ' + c.name + ' [' + (c.id || '').slice(0,8) + '] ---\\\\n' + JSON.stringify(c.input, null, 2));\n } else if (c.type === 'tool_result') {\n const resultContent = typeof c.content === 'string' ? 
c.content : JSON.stringify(c.content, null, 2);\n parts.push('--- tool_result [' + (c.tool_use_id || '').slice(0,8) + '] ---\\\\n' + resultContent);\n } else if (c.type === 'image' || c.type === 'image_url') {\n parts.push('[image data]');\n } else {\n parts.push(JSON.stringify(c, null, 2));\n }\n }\n return { summary: parts.join('\\\\n\\\\n'), raw: JSON.stringify(content, null, 2) };\n }\n const raw = JSON.stringify(content, null, 2);\n return { summary: raw, raw };\n}\n\nasync function loadStats() {\n try {\n const res = await fetch('/history/api/stats');\n const data = await res.json();\n if (data.error) return;\n document.getElementById('stat-total').textContent = formatNumber(data.totalRequests);\n document.getElementById('stat-success').textContent = formatNumber(data.successfulRequests);\n document.getElementById('stat-failed').textContent = formatNumber(data.failedRequests);\n document.getElementById('stat-input').textContent = formatNumber(data.totalInputTokens);\n document.getElementById('stat-output').textContent = formatNumber(data.totalOutputTokens);\n document.getElementById('stat-sessions').textContent = data.activeSessions;\n } catch (e) {\n console.error('Failed to load stats', e);\n }\n}\n\nasync function loadSessions() {\n try {\n const res = await fetch('/history/api/sessions');\n const data = await res.json();\n if (data.error) {\n document.getElementById('sessions-list').innerHTML = '<div class=\"empty-state\">Not enabled</div>';\n return;\n }\n\n let html = '<div class=\"session-item all' + (currentSessionId === null ? ' active' : '') + '\" onclick=\"selectSession(null)\">All Requests</div>';\n\n for (const s of data.sessions) {\n const isActive = currentSessionId === s.id;\n const shortId = s.id.slice(0, 8);\n const toolCount = s.toolsUsed ? s.toolsUsed.length : 0;\n html += \\`\n <div class=\"session-item\\${isActive ? ' active' : ''}\" onclick=\"selectSession('\\${s.id}')\">\n <div class=\"session-meta\">\n <span>\\${s.models[0] || 'Unknown'}</span>\n <span class=\"session-time\">\\${formatDate(s.startTime)}</span>\n </div>\n <div class=\"session-stats\">\n <span style=\"color:var(--text-dim);font-family:monospace;font-size:10px;\">\\${shortId}</span>\n <span>\\${s.requestCount} req</span>\n <span>\\${formatNumber(s.totalInputTokens + s.totalOutputTokens)} tok</span>\n \\${toolCount > 0 ? 
'<span class=\"badge tool\">' + toolCount + ' tools</span>' : ''}\n <span class=\"badge \\${s.endpoint}\">\\${s.endpoint}</span>\n </div>\n </div>\n \\`;\n }\n\n document.getElementById('sessions-list').innerHTML = html || '<div class=\"empty-state\">No sessions</div>';\n } catch (e) {\n document.getElementById('sessions-list').innerHTML = '<div class=\"empty-state\">Error loading</div>';\n }\n}\n\nfunction selectSession(id) {\n currentSessionId = id;\n loadSessions();\n loadEntries();\n closeDetail();\n}\n\nasync function loadEntries() {\n const container = document.getElementById('entries-container');\n container.innerHTML = '<div class=\"loading\">Loading...</div>';\n\n const params = new URLSearchParams();\n params.set('limit', '100');\n\n if (currentSessionId) params.set('sessionId', currentSessionId);\n\n const endpoint = document.getElementById('filter-endpoint').value;\n const success = document.getElementById('filter-success').value;\n const search = document.getElementById('filter-search').value;\n\n if (endpoint) params.set('endpoint', endpoint);\n if (success) params.set('success', success);\n if (search) params.set('search', search);\n\n try {\n const res = await fetch('/history/api/entries?' + params.toString());\n const data = await res.json();\n\n if (data.error) {\n container.innerHTML = '<div class=\"empty-state\"><h3>History Not Enabled</h3><p>Start server with --history</p></div>';\n return;\n }\n\n if (data.entries.length === 0) {\n container.innerHTML = '<div class=\"empty-state\"><h3>No entries</h3><p>Make some API requests</p></div>';\n return;\n }\n\n let html = '';\n for (const e of data.entries) {\n const isSelected = currentEntryId === e.id;\n const status = !e.response ? 'pending' : (e.response.success ? 'success' : 'error');\n const statusLabel = !e.response ? 'pending' : (e.response.success ? 'success' : 'error');\n const tokens = e.response ? formatNumber(e.response.usage.input_tokens) + '/' + formatNumber(e.response.usage.output_tokens) : '-';\n const shortId = e.id.slice(0, 8);\n\n // Get preview: show meaningful context about the request\n let lastUserMsg = '';\n const messages = e.request.messages;\n const lastMsg = messages[messages.length - 1];\n\n // If last message is tool_result, look at the previous assistant message for context\n if (lastMsg && lastMsg.role === 'user') {\n const content = lastMsg.content;\n if (Array.isArray(content) && content.length > 0 && content[0].type === 'tool_result') {\n // This is a tool_result response - look for previous assistant message\n const prevMsg = messages.length >= 2 ? messages[messages.length - 2] : null;\n if (prevMsg && prevMsg.role === 'assistant') {\n lastUserMsg = getAssistantPreview(prevMsg.content);\n }\n // If no meaningful preview from assistant, show tool_result count\n if (!lastUserMsg) {\n const toolResults = content.filter(c => c.type === 'tool_result');\n lastUserMsg = '[' + toolResults.length + ' tool_result' + (toolResults.length > 1 ? 's' : '') + ']';\n }\n } else {\n // Regular user message, extract real text\n const realText = extractRealUserText(lastMsg.content);\n if (realText.length > 0) {\n lastUserMsg = realText.slice(0, 80);\n if (realText.length > 80) lastUserMsg += '...';\n }\n }\n } else if (lastMsg && lastMsg.role === 'assistant') {\n lastUserMsg = getAssistantPreview(lastMsg.content);\n }\n\n html += \\`\n <div class=\"entry-item\\${isSelected ? 
' selected' : ''}\" onclick=\"showDetail('\\${e.id}')\">\n <div class=\"entry-header\">\n <span class=\"entry-time\">\\${formatTime(e.timestamp)}</span>\n <span style=\"color:var(--text-dim);font-family:monospace;font-size:10px;\">\\${shortId}</span>\n <span class=\"badge \\${e.endpoint}\">\\${e.endpoint}</span>\n <span class=\"badge \\${status}\">\\${statusLabel}</span>\n \\${e.request.stream ? '<span class=\"badge stream\">stream</span>' : ''}\n <span class=\"entry-model\">\\${e.response?.model || e.request.model}</span>\n <span class=\"entry-tokens\">\\${tokens}</span>\n <span class=\"entry-duration\">\\${formatDuration(e.durationMs)}</span>\n </div>\n \\${lastUserMsg ? '<div class=\"entry-preview\">' + escapeHtml(lastUserMsg) + '</div>' : ''}\n </div>\n \\`;\n }\n\n container.innerHTML = html;\n } catch (e) {\n container.innerHTML = '<div class=\"empty-state\">Error: ' + e.message + '</div>';\n }\n}\n\nasync function showDetail(id) {\n // Update selected state without reloading\n const prevSelected = document.querySelector('.entry-item.selected');\n if (prevSelected) prevSelected.classList.remove('selected');\n const newSelected = document.querySelector(\\`.entry-item[onclick*=\"'\\${id}'\"]\\`);\n if (newSelected) newSelected.classList.add('selected');\n currentEntryId = id;\n\n const panel = document.getElementById('detail-panel');\n const content = document.getElementById('detail-content');\n panel.classList.add('open');\n content.innerHTML = '<div class=\"loading\">Loading...</div>';\n\n try {\n const res = await fetch('/history/api/entries/' + id);\n const entry = await res.json();\n if (entry.error) {\n content.innerHTML = '<div class=\"empty-state\">Not found</div>';\n return;\n }\n\n let html = '';\n\n // Entry metadata (IDs)\n html += \\`\n <div class=\"detail-section\">\n <h4>Entry Info</h4>\n <div class=\"response-info\">\n <div class=\"info-item\"><div class=\"info-label\">Entry ID</div><div class=\"info-value\" style=\"font-family:monospace;font-size:11px;\">\\${entry.id}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Session ID</div><div class=\"info-value\" style=\"font-family:monospace;font-size:11px;\">\\${entry.sessionId || '-'}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Timestamp</div><div class=\"info-value\">\\${formatDate(entry.timestamp)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Endpoint</div><div class=\"info-value\"><span class=\"badge \\${entry.endpoint}\">\\${entry.endpoint}</span></div></div>\n </div>\n </div>\n \\`;\n\n // Response info\n if (entry.response) {\n html += \\`\n <div class=\"detail-section\">\n <h4>Response</h4>\n <div class=\"response-info\">\n <div class=\"info-item\"><div class=\"info-label\">Status</div><div class=\"info-value\"><span class=\"badge \\${entry.response.success ? 'success' : 'error'}\">\\${entry.response.success ? 
'Success' : 'Error'}</span></div></div>\n <div class=\"info-item\"><div class=\"info-label\">Model</div><div class=\"info-value\">\\${entry.response.model}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Input Tokens</div><div class=\"info-value\">\\${formatNumber(entry.response.usage.input_tokens)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Output Tokens</div><div class=\"info-value\">\\${formatNumber(entry.response.usage.output_tokens)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Duration</div><div class=\"info-value\">\\${formatDuration(entry.durationMs)}</div></div>\n <div class=\"info-item\"><div class=\"info-label\">Stop Reason</div><div class=\"info-value\">\\${entry.response.stop_reason || '-'}</div></div>\n </div>\n \\${entry.response.error ? '<div class=\"error-detail\"><div class=\"error-label\">Error Details</div><pre class=\"error-content\">' + escapeHtml(entry.response.error) + '</pre></div>' : ''}\n </div>\n \\`;\n }\n\n // System prompt\n if (entry.request.system) {\n html += \\`\n <div class=\"detail-section\">\n <h4>System Prompt</h4>\n <div class=\"message system\">\n <div class=\"message-content\">\\${escapeHtml(entry.request.system)}</div>\n </div>\n </div>\n \\`;\n }\n\n // Messages\n html += '<div class=\"detail-section\"><h4>Messages</h4><div class=\"messages-list\">';\n for (const msg of entry.request.messages) {\n const roleClass = msg.role === 'user' ? 'user' : (msg.role === 'assistant' ? 'assistant' : (msg.role === 'system' ? 'system' : 'tool'));\n const formatted = formatContentForDisplay(msg.content);\n const isLong = formatted.summary.length > 500;\n const rawContent = JSON.stringify(msg, null, 2);\n\n html += \\`\n <div class=\"message \\${roleClass}\">\n <button class=\"raw-btn small\" onclick=\"showRawJson(event, \\${escapeAttr(rawContent)})\">Raw</button>\n <button class=\"copy-btn small\" onclick=\"copyText(event, this)\" data-content=\"\\${escapeAttr(formatted.summary)}\">Copy</button>\n <div class=\"message-role\">\\${msg.role}\\${msg.name ? ' (' + msg.name + ')' : ''}\\${msg.tool_call_id ? ' [' + (msg.tool_call_id || '').slice(0,8) + ']' : ''}</div>\n <div class=\"message-content\\${isLong ? ' collapsed' : ''}\" id=\"msg-\\${Math.random().toString(36).slice(2)}\">\\${escapeHtml(formatted.summary)}</div>\n \\${isLong ? 
'<span class=\"expand-btn\" onclick=\"toggleExpand(this)\">Show more</span>' : ''}\n \\`;\n\n // Tool calls\n if (msg.tool_calls && msg.tool_calls.length > 0) {\n for (const tc of msg.tool_calls) {\n html += \\`\n <div class=\"tool-call\">\n <span class=\"tool-name\">\\${tc.function.name}</span>\n <div class=\"tool-args\">\\${escapeHtml(tc.function.arguments)}</div>\n </div>\n \\`;\n }\n }\n\n html += '</div>';\n }\n html += '</div></div>';\n\n // Response content\n if (entry.response?.content) {\n const formatted = formatContentForDisplay(entry.response.content.content);\n const rawContent = JSON.stringify(entry.response.content, null, 2);\n html += \\`\n <div class=\"detail-section\">\n <h4>Response Content</h4>\n <div class=\"message assistant\">\n <button class=\"raw-btn small\" onclick=\"showRawJson(event, \\${escapeAttr(rawContent)})\">Raw</button>\n <button class=\"copy-btn small\" onclick=\"copyText(event, this)\" data-content=\"\\${escapeAttr(formatted.summary)}\">Copy</button>\n <div class=\"message-content\">\\${escapeHtml(formatted.summary)}</div>\n </div>\n </div>\n \\`;\n }\n\n // Response tool calls\n if (entry.response?.toolCalls && entry.response.toolCalls.length > 0) {\n html += '<div class=\"detail-section\"><h4>Tool Calls</h4>';\n for (const tc of entry.response.toolCalls) {\n const tcRaw = JSON.stringify(tc, null, 2);\n html += \\`\n <div class=\"tool-call\" style=\"position:relative;\">\n <button class=\"raw-btn small\" style=\"position:absolute;top:4px;right:4px;opacity:1;\" onclick=\"showRawJson(event, \\${escapeAttr(tcRaw)})\">Raw</button>\n <span class=\"tool-name\">\\${tc.name}</span> <span style=\"color:var(--text-muted);font-size:11px;\">[\\${(tc.id || '').slice(0,8)}]</span>\n <div class=\"tool-args\">\\${escapeHtml(tc.input)}</div>\n </div>\n \\`;\n }\n html += '</div>';\n }\n\n // Tools defined\n if (entry.request.tools && entry.request.tools.length > 0) {\n html += '<div class=\"detail-section\"><h4>Available Tools (' + entry.request.tools.length + ')</h4>';\n html += '<div style=\"font-size:11px;color:var(--text-muted)\">' + entry.request.tools.map(t => t.name).join(', ') + '</div>';\n html += '</div>';\n }\n\n content.innerHTML = html;\n } catch (e) {\n content.innerHTML = '<div class=\"empty-state\">Error: ' + e.message + '</div>';\n }\n}\n\nfunction closeDetail() {\n currentEntryId = null;\n document.getElementById('detail-panel').classList.remove('open');\n loadEntries();\n}\n\nfunction toggleExpand(btn) {\n const content = btn.previousElementSibling;\n const isCollapsed = content.classList.contains('collapsed');\n content.classList.toggle('collapsed');\n btn.textContent = isCollapsed ? 'Show less' : 'Show more';\n}\n\nfunction copyText(event, btn) {\n event.stopPropagation();\n const text = btn.getAttribute('data-content');\n navigator.clipboard.writeText(text);\n const orig = btn.textContent;\n btn.textContent = 'Copied!';\n setTimeout(() => btn.textContent = orig, 1000);\n}\n\nfunction escapeHtml(str) {\n if (!str) return '';\n return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\"/g, '&quot;');\n}\n\nfunction escapeAttr(str) {\n if (!str) return '';\n return str.replace(/&/g, '&amp;').replace(/\"/g, '&quot;').replace(/'/g, '&#39;');\n}\n\nlet currentRawContent = '';\n\nfunction showRawJson(event, content) {\n event.stopPropagation();\n currentRawContent = typeof content === 'string' ? 
content : JSON.stringify(content, null, 2);\n document.getElementById('raw-content').textContent = currentRawContent;\n document.getElementById('raw-modal').classList.add('open');\n}\n\nfunction closeRawModal(event) {\n if (event && event.target !== event.currentTarget) return;\n document.getElementById('raw-modal').classList.remove('open');\n}\n\nfunction copyRawContent() {\n navigator.clipboard.writeText(currentRawContent);\n const btns = document.querySelectorAll('.modal-header button');\n const copyBtn = btns[0];\n const orig = copyBtn.textContent;\n copyBtn.textContent = 'Copied!';\n setTimeout(() => copyBtn.textContent = orig, 1000);\n}\n\nfunction debounceFilter() {\n clearTimeout(debounceTimer);\n debounceTimer = setTimeout(loadEntries, 300);\n}\n\nfunction refresh() {\n loadStats();\n loadSessions();\n loadEntries();\n}\n\nfunction exportData(format) {\n window.open('/history/api/export?format=' + format, '_blank');\n}\n\nasync function clearAll() {\n if (!confirm('Clear all history? This cannot be undone.')) return;\n try {\n await fetch('/history/api/entries', { method: 'DELETE' });\n currentSessionId = null;\n currentEntryId = null;\n closeDetail();\n refresh();\n } catch (e) {\n alert('Failed: ' + e.message);\n }\n}\n\n// Initial load\nloadStats();\nloadSessions();\nloadEntries();\n\n// Keyboard shortcuts\ndocument.addEventListener('keydown', (e) => {\n if (e.key === 'Escape') {\n if (document.getElementById('raw-modal').classList.contains('open')) {\n closeRawModal();\n } else {\n closeDetail();\n }\n }\n if (e.key === 'r' && (e.metaKey || e.ctrlKey)) {\n e.preventDefault();\n refresh();\n }\n});\n\n// Auto-refresh every 10 seconds\nsetInterval(() => {\n loadStats();\n loadSessions();\n}, 10000);\n`\n","// CSS styles for history viewer\nexport const styles = `\n:root {\n --bg: #0d1117;\n --bg-secondary: #161b22;\n --bg-tertiary: #21262d;\n --bg-hover: #30363d;\n --text: #e6edf3;\n --text-muted: #8b949e;\n --text-dim: #6e7681;\n --border: #30363d;\n --primary: #58a6ff;\n --success: #3fb950;\n --error: #f85149;\n --warning: #d29922;\n --purple: #a371f7;\n --cyan: #39c5cf;\n}\n@media (prefers-color-scheme: light) {\n :root {\n --bg: #ffffff;\n --bg-secondary: #f6f8fa;\n --bg-tertiary: #eaeef2;\n --bg-hover: #d0d7de;\n --text: #1f2328;\n --text-muted: #656d76;\n --text-dim: #8c959f;\n --border: #d0d7de;\n }\n}\n* { box-sizing: border-box; margin: 0; padding: 0; }\nbody {\n font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica, Arial, sans-serif;\n background: var(--bg);\n color: var(--text);\n line-height: 1.4;\n font-size: 13px;\n}\n\n/* Layout */\n.layout { display: flex; height: 100vh; }\n.sidebar {\n width: 280px;\n border-right: 1px solid var(--border);\n display: flex;\n flex-direction: column;\n background: var(--bg-secondary);\n}\n.main { flex: 1; display: flex; flex-direction: column; overflow: hidden; }\n\n/* Header */\n.header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n align-items: center;\n justify-content: space-between;\n gap: 12px;\n background: var(--bg-secondary);\n}\n.header h1 { font-size: 16px; font-weight: 600; }\n.header-actions { display: flex; gap: 8px; }\n\n/* Stats bar */\n.stats-bar {\n display: flex;\n gap: 16px;\n padding: 8px 16px;\n border-bottom: 1px solid var(--border);\n background: var(--bg-tertiary);\n font-size: 12px;\n}\n.stat { display: flex; align-items: center; gap: 4px; }\n.stat-value { font-weight: 600; }\n.stat-label { color: var(--text-muted); }\n\n/* Sessions sidebar 
*/\n.sidebar-header {\n padding: 12px;\n border-bottom: 1px solid var(--border);\n font-weight: 600;\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.sessions-list {\n flex: 1;\n overflow-y: auto;\n}\n.session-item {\n padding: 10px 12px;\n border-bottom: 1px solid var(--border);\n cursor: pointer;\n transition: background 0.15s;\n}\n.session-item:hover { background: var(--bg-hover); }\n.session-item.active { background: var(--bg-tertiary); border-left: 3px solid var(--primary); }\n.session-item.all { font-weight: 600; color: var(--primary); }\n.session-meta { display: flex; justify-content: space-between; margin-bottom: 4px; }\n.session-time { color: var(--text-muted); font-size: 11px; }\n.session-stats { display: flex; gap: 8px; font-size: 11px; color: var(--text-dim); }\n\n/* Buttons */\nbutton {\n background: var(--bg-tertiary);\n border: 1px solid var(--border);\n color: var(--text);\n padding: 5px 10px;\n border-radius: 6px;\n cursor: pointer;\n font-size: 12px;\n transition: all 0.15s;\n display: inline-flex;\n align-items: center;\n gap: 4px;\n}\nbutton:hover { background: var(--bg-hover); }\nbutton.primary { background: var(--primary); color: #fff; border-color: var(--primary); }\nbutton.danger { color: var(--error); }\nbutton.danger:hover { background: rgba(248,81,73,0.1); }\nbutton:disabled { opacity: 0.5; cursor: not-allowed; }\nbutton.small { padding: 3px 6px; font-size: 11px; }\nbutton.icon-only { padding: 5px 6px; }\n\n/* Filters */\n.filters {\n display: flex;\n gap: 8px;\n padding: 8px 16px;\n border-bottom: 1px solid var(--border);\n flex-wrap: wrap;\n}\ninput, select {\n background: var(--bg);\n border: 1px solid var(--border);\n color: var(--text);\n padding: 5px 8px;\n border-radius: 6px;\n font-size: 12px;\n}\ninput:focus, select:focus { outline: none; border-color: var(--primary); }\ninput::placeholder { color: var(--text-dim); }\n\n/* Entries list */\n.entries-container { flex: 1; overflow-y: auto; }\n.entry-item {\n border-bottom: 1px solid var(--border);\n cursor: pointer;\n transition: background 0.15s;\n}\n.entry-item:hover { background: var(--bg-secondary); }\n.entry-item.selected { background: var(--bg-tertiary); }\n.entry-header {\n display: flex;\n align-items: center;\n gap: 8px;\n padding: 8px 16px;\n}\n.entry-time { color: var(--text-muted); font-size: 11px; min-width: 70px; }\n.entry-model { font-weight: 500; flex: 1; }\n.entry-tokens { font-size: 11px; color: var(--text-dim); }\n.entry-duration { font-size: 11px; color: var(--text-dim); min-width: 50px; text-align: right; }\n.entry-preview {\n padding: 0 16px 8px 16px;\n font-size: 11px;\n color: var(--text-muted);\n overflow: hidden;\n text-overflow: ellipsis;\n white-space: nowrap;\n}\n\n/* Badges */\n.badge {\n display: inline-block;\n padding: 1px 6px;\n border-radius: 10px;\n font-size: 10px;\n font-weight: 500;\n}\n.badge.success { background: rgba(63, 185, 80, 0.15); color: var(--success); }\n.badge.error { background: rgba(248, 81, 73, 0.15); color: var(--error); }\n.badge.pending { background: rgba(136, 136, 136, 0.15); color: var(--text-muted); }\n.badge.anthropic { background: rgba(163, 113, 247, 0.15); color: var(--purple); }\n.badge.openai { background: rgba(210, 153, 34, 0.15); color: var(--warning); }\n.badge.stream { background: rgba(57, 197, 207, 0.15); color: var(--cyan); }\n.badge.tool { background: rgba(88, 166, 255, 0.15); color: var(--primary); }\n\n/* Detail panel */\n.detail-panel {\n width: 0;\n border-left: 1px solid var(--border);\n background: 
var(--bg-secondary);\n transition: width 0.2s;\n overflow: hidden;\n display: flex;\n flex-direction: column;\n}\n.detail-panel.open { width: 50%; min-width: 400px; }\n.detail-header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.detail-content { flex: 1; overflow-y: auto; padding: 16px; }\n.detail-section { margin-bottom: 16px; }\n.detail-section h4 {\n font-size: 11px;\n text-transform: uppercase;\n color: var(--text-muted);\n margin-bottom: 8px;\n letter-spacing: 0.5px;\n}\n\n/* Messages display */\n.messages-list { display: flex; flex-direction: column; gap: 8px; }\n.message {\n padding: 10px 12px;\n border-radius: 8px;\n background: var(--bg);\n border: 1px solid var(--border);\n position: relative;\n}\n.message.user { border-left: 3px solid var(--primary); }\n.message.assistant { border-left: 3px solid var(--success); }\n.message.system { border-left: 3px solid var(--warning); background: var(--bg-tertiary); }\n.message.tool { border-left: 3px solid var(--purple); }\n.message-role {\n font-size: 10px;\n text-transform: uppercase;\n color: var(--text-muted);\n margin-bottom: 4px;\n font-weight: 600;\n}\n.message-content {\n white-space: pre-wrap;\n word-break: break-word;\n font-family: 'SF Mono', Monaco, 'Courier New', monospace;\n font-size: 12px;\n max-height: 300px;\n overflow-y: auto;\n}\n.message-content.collapsed { max-height: 100px; }\n.expand-btn {\n color: var(--primary);\n cursor: pointer;\n font-size: 11px;\n margin-top: 4px;\n display: inline-block;\n}\n\n/* Tool calls */\n.tool-call {\n background: var(--bg-tertiary);\n padding: 8px;\n border-radius: 6px;\n margin-top: 8px;\n font-size: 12px;\n}\n.tool-name { color: var(--purple); font-weight: 600; }\n.tool-args {\n font-family: monospace;\n font-size: 11px;\n color: var(--text-muted);\n margin-top: 4px;\n white-space: pre-wrap;\n max-height: 150px;\n overflow-y: auto;\n}\n\n/* Response info */\n.response-info {\n display: grid;\n grid-template-columns: repeat(auto-fit, minmax(100px, 1fr));\n gap: 12px;\n}\n.info-item { }\n.info-label { font-size: 11px; color: var(--text-muted); }\n.info-value { font-weight: 500; }\n\n/* Error detail display */\n.error-detail {\n margin-top: 12px;\n padding: 12px;\n background: rgba(248, 81, 73, 0.1);\n border: 1px solid rgba(248, 81, 73, 0.3);\n border-radius: 6px;\n}\n.error-label {\n font-size: 11px;\n color: var(--error);\n font-weight: 600;\n margin-bottom: 8px;\n text-transform: uppercase;\n}\n.error-content {\n margin: 0;\n font-family: 'SF Mono', Monaco, 'Courier New', monospace;\n font-size: 12px;\n color: var(--error);\n white-space: pre-wrap;\n word-break: break-word;\n max-height: 300px;\n overflow-y: auto;\n}\n\n/* Empty state */\n.empty-state {\n text-align: center;\n padding: 40px 20px;\n color: var(--text-muted);\n}\n.empty-state h3 { margin-bottom: 8px; color: var(--text); }\n\n/* Loading */\n.loading { text-align: center; padding: 20px; color: var(--text-muted); }\n\n/* Scrollbar */\n::-webkit-scrollbar { width: 8px; height: 8px; }\n::-webkit-scrollbar-track { background: var(--bg); }\n::-webkit-scrollbar-thumb { background: var(--border); border-radius: 4px; }\n::-webkit-scrollbar-thumb:hover { background: var(--text-dim); }\n\n/* Copy/Raw buttons */\n.copy-btn, .raw-btn {\n position: absolute;\n top: 4px;\n opacity: 0;\n transition: opacity 0.15s;\n}\n.copy-btn { right: 4px; }\n.raw-btn { right: 50px; }\n.message:hover .copy-btn, .message:hover .raw-btn { opacity: 1; 
}\n\n/* Raw JSON modal */\n.modal-overlay {\n position: fixed;\n top: 0; left: 0; right: 0; bottom: 0;\n background: rgba(0,0,0,0.6);\n display: none;\n justify-content: center;\n align-items: center;\n z-index: 1000;\n}\n.modal-overlay.open { display: flex; }\n.modal {\n background: var(--bg-secondary);\n border: 1px solid var(--border);\n border-radius: 8px;\n width: 80%;\n max-width: 800px;\n max-height: 80vh;\n display: flex;\n flex-direction: column;\n}\n.modal-header {\n padding: 12px 16px;\n border-bottom: 1px solid var(--border);\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.modal-body {\n flex: 1;\n overflow: auto;\n padding: 16px;\n}\n.modal-body pre {\n margin: 0;\n font-family: 'SF Mono', Monaco, 'Courier New', monospace;\n font-size: 12px;\n white-space: pre-wrap;\n word-break: break-word;\n}\n`\n","// HTML template for history viewer\nexport const template = `\n<div class=\"layout\">\n <!-- Sidebar: Sessions -->\n <div class=\"sidebar\">\n <div class=\"sidebar-header\">\n <span>Sessions</span>\n <button class=\"small danger\" onclick=\"clearAll()\" title=\"Clear all\">Clear</button>\n </div>\n <div class=\"sessions-list\" id=\"sessions-list\">\n <div class=\"loading\">Loading...</div>\n </div>\n </div>\n\n <!-- Main content -->\n <div class=\"main\">\n <div class=\"header\">\n <h1>Request History</h1>\n <div class=\"header-actions\">\n <button onclick=\"refresh()\">Refresh</button>\n <button onclick=\"exportData('json')\">Export JSON</button>\n <button onclick=\"exportData('csv')\">Export CSV</button>\n </div>\n </div>\n\n <div class=\"stats-bar\" id=\"stats-bar\">\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-total\">-</span><span class=\"stat-label\">requests</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-success\">-</span><span class=\"stat-label\">success</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-failed\">-</span><span class=\"stat-label\">failed</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-input\">-</span><span class=\"stat-label\">in tokens</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-output\">-</span><span class=\"stat-label\">out tokens</span></div>\n <div class=\"stat\"><span class=\"stat-value\" id=\"stat-sessions\">-</span><span class=\"stat-label\">sessions</span></div>\n </div>\n\n <div class=\"filters\">\n <input type=\"text\" id=\"filter-search\" placeholder=\"Search messages...\" style=\"flex:1;min-width:150px\" onkeyup=\"debounceFilter()\">\n <select id=\"filter-endpoint\" onchange=\"loadEntries()\">\n <option value=\"\">All Endpoints</option>\n <option value=\"anthropic\">Anthropic</option>\n <option value=\"openai\">OpenAI</option>\n </select>\n <select id=\"filter-success\" onchange=\"loadEntries()\">\n <option value=\"\">All Status</option>\n <option value=\"true\">Success</option>\n <option value=\"false\">Failed</option>\n </select>\n </div>\n\n <div style=\"display:flex;flex:1;overflow:hidden;\">\n <div class=\"entries-container\" id=\"entries-container\">\n <div class=\"loading\">Loading...</div>\n </div>\n\n <!-- Detail panel -->\n <div class=\"detail-panel\" id=\"detail-panel\">\n <div class=\"detail-header\">\n <span>Request Details</span>\n <button class=\"icon-only\" onclick=\"closeDetail()\">&times;</button>\n </div>\n <div class=\"detail-content\" id=\"detail-content\"></div>\n </div>\n </div>\n </div>\n</div>\n\n<!-- Raw JSON Modal -->\n<div class=\"modal-overlay\" id=\"raw-modal\" 
onclick=\"closeRawModal(event)\">\n <div class=\"modal\" onclick=\"event.stopPropagation()\">\n <div class=\"modal-header\">\n <span>Raw JSON</span>\n <div>\n <button class=\"small\" onclick=\"copyRawContent()\">Copy</button>\n <button class=\"icon-only\" onclick=\"closeRawModal()\">&times;</button>\n </div>\n </div>\n <div class=\"modal-body\">\n <pre id=\"raw-content\"></pre>\n </div>\n </div>\n</div>\n`\n","// Web UI HTML template for history viewer\n// Features: Session grouping, full message content, compact design\n\nimport { script } from \"./ui/script\"\nimport { styles } from \"./ui/styles\"\nimport { template } from \"./ui/template\"\n\nexport function getHistoryUI(): string {\n return `<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <title>Copilot API - Request History</title>\n <style>${styles}</style>\n</head>\n<body>\n ${template}\n <script>${script}</script>\n</body>\n</html>`\n}\n","import { Hono } from \"hono\"\n\nimport {\n handleDeleteEntries,\n handleDeleteSession,\n handleExport,\n handleGetEntries,\n handleGetEntry,\n handleGetSession,\n handleGetSessions,\n handleGetStats,\n} from \"./api\"\nimport { getHistoryUI } from \"./ui\"\n\nexport const historyRoutes = new Hono()\n\n// API endpoints\nhistoryRoutes.get(\"/api/entries\", handleGetEntries)\nhistoryRoutes.get(\"/api/entries/:id\", handleGetEntry)\nhistoryRoutes.delete(\"/api/entries\", handleDeleteEntries)\nhistoryRoutes.get(\"/api/stats\", handleGetStats)\nhistoryRoutes.get(\"/api/export\", handleExport)\n\n// Session endpoints\nhistoryRoutes.get(\"/api/sessions\", handleGetSessions)\nhistoryRoutes.get(\"/api/sessions/:id\", handleGetSession)\nhistoryRoutes.delete(\"/api/sessions/:id\", handleDeleteSession)\n\n// Web UI - serve HTML for the root path\nhistoryRoutes.get(\"/\", (c) => {\n return c.html(getHistoryUI())\n})\n","import { type AnthropicResponse } from \"./anthropic-types\"\n\nexport function mapOpenAIStopReasonToAnthropic(\n finishReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null,\n): AnthropicResponse[\"stop_reason\"] {\n if (finishReason === null) {\n return null\n }\n const stopReasonMap = {\n stop: \"end_turn\",\n length: \"max_tokens\",\n tool_calls: \"tool_use\",\n content_filter: \"end_turn\",\n } as const\n return stopReasonMap[finishReason]\n}\n","import consola from \"consola\"\n\nimport {\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n type ContentPart,\n type Message,\n type TextPart,\n type Tool,\n type ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicAssistantContentBlock,\n type AnthropicAssistantMessage,\n type AnthropicMessage,\n type AnthropicMessagesPayload,\n type AnthropicResponse,\n type AnthropicTextBlock,\n type AnthropicThinkingBlock,\n type AnthropicTool,\n type AnthropicToolResultBlock,\n type AnthropicToolUseBlock,\n type AnthropicUserContentBlock,\n type AnthropicUserMessage,\n} from \"./anthropic-types\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\n// OpenAI limits function names to 64 characters\nconst OPENAI_TOOL_NAME_LIMIT = 64\n\n// Mapping from truncated tool names to original names\n// This is used to restore original names in responses\nexport interface ToolNameMapping {\n truncatedToOriginal: Map<string, string>\n originalToTruncated: Map<string, string>\n}\n\n// Helper function to fix message sequences by adding missing tool responses\n// This prevents 
\"tool_use ids were found without tool_result blocks\" errors\nfunction fixMessageSequence(messages: Array<Message>): Array<Message> {\n const fixedMessages: Array<Message> = []\n\n for (let i = 0; i < messages.length; i++) {\n const message = messages[i]\n fixedMessages.push(message)\n\n if (\n message.role === \"assistant\"\n && message.tool_calls\n && message.tool_calls.length > 0\n ) {\n // Find which tool calls already have responses\n const foundToolResponses = new Set<string>()\n\n // Look ahead to see what tool responses exist\n let j = i + 1\n while (j < messages.length && messages[j].role === \"tool\") {\n const toolMessage = messages[j]\n if (toolMessage.tool_call_id) {\n foundToolResponses.add(toolMessage.tool_call_id)\n }\n j++\n }\n\n // Add placeholder responses for missing tool calls\n for (const toolCall of message.tool_calls) {\n if (!foundToolResponses.has(toolCall.id)) {\n consola.debug(`Adding placeholder tool_result for ${toolCall.id}`)\n fixedMessages.push({\n role: \"tool\",\n tool_call_id: toolCall.id,\n content: \"Tool execution was interrupted or failed.\",\n })\n }\n }\n }\n }\n\n return fixedMessages\n}\n\n// Payload translation\n\nexport interface TranslationResult {\n payload: ChatCompletionsPayload\n toolNameMapping: ToolNameMapping\n}\n\nexport function translateToOpenAI(\n payload: AnthropicMessagesPayload,\n): TranslationResult {\n // Create tool name mapping for this request\n const toolNameMapping: ToolNameMapping = {\n truncatedToOriginal: new Map(),\n originalToTruncated: new Map(),\n }\n\n const messages = translateAnthropicMessagesToOpenAI(\n payload.messages,\n payload.system,\n toolNameMapping,\n )\n\n return {\n payload: {\n model: translateModelName(payload.model),\n // Fix message sequence to ensure all tool_use blocks have corresponding tool_result\n messages: fixMessageSequence(messages),\n max_tokens: payload.max_tokens,\n stop: payload.stop_sequences,\n stream: payload.stream,\n temperature: payload.temperature,\n top_p: payload.top_p,\n user: payload.metadata?.user_id,\n tools: translateAnthropicToolsToOpenAI(payload.tools, toolNameMapping),\n tool_choice: translateAnthropicToolChoiceToOpenAI(\n payload.tool_choice,\n toolNameMapping,\n ),\n },\n toolNameMapping,\n }\n}\n\nfunction translateModelName(model: string): string {\n // Handle short model name aliases (e.g., \"opus\", \"sonnet\", \"haiku\")\n // Maps to the latest available version in Copilot\n const shortNameMap: Record<string, string> = {\n opus: \"claude-opus-4.5\",\n sonnet: \"claude-sonnet-4.5\",\n haiku: \"claude-haiku-4.5\",\n }\n\n if (shortNameMap[model]) {\n return shortNameMap[model]\n }\n\n // Handle versioned model names from Anthropic API (e.g., claude-sonnet-4-20250514)\n // Strip date suffixes and convert to Copilot-compatible format\n\n // claude-sonnet-4-5-YYYYMMDD -> claude-sonnet-4.5\n if (/^claude-sonnet-4-5-\\d+$/.test(model)) {\n return \"claude-sonnet-4.5\"\n }\n // claude-sonnet-4-YYYYMMDD -> claude-sonnet-4\n if (/^claude-sonnet-4-\\d+$/.test(model)) {\n return \"claude-sonnet-4\"\n }\n\n // claude-opus-4-5-YYYYMMDD -> claude-opus-4.5\n if (/^claude-opus-4-5-\\d+$/.test(model)) {\n return \"claude-opus-4.5\"\n }\n // claude-opus-4-YYYYMMDD -> claude-opus-4.5 (default to latest)\n if (/^claude-opus-4-\\d+$/.test(model)) {\n return \"claude-opus-4.5\"\n }\n\n // claude-haiku-4-5-YYYYMMDD -> claude-haiku-4.5\n if (/^claude-haiku-4-5-\\d+$/.test(model)) {\n return \"claude-haiku-4.5\"\n }\n // claude-haiku-3-5-YYYYMMDD -> claude-haiku-4.5 (upgrade to latest 
available)\n if (/^claude-haiku-3-5-\\d+$/.test(model)) {\n return \"claude-haiku-4.5\"\n }\n\n return model\n}\n\nfunction translateAnthropicMessagesToOpenAI(\n anthropicMessages: Array<AnthropicMessage>,\n system: string | Array<AnthropicTextBlock> | undefined,\n toolNameMapping: ToolNameMapping,\n): Array<Message> {\n const systemMessages = handleSystemPrompt(system)\n\n const otherMessages = anthropicMessages.flatMap((message) =>\n message.role === \"user\" ?\n handleUserMessage(message)\n : handleAssistantMessage(message, toolNameMapping),\n )\n\n return [...systemMessages, ...otherMessages]\n}\n\nfunction handleSystemPrompt(\n system: string | Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n if (!system) {\n return []\n }\n\n if (typeof system === \"string\") {\n return [{ role: \"system\", content: system }]\n } else {\n const systemText = system.map((block) => block.text).join(\"\\n\\n\")\n return [{ role: \"system\", content: systemText }]\n }\n}\n\nfunction handleUserMessage(message: AnthropicUserMessage): Array<Message> {\n const newMessages: Array<Message> = []\n\n if (Array.isArray(message.content)) {\n const toolResultBlocks = message.content.filter(\n (block): block is AnthropicToolResultBlock =>\n block.type === \"tool_result\",\n )\n const otherBlocks = message.content.filter(\n (block) => block.type !== \"tool_result\",\n )\n\n // Tool results must come first to maintain protocol: tool_use -> tool_result -> user\n for (const block of toolResultBlocks) {\n newMessages.push({\n role: \"tool\",\n tool_call_id: block.tool_use_id,\n content: mapContent(block.content),\n })\n }\n\n if (otherBlocks.length > 0) {\n newMessages.push({\n role: \"user\",\n content: mapContent(otherBlocks),\n })\n }\n } else {\n newMessages.push({\n role: \"user\",\n content: mapContent(message.content),\n })\n }\n\n return newMessages\n}\n\nfunction handleAssistantMessage(\n message: AnthropicAssistantMessage,\n toolNameMapping: ToolNameMapping,\n): Array<Message> {\n if (!Array.isArray(message.content)) {\n return [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n }\n\n const toolUseBlocks = message.content.filter(\n (block): block is AnthropicToolUseBlock => block.type === \"tool_use\",\n )\n\n const textBlocks = message.content.filter(\n (block): block is AnthropicTextBlock => block.type === \"text\",\n )\n\n const thinkingBlocks = message.content.filter(\n (block): block is AnthropicThinkingBlock => block.type === \"thinking\",\n )\n\n // Combine text and thinking blocks, as OpenAI doesn't have separate thinking blocks\n const allTextContent = [\n ...textBlocks.map((b) => b.text),\n ...thinkingBlocks.map((b) => b.thinking),\n ].join(\"\\n\\n\")\n\n return toolUseBlocks.length > 0 ?\n [\n {\n role: \"assistant\",\n content: allTextContent || null,\n tool_calls: toolUseBlocks.map((toolUse) => ({\n id: toolUse.id,\n type: \"function\",\n function: {\n name: getTruncatedToolName(toolUse.name, toolNameMapping),\n arguments: JSON.stringify(toolUse.input),\n },\n })),\n },\n ]\n : [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n}\n\nfunction mapContent(\n content:\n | string\n | Array<AnthropicUserContentBlock | AnthropicAssistantContentBlock>,\n): string | Array<ContentPart> | null {\n if (typeof content === \"string\") {\n return content\n }\n if (!Array.isArray(content)) {\n return null\n }\n\n const hasImage = content.some((block) => block.type === \"image\")\n if (!hasImage) {\n return content\n .filter(\n (block): block is 
AnthropicTextBlock | AnthropicThinkingBlock =>\n block.type === \"text\" || block.type === \"thinking\",\n )\n .map((block) => (block.type === \"text\" ? block.text : block.thinking))\n .join(\"\\n\\n\")\n }\n\n const contentParts: Array<ContentPart> = []\n for (const block of content) {\n switch (block.type) {\n case \"text\": {\n contentParts.push({ type: \"text\", text: block.text })\n\n break\n }\n case \"thinking\": {\n contentParts.push({ type: \"text\", text: block.thinking })\n\n break\n }\n case \"image\": {\n contentParts.push({\n type: \"image_url\",\n image_url: {\n url: `data:${block.source.media_type};base64,${block.source.data}`,\n },\n })\n\n break\n }\n // No default\n }\n }\n return contentParts\n}\n\n// Truncate tool name to fit OpenAI's 64-character limit\n// Uses consistent truncation with hash suffix to avoid collisions\nfunction getTruncatedToolName(\n originalName: string,\n toolNameMapping: ToolNameMapping,\n): string {\n // If already within limit, return as-is\n if (originalName.length <= OPENAI_TOOL_NAME_LIMIT) {\n return originalName\n }\n\n // Check if we've already truncated this name\n const existingTruncated =\n toolNameMapping.originalToTruncated.get(originalName)\n if (existingTruncated) {\n return existingTruncated\n }\n\n // Create a simple hash suffix from the original name\n // Use last 8 chars of a simple hash to ensure uniqueness\n let hash = 0\n for (let i = 0; i < originalName.length; i++) {\n const char = originalName.codePointAt(i) ?? 0\n hash = (hash << 5) - hash + char\n hash = hash & hash // Convert to 32bit integer\n }\n const hashSuffix = Math.abs(hash).toString(36).slice(0, 8)\n\n // Truncate: leave room for \"_\" + 8-char hash = 9 chars\n const truncatedName =\n originalName.slice(0, OPENAI_TOOL_NAME_LIMIT - 9) + \"_\" + hashSuffix\n\n // Store mapping in both directions\n toolNameMapping.truncatedToOriginal.set(truncatedName, originalName)\n toolNameMapping.originalToTruncated.set(originalName, truncatedName)\n\n consola.debug(`Truncated tool name: \"${originalName}\" -> \"${truncatedName}\"`)\n\n return truncatedName\n}\n\nfunction translateAnthropicToolsToOpenAI(\n anthropicTools: Array<AnthropicTool> | undefined,\n toolNameMapping: ToolNameMapping,\n): Array<Tool> | undefined {\n if (!anthropicTools) {\n return undefined\n }\n return anthropicTools.map((tool) => ({\n type: \"function\",\n function: {\n name: getTruncatedToolName(tool.name, toolNameMapping),\n description: tool.description,\n parameters: tool.input_schema,\n },\n }))\n}\n\nfunction translateAnthropicToolChoiceToOpenAI(\n anthropicToolChoice: AnthropicMessagesPayload[\"tool_choice\"],\n toolNameMapping: ToolNameMapping,\n): ChatCompletionsPayload[\"tool_choice\"] {\n if (!anthropicToolChoice) {\n return undefined\n }\n\n switch (anthropicToolChoice.type) {\n case \"auto\": {\n return \"auto\"\n }\n case \"any\": {\n return \"required\"\n }\n case \"tool\": {\n if (anthropicToolChoice.name) {\n return {\n type: \"function\",\n function: {\n name: getTruncatedToolName(\n anthropicToolChoice.name,\n toolNameMapping,\n ),\n },\n }\n }\n return undefined\n }\n case \"none\": {\n return \"none\"\n }\n default: {\n return undefined\n }\n }\n}\n\n// Response translation\n\n/** Create empty response for edge case of no choices */\nfunction createEmptyResponse(\n response: ChatCompletionResponse,\n): AnthropicResponse {\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: [],\n stop_reason: \"end_turn\",\n stop_sequence: 
null,\n usage: {\n input_tokens: response.usage?.prompt_tokens ?? 0,\n output_tokens: response.usage?.completion_tokens ?? 0,\n },\n }\n}\n\n/** Build usage object from response */\nfunction buildUsageObject(response: ChatCompletionResponse) {\n const cachedTokens = response.usage?.prompt_tokens_details?.cached_tokens\n return {\n input_tokens: (response.usage?.prompt_tokens ?? 0) - (cachedTokens ?? 0),\n output_tokens: response.usage?.completion_tokens ?? 0,\n ...(cachedTokens !== undefined && {\n cache_read_input_tokens: cachedTokens,\n }),\n }\n}\n\nexport function translateToAnthropic(\n response: ChatCompletionResponse,\n toolNameMapping?: ToolNameMapping,\n): AnthropicResponse {\n // Handle edge case of empty choices array\n if (response.choices.length === 0) {\n return createEmptyResponse(response)\n }\n\n // Merge content from all choices\n const allTextBlocks: Array<AnthropicTextBlock> = []\n const allToolUseBlocks: Array<AnthropicToolUseBlock> = []\n let stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null =\n null // default\n stopReason = response.choices[0]?.finish_reason ?? stopReason\n\n // Process all choices to extract text and tool use blocks\n for (const choice of response.choices) {\n const textBlocks = getAnthropicTextBlocks(choice.message.content)\n const toolUseBlocks = getAnthropicToolUseBlocks(\n choice.message.tool_calls,\n toolNameMapping,\n )\n\n allTextBlocks.push(...textBlocks)\n allToolUseBlocks.push(...toolUseBlocks)\n\n // Use the finish_reason from the first choice, or prioritize tool_calls\n if (choice.finish_reason === \"tool_calls\" || stopReason === \"stop\") {\n stopReason = choice.finish_reason\n }\n }\n\n // Note: GitHub Copilot doesn't generate thinking blocks, so we don't include them in responses\n\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: [...allTextBlocks, ...allToolUseBlocks],\n stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),\n stop_sequence: null,\n usage: buildUsageObject(response),\n }\n}\n\nfunction getAnthropicTextBlocks(\n messageContent: Message[\"content\"],\n): Array<AnthropicTextBlock> {\n if (typeof messageContent === \"string\") {\n return [{ type: \"text\", text: messageContent }]\n }\n\n if (Array.isArray(messageContent)) {\n return messageContent\n .filter((part): part is TextPart => part.type === \"text\")\n .map((part) => ({ type: \"text\", text: part.text }))\n }\n\n return []\n}\n\nfunction getAnthropicToolUseBlocks(\n toolCalls: Array<ToolCall> | undefined,\n toolNameMapping?: ToolNameMapping,\n): Array<AnthropicToolUseBlock> {\n if (!toolCalls) {\n return []\n }\n return toolCalls.map((toolCall) => {\n let input: Record<string, unknown> = {}\n try {\n input = JSON.parse(toolCall.function.arguments) as Record<string, unknown>\n } catch (error) {\n consola.warn(\n `Failed to parse tool call arguments for ${toolCall.function.name}:`,\n error,\n )\n }\n\n // Restore original tool name if it was truncated\n const originalName =\n toolNameMapping?.truncatedToOriginal.get(toolCall.function.name)\n ?? 
toolCall.function.name\n\n return {\n type: \"tool_use\",\n id: toolCall.id,\n name: originalName,\n input,\n }\n })\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\n\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\n\nimport { type AnthropicMessagesPayload } from \"./anthropic-types\"\nimport { translateToOpenAI } from \"./non-stream-translation\"\n\n/**\n * Handles token counting for Anthropic messages\n */\nexport async function handleCountTokens(c: Context) {\n try {\n const anthropicBeta = c.req.header(\"anthropic-beta\")\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n\n const { payload: openAIPayload } = translateToOpenAI(anthropicPayload)\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === anthropicPayload.model,\n )\n\n if (!selectedModel) {\n consola.warn(\"Model not found, returning default token count\")\n return c.json({\n input_tokens: 1,\n })\n }\n\n const tokenCount = await getTokenCount(openAIPayload, selectedModel)\n\n if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {\n let mcpToolExist = false\n if (anthropicBeta?.startsWith(\"claude-code\")) {\n mcpToolExist = anthropicPayload.tools.some((tool) =>\n tool.name.startsWith(\"mcp__\"),\n )\n }\n if (!mcpToolExist) {\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // Base token overhead for tool use capability\n // See: https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview#pricing\n tokenCount.input = tokenCount.input + 346\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n // Estimated base token overhead for Grok tool use (empirically determined)\n tokenCount.input = tokenCount.input + 480\n }\n }\n }\n\n let finalTokenCount = tokenCount.input + tokenCount.output\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // Apply 15% buffer for Claude models to account for tokenization differences\n // between the GPT tokenizer used here and Claude's actual tokenizer\n finalTokenCount = Math.round(finalTokenCount * 1.15)\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n // Apply 3% buffer for Grok models (smaller difference from GPT tokenizer)\n finalTokenCount = Math.round(finalTokenCount * 1.03)\n }\n\n consola.debug(\"Token count:\", finalTokenCount)\n\n return c.json({\n input_tokens: finalTokenCount,\n })\n } catch (error) {\n consola.error(\"Error counting tokens:\", error)\n return c.json({\n input_tokens: 1,\n })\n }\n}\n","import { type ChatCompletionChunk } from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicStreamEventData,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport { type ToolNameMapping } from \"./non-stream-translation\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\nfunction isToolBlockOpen(state: AnthropicStreamState): boolean {\n if (!state.contentBlockOpen) {\n return false\n }\n // Check if the current block index corresponds to any known tool call\n return Object.values(state.toolCalls).some(\n (tc) => tc.anthropicBlockIndex === state.contentBlockIndex,\n )\n}\n\n// eslint-disable-next-line max-lines-per-function, complexity\nexport function translateChunkToAnthropicEvents(\n chunk: ChatCompletionChunk,\n state: AnthropicStreamState,\n toolNameMapping?: ToolNameMapping,\n): Array<AnthropicStreamEventData> {\n const events: Array<AnthropicStreamEventData> = []\n\n // Skip chunks with empty choices (e.g., first chunk with prompt_filter_results)\n if 
(chunk.choices.length === 0) {\n // Store model for later if available (some chunks have model but empty choices)\n if (chunk.model && !state.model) {\n state.model = chunk.model\n }\n return events\n }\n\n const choice = chunk.choices[0]\n const { delta } = choice\n\n if (!state.messageStartSent) {\n // Use model from current chunk, or from stored state (from earlier empty chunk)\n const model = chunk.model || state.model || \"unknown\"\n events.push({\n type: \"message_start\",\n message: {\n id: chunk.id || `msg_${Date.now()}`,\n type: \"message\",\n role: \"assistant\",\n content: [],\n model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: 0, // Will be updated in message_delta when finished\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n })\n state.messageStartSent = true\n }\n\n if (delta.content) {\n if (isToolBlockOpen(state)) {\n // A tool block was open, so close it before starting a text block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n if (!state.contentBlockOpen) {\n events.push({\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"text\",\n text: \"\",\n },\n })\n state.contentBlockOpen = true\n }\n\n events.push({\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"text_delta\",\n text: delta.content,\n },\n })\n }\n\n if (delta.tool_calls) {\n for (const toolCall of delta.tool_calls) {\n if (toolCall.id && toolCall.function?.name) {\n // New tool call starting.\n if (state.contentBlockOpen) {\n // Close any previously open block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n // Restore original tool name if it was truncated\n const originalName =\n toolNameMapping?.truncatedToOriginal.get(toolCall.function.name)\n ?? toolCall.function.name\n\n const anthropicBlockIndex = state.contentBlockIndex\n state.toolCalls[toolCall.index] = {\n id: toolCall.id,\n name: originalName,\n anthropicBlockIndex,\n }\n\n events.push({\n type: \"content_block_start\",\n index: anthropicBlockIndex,\n content_block: {\n type: \"tool_use\",\n id: toolCall.id,\n name: originalName,\n input: {},\n },\n })\n state.contentBlockOpen = true\n }\n\n if (toolCall.function?.arguments) {\n const toolCallInfo = state.toolCalls[toolCall.index]\n // Tool call can still be empty\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (toolCallInfo) {\n events.push({\n type: \"content_block_delta\",\n index: toolCallInfo.anthropicBlockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: toolCall.function.arguments,\n },\n })\n }\n }\n }\n }\n\n if (choice.finish_reason) {\n if (state.contentBlockOpen) {\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockOpen = false\n }\n\n events.push(\n {\n type: \"message_delta\",\n delta: {\n stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),\n stop_sequence: null,\n },\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 
0),\n output_tokens: chunk.usage?.completion_tokens ?? 0,\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n {\n type: \"message_stop\",\n },\n )\n }\n\n return events\n}\n\nexport function translateErrorToAnthropicErrorEvent(): AnthropicStreamEventData {\n return {\n type: \"error\",\n error: {\n type: \"api_error\",\n message: \"An unexpected error occurred during streaming.\",\n },\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport {\n autoCompact,\n checkNeedsCompaction,\n createCompactionMarker,\n type AutoCompactResult,\n} from \"~/lib/auto-compact\"\nimport {\n type MessageContent,\n recordRequest,\n recordResponse,\n} from \"~/lib/history\"\nimport { executeWithRateLimit } from \"~/lib/queue\"\nimport { state } from \"~/lib/state\"\nimport { requestTracker } from \"~/lib/tui\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicMessagesPayload,\n type AnthropicStreamState,\n type AnthropicStreamEventData,\n} from \"./anthropic-types\"\nimport {\n translateToAnthropic,\n translateToOpenAI,\n type ToolNameMapping,\n} from \"./non-stream-translation\"\nimport {\n translateChunkToAnthropicEvents,\n translateErrorToAnthropicErrorEvent,\n} from \"./stream-translation\"\n\n/** Context for recording responses and tracking */\ninterface ResponseContext {\n historyId: string\n trackingId: string | undefined\n startTime: number\n compactResult?: AutoCompactResult\n}\n\nexport async function handleCompletion(c: Context) {\n const startTime = Date.now()\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n consola.debug(\"Anthropic request payload:\", JSON.stringify(anthropicPayload))\n\n // Update TUI tracker with model info\n const trackingId = c.get(\"trackingId\") as string | undefined\n updateTrackerModel(trackingId, anthropicPayload.model)\n\n // Record request to history with full message content\n const historyId = recordRequest(\"anthropic\", {\n model: anthropicPayload.model,\n messages: convertAnthropicMessages(anthropicPayload.messages),\n stream: anthropicPayload.stream ?? 
false,\n tools: anthropicPayload.tools?.map((t) => ({\n name: t.name,\n description: t.description,\n })),\n max_tokens: anthropicPayload.max_tokens,\n temperature: anthropicPayload.temperature,\n system: extractSystemPrompt(anthropicPayload.system),\n })\n\n const ctx: ResponseContext = { historyId, trackingId, startTime }\n\n const { payload: translatedPayload, toolNameMapping } =\n translateToOpenAI(anthropicPayload)\n consola.debug(\n \"Translated OpenAI request payload:\",\n JSON.stringify(translatedPayload),\n )\n\n // Auto-compact if enabled and needed\n const selectedModel = state.models?.data.find(\n (model) => model.id === translatedPayload.model,\n )\n\n const { finalPayload: openAIPayload, compactResult } =\n await buildFinalPayload(translatedPayload, selectedModel)\n if (compactResult) {\n ctx.compactResult = compactResult\n }\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n try {\n const response = await executeWithRateLimit(state, () =>\n createChatCompletions(openAIPayload),\n )\n\n if (isNonStreaming(response)) {\n return handleNonStreamingResponse({ c, response, toolNameMapping, ctx })\n }\n\n consola.debug(\"Streaming response from Copilot\")\n updateTrackerStatus(trackingId, \"streaming\")\n\n return streamSSE(c, async (stream) => {\n await handleStreamingResponse({\n stream,\n response,\n toolNameMapping,\n anthropicPayload,\n ctx,\n })\n })\n } catch (error) {\n recordErrorResponse(ctx, anthropicPayload.model, error)\n throw error\n }\n}\n\n// Helper to update tracker model\nfunction updateTrackerModel(trackingId: string | undefined, model: string) {\n if (!trackingId) return\n const request = requestTracker.getRequest(trackingId)\n if (request) request.model = model\n}\n\n// Build final payload with auto-compact if needed\nasync function buildFinalPayload(\n payload: ChatCompletionsPayload,\n model: Parameters<typeof checkNeedsCompaction>[1] | undefined,\n): Promise<{\n finalPayload: ChatCompletionsPayload\n compactResult: AutoCompactResult | null\n}> {\n if (!state.autoCompact || !model) {\n if (state.autoCompact && !model) {\n consola.warn(\n `Auto-compact: Model '${payload.model}' not found in cached models, skipping`,\n )\n }\n return { finalPayload: payload, compactResult: null }\n }\n\n try {\n const check = await checkNeedsCompaction(payload, model)\n consola.debug(\n `Auto-compact check: ${check.currentTokens} tokens, limit ${check.limit}, needed: ${check.needed}`,\n )\n if (!check.needed) {\n return { finalPayload: payload, compactResult: null }\n }\n\n consola.info(\n `Auto-compact triggered: ${check.currentTokens} tokens > ${check.limit} limit`,\n )\n const compactResult = await autoCompact(payload, model)\n return { finalPayload: compactResult.payload, compactResult }\n } catch (error) {\n consola.warn(\n \"Auto-compact failed, proceeding with original payload:\",\n error,\n )\n return { finalPayload: payload, compactResult: null }\n }\n}\n\n// Helper to update tracker status\nfunction updateTrackerStatus(\n trackingId: string | undefined,\n status: \"executing\" | \"streaming\",\n) {\n if (!trackingId) return\n requestTracker.updateRequest(trackingId, { status })\n}\n\n// Record error response to history\nfunction recordErrorResponse(\n ctx: ResponseContext,\n model: string,\n error: unknown,\n) {\n recordResponse(\n ctx.historyId,\n {\n success: false,\n model,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? 
error.message : \"Unknown error\",\n content: null,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n/** Options for handleNonStreamingResponse */\ninterface NonStreamingOptions {\n c: Context\n response: ChatCompletionResponse\n toolNameMapping: ToolNameMapping\n ctx: ResponseContext\n}\n\n// Handle non-streaming response\nfunction handleNonStreamingResponse(opts: NonStreamingOptions) {\n const { c, response, toolNameMapping, ctx } = opts\n consola.debug(\n \"Non-streaming response from Copilot:\",\n JSON.stringify(response).slice(-400),\n )\n let anthropicResponse = translateToAnthropic(response, toolNameMapping)\n consola.debug(\n \"Translated Anthropic response:\",\n JSON.stringify(anthropicResponse),\n )\n\n // Append compaction marker if auto-compact was performed\n if (ctx.compactResult?.wasCompacted) {\n const marker = createCompactionMarker(ctx.compactResult)\n anthropicResponse = appendMarkerToAnthropicResponse(\n anthropicResponse,\n marker,\n )\n }\n\n recordResponse(\n ctx.historyId,\n {\n success: true,\n model: anthropicResponse.model,\n usage: anthropicResponse.usage,\n stop_reason: anthropicResponse.stop_reason ?? undefined,\n content: {\n role: \"assistant\",\n content: anthropicResponse.content.map((block) => {\n if (block.type === \"text\") {\n return { type: \"text\", text: block.text }\n }\n if (block.type === \"tool_use\") {\n return {\n type: \"tool_use\",\n id: block.id,\n name: block.name,\n input: JSON.stringify(block.input),\n }\n }\n return { type: block.type }\n }),\n },\n toolCalls: extractToolCallsFromContent(anthropicResponse.content),\n },\n Date.now() - ctx.startTime,\n )\n\n if (ctx.trackingId) {\n requestTracker.updateRequest(ctx.trackingId, {\n inputTokens: anthropicResponse.usage.input_tokens,\n outputTokens: anthropicResponse.usage.output_tokens,\n })\n }\n\n return c.json(anthropicResponse)\n}\n\n// Append marker to Anthropic response content\nfunction appendMarkerToAnthropicResponse(\n response: ReturnType<typeof translateToAnthropic>,\n marker: string,\n): ReturnType<typeof translateToAnthropic> {\n // Find last text block and append, or add new text block\n const content = [...response.content]\n const lastTextIndex = content.findLastIndex((block) => block.type === \"text\")\n\n if (lastTextIndex !== -1) {\n const textBlock = content[lastTextIndex]\n if (textBlock.type === \"text\") {\n content[lastTextIndex] = {\n ...textBlock,\n text: textBlock.text + marker,\n }\n }\n } else {\n // No text block found, add one\n content.push({ type: \"text\", text: marker })\n }\n\n return { ...response, content }\n}\n\n/** Stream accumulator for Anthropic format */\ninterface AnthropicStreamAccumulator {\n model: string\n inputTokens: number\n outputTokens: number\n stopReason: string\n content: string\n toolCalls: Array<{ id: string; name: string; input: string }>\n currentToolCall: { id: string; name: string; input: string } | null\n}\n\nfunction createAnthropicStreamAccumulator(): AnthropicStreamAccumulator {\n return {\n model: \"\",\n inputTokens: 0,\n outputTokens: 0,\n stopReason: \"\",\n content: \"\",\n toolCalls: [],\n currentToolCall: null,\n }\n}\n\n/** Options for handleStreamingResponse */\ninterface StreamHandlerOptions {\n stream: { writeSSE: (msg: { event: string; data: string }) => Promise<void> }\n response: AsyncIterable<{ data?: string }>\n toolNameMapping: ToolNameMapping\n anthropicPayload: AnthropicMessagesPayload\n ctx: ResponseContext\n}\n\n// Handle streaming response\nasync function handleStreamingResponse(opts: StreamHandlerOptions) 
{\n const { stream, response, toolNameMapping, anthropicPayload, ctx } = opts\n const streamState: AnthropicStreamState = {\n messageStartSent: false,\n contentBlockIndex: 0,\n contentBlockOpen: false,\n toolCalls: {},\n }\n const acc = createAnthropicStreamAccumulator()\n\n try {\n await processStreamChunks({\n stream,\n response,\n toolNameMapping,\n streamState,\n acc,\n })\n\n // Append compaction marker as final content block if auto-compact was performed\n if (ctx.compactResult?.wasCompacted) {\n const marker = createCompactionMarker(ctx.compactResult)\n await sendCompactionMarkerEvent(stream, streamState, marker)\n acc.content += marker\n }\n\n recordStreamingResponse(acc, anthropicPayload.model, ctx)\n completeTracking(ctx.trackingId, acc.inputTokens, acc.outputTokens)\n } catch (error) {\n consola.error(\"Stream error:\", error)\n recordStreamingError({\n acc,\n fallbackModel: anthropicPayload.model,\n ctx,\n error,\n })\n failTracking(ctx.trackingId, error)\n\n const errorEvent = translateErrorToAnthropicErrorEvent()\n await stream.writeSSE({\n event: errorEvent.type,\n data: JSON.stringify(errorEvent),\n })\n }\n}\n\n// Send compaction marker as Anthropic SSE events\nasync function sendCompactionMarkerEvent(\n stream: { writeSSE: (msg: { event: string; data: string }) => Promise<void> },\n streamState: AnthropicStreamState,\n marker: string,\n) {\n // Start a new content block for the marker\n const blockStartEvent = {\n type: \"content_block_start\",\n index: streamState.contentBlockIndex,\n content_block: { type: \"text\", text: \"\" },\n }\n await stream.writeSSE({\n event: \"content_block_start\",\n data: JSON.stringify(blockStartEvent),\n })\n\n // Send the marker text as a delta\n const deltaEvent = {\n type: \"content_block_delta\",\n index: streamState.contentBlockIndex,\n delta: { type: \"text_delta\", text: marker },\n }\n await stream.writeSSE({\n event: \"content_block_delta\",\n data: JSON.stringify(deltaEvent),\n })\n\n // Stop the content block\n const blockStopEvent = {\n type: \"content_block_stop\",\n index: streamState.contentBlockIndex,\n }\n await stream.writeSSE({\n event: \"content_block_stop\",\n data: JSON.stringify(blockStopEvent),\n })\n\n streamState.contentBlockIndex++\n}\n\n/** Options for processing stream chunks */\ninterface ProcessChunksOptions {\n stream: { writeSSE: (msg: { event: string; data: string }) => Promise<void> }\n response: AsyncIterable<{ data?: string }>\n toolNameMapping: ToolNameMapping\n streamState: AnthropicStreamState\n acc: AnthropicStreamAccumulator\n}\n\n// Process all stream chunks\nasync function processStreamChunks(opts: ProcessChunksOptions) {\n const { stream, response, toolNameMapping, streamState, acc } = opts\n for await (const rawEvent of response) {\n consola.debug(\"Copilot raw stream event:\", JSON.stringify(rawEvent))\n if (rawEvent.data === \"[DONE]\") break\n if (!rawEvent.data) continue\n\n let chunk: ChatCompletionChunk\n try {\n chunk = JSON.parse(rawEvent.data) as ChatCompletionChunk\n } catch (parseError) {\n consola.error(\"Failed to parse stream chunk:\", parseError, rawEvent.data)\n continue\n }\n\n if (chunk.model && !acc.model) acc.model = chunk.model\n\n const events = translateChunkToAnthropicEvents(\n chunk,\n streamState,\n toolNameMapping,\n )\n\n for (const event of events) {\n consola.debug(\"Translated Anthropic event:\", JSON.stringify(event))\n processAnthropicEvent(event, acc)\n await stream.writeSSE({\n event: event.type,\n data: JSON.stringify(event),\n })\n }\n }\n}\n\n// Process a 
single Anthropic event for accumulation\nfunction processAnthropicEvent(\n event: AnthropicStreamEventData,\n acc: AnthropicStreamAccumulator,\n) {\n switch (event.type) {\n case \"content_block_delta\": {\n handleContentBlockDelta(event.delta, acc)\n break\n }\n case \"content_block_start\": {\n handleContentBlockStart(event.content_block, acc)\n break\n }\n case \"content_block_stop\": {\n handleContentBlockStop(acc)\n break\n }\n case \"message_delta\": {\n handleMessageDelta(event.delta, event.usage, acc)\n break\n }\n default: {\n break\n }\n }\n}\n\n// Content block delta types\ntype ContentBlockDelta =\n | { type: \"text_delta\"; text: string }\n | { type: \"input_json_delta\"; partial_json: string }\n | { type: \"thinking_delta\"; thinking: string }\n | { type: \"signature_delta\"; signature: string }\n\nfunction handleContentBlockDelta(\n delta: ContentBlockDelta,\n acc: AnthropicStreamAccumulator,\n) {\n if (delta.type === \"text_delta\") {\n acc.content += delta.text\n } else if (delta.type === \"input_json_delta\" && acc.currentToolCall) {\n acc.currentToolCall.input += delta.partial_json\n }\n // thinking_delta and signature_delta are ignored for accumulation\n}\n\n// Content block types from anthropic-types.ts\ntype ContentBlock =\n | { type: \"text\"; text: string }\n | {\n type: \"tool_use\"\n id: string\n name: string\n input: Record<string, unknown>\n }\n | { type: \"thinking\"; thinking: string }\n\nfunction handleContentBlockStart(\n block: ContentBlock,\n acc: AnthropicStreamAccumulator,\n) {\n if (block.type === \"tool_use\") {\n acc.currentToolCall = {\n id: block.id,\n name: block.name,\n input: \"\",\n }\n }\n}\n\nfunction handleContentBlockStop(acc: AnthropicStreamAccumulator) {\n if (acc.currentToolCall) {\n acc.toolCalls.push(acc.currentToolCall)\n acc.currentToolCall = null\n }\n}\n\n// Message delta types\ninterface MessageDelta {\n stop_reason?: string | null\n stop_sequence?: string | null\n}\n\ninterface MessageUsage {\n input_tokens?: number\n output_tokens: number\n cache_creation_input_tokens?: number\n cache_read_input_tokens?: number\n}\n\nfunction handleMessageDelta(\n delta: MessageDelta,\n usage: MessageUsage | undefined,\n acc: AnthropicStreamAccumulator,\n) {\n if (delta.stop_reason) acc.stopReason = delta.stop_reason\n if (usage) {\n acc.inputTokens = usage.input_tokens ?? 0\n acc.outputTokens = usage.output_tokens\n }\n}\n\n// Record streaming response to history\nfunction recordStreamingResponse(\n acc: AnthropicStreamAccumulator,\n fallbackModel: string,\n ctx: ResponseContext,\n) {\n const contentBlocks: Array<{ type: string; text?: string }> = []\n if (acc.content) contentBlocks.push({ type: \"text\", text: acc.content })\n for (const tc of acc.toolCalls) {\n contentBlocks.push({ type: \"tool_use\", ...tc })\n }\n\n recordResponse(\n ctx.historyId,\n {\n success: true,\n model: acc.model || fallbackModel,\n usage: { input_tokens: acc.inputTokens, output_tokens: acc.outputTokens },\n stop_reason: acc.stopReason || undefined,\n content:\n contentBlocks.length > 0 ?\n { role: \"assistant\", content: contentBlocks }\n : null,\n toolCalls: acc.toolCalls.length > 0 ? 
acc.toolCalls : undefined,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n// Record streaming error to history\nfunction recordStreamingError(opts: {\n acc: AnthropicStreamAccumulator\n fallbackModel: string\n ctx: ResponseContext\n error: unknown\n}) {\n const { acc, fallbackModel, ctx, error } = opts\n recordResponse(\n ctx.historyId,\n {\n success: false,\n model: acc.model || fallbackModel,\n usage: { input_tokens: 0, output_tokens: 0 },\n error: error instanceof Error ? error.message : \"Stream error\",\n content: null,\n },\n Date.now() - ctx.startTime,\n )\n}\n\n// Complete TUI tracking\nfunction completeTracking(\n trackingId: string | undefined,\n inputTokens: number,\n outputTokens: number,\n) {\n if (!trackingId) return\n requestTracker.updateRequest(trackingId, { inputTokens, outputTokens })\n requestTracker.completeRequest(trackingId, 200, { inputTokens, outputTokens })\n}\n\n// Fail TUI tracking\nfunction failTracking(trackingId: string | undefined, error: unknown) {\n if (!trackingId) return\n requestTracker.failRequest(\n trackingId,\n error instanceof Error ? error.message : \"Stream error\",\n )\n}\n\n// Convert Anthropic messages to history MessageContent format\nfunction convertAnthropicMessages(\n messages: AnthropicMessagesPayload[\"messages\"],\n): Array<MessageContent> {\n return messages.map((msg) => {\n if (typeof msg.content === \"string\") {\n return { role: msg.role, content: msg.content }\n }\n\n // Convert content blocks\n const content = msg.content.map((block) => {\n if (block.type === \"text\") {\n return { type: \"text\", text: block.text }\n }\n if (block.type === \"tool_use\") {\n return {\n type: \"tool_use\",\n id: block.id,\n name: block.name,\n input: JSON.stringify(block.input),\n }\n }\n if (block.type === \"tool_result\") {\n const resultContent =\n typeof block.content === \"string\" ?\n block.content\n : block.content\n .map((c) => (c.type === \"text\" ? c.text : `[${c.type}]`))\n .join(\"\\n\")\n return {\n type: \"tool_result\",\n tool_use_id: block.tool_use_id,\n content: resultContent,\n }\n }\n return { type: block.type }\n })\n\n return { role: msg.role, content }\n })\n}\n\n// Extract system prompt from Anthropic format\nfunction extractSystemPrompt(\n system: AnthropicMessagesPayload[\"system\"],\n): string | undefined {\n if (!system) return undefined\n if (typeof system === \"string\") return system\n return system.map((block) => block.text).join(\"\\n\")\n}\n\n// Extract tool calls from response content\nfunction extractToolCallsFromContent(\n content: Array<unknown>,\n): Array<{ id: string; name: string; input: string }> | undefined {\n const tools: Array<{ id: string; name: string; input: string }> = []\n for (const block of content) {\n if (\n typeof block === \"object\"\n && block !== null\n && \"type\" in block\n && block.type === \"tool_use\"\n && \"id\" in block\n && \"name\" in block\n && \"input\" in block\n ) {\n tools.push({\n id: String(block.id),\n name: String(block.name),\n input: JSON.stringify(block.input),\n })\n }\n }\n return tools.length > 0 ? 
tools : undefined\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCountTokens } from \"./count-tokens-handler\"\nimport { handleCompletion } from \"./handler\"\n\nexport const messageRoutes = new Hono()\n\nmessageRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n\nmessageRoutes.post(\"/count_tokens\", async (c) => {\n try {\n return await handleCountTokens(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\nimport { cacheModels } from \"~/lib/utils\"\n\nexport const modelRoutes = new Hono()\n\nmodelRoutes.get(\"/\", async (c) => {\n try {\n if (!state.models) {\n // This should be handled by startup logic, but as a fallback.\n await cacheModels()\n }\n\n const models = state.models?.data.map((model) => ({\n id: model.id,\n object: \"model\",\n type: \"model\",\n created: 0, // No date available from source\n created_at: new Date(0).toISOString(), // No date available from source\n owned_by: model.vendor,\n display_name: model.name,\n // Include capabilities for clients that need token limit info\n capabilities: {\n family: model.capabilities.family,\n type: model.capabilities.type,\n tokenizer: model.capabilities.tokenizer,\n limits: {\n max_context_window_tokens:\n model.capabilities.limits.max_context_window_tokens,\n max_output_tokens: model.capabilities.limits.max_output_tokens,\n max_prompt_tokens: model.capabilities.limits.max_prompt_tokens,\n },\n supports: {\n tool_calls: model.capabilities.supports.tool_calls,\n parallel_tool_calls: model.capabilities.supports.parallel_tool_calls,\n },\n },\n }))\n\n return c.json({\n object: \"list\",\n data: models,\n has_more: false,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const tokenRoute = new Hono()\n\ntokenRoute.get(\"/\", async (c) => {\n try {\n return c.json({\n token: state.copilotToken,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { getCopilotUsage } from \"~/services/github/get-copilot-usage\"\n\nexport const usageRoute = new Hono()\n\nusageRoute.get(\"/\", async (c) => {\n try {\n const usage = await getCopilotUsage()\n return c.json(usage)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\nimport { cors } from \"hono/cors\"\n\nimport { state } from \"./lib/state\"\nimport { tuiLogger } from \"./lib/tui\"\nimport { completionRoutes } from \"./routes/chat-completions/route\"\nimport { embeddingRoutes } from \"./routes/embeddings/route\"\nimport { eventLoggingRoutes } from \"./routes/event-logging/route\"\nimport { historyRoutes } from \"./routes/history/route\"\nimport { messageRoutes } from \"./routes/messages/route\"\nimport { modelRoutes } from \"./routes/models/route\"\nimport { tokenRoute } from \"./routes/token/route\"\nimport { usageRoute } from \"./routes/usage/route\"\n\nexport const server = new 
Hono()\n\nserver.use(tuiLogger())\nserver.use(cors())\n\nserver.get(\"/\", (c) => c.text(\"Server running\"))\n\n// Health check endpoint for container orchestration (Docker, Kubernetes)\nserver.get(\"/health\", (c) => {\n const healthy = Boolean(state.copilotToken && state.githubToken)\n return c.json(\n {\n status: healthy ? \"healthy\" : \"unhealthy\",\n checks: {\n copilotToken: Boolean(state.copilotToken),\n githubToken: Boolean(state.githubToken),\n models: Boolean(state.models),\n },\n },\n healthy ? 200 : 503,\n )\n})\n\nserver.route(\"/chat/completions\", completionRoutes)\nserver.route(\"/models\", modelRoutes)\nserver.route(\"/embeddings\", embeddingRoutes)\nserver.route(\"/usage\", usageRoute)\nserver.route(\"/token\", tokenRoute)\n\n// Compatibility with tools that expect v1/ prefix\nserver.route(\"/v1/chat/completions\", completionRoutes)\nserver.route(\"/v1/models\", modelRoutes)\nserver.route(\"/v1/embeddings\", embeddingRoutes)\n\n// Anthropic compatible endpoints\nserver.route(\"/v1/messages\", messageRoutes)\nserver.route(\"/api/event_logging\", eventLoggingRoutes)\n\n// History viewer (optional, enabled with --history flag)\nserver.route(\"/history\", historyRoutes)\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport clipboard from \"clipboardy\"\nimport consola from \"consola\"\nimport { serve, type ServerHandler } from \"srvx\"\nimport invariant from \"tiny-invariant\"\n\nimport { initHistory } from \"./lib/history\"\nimport { ensurePaths } from \"./lib/paths\"\nimport { initProxyFromEnv } from \"./lib/proxy\"\nimport { generateEnvScript } from \"./lib/shell\"\nimport { state } from \"./lib/state\"\nimport { setupCopilotToken, setupGitHubToken } from \"./lib/token\"\nimport { initTui, type TuiMode } from \"./lib/tui\"\nimport { cacheModels, cacheVSCodeVersion } from \"./lib/utils\"\nimport { server } from \"./server\"\n\ninterface RunServerOptions {\n port: number\n host?: string\n verbose: boolean\n accountType: string\n manual: boolean\n rateLimit?: number\n rateLimitWait: boolean\n githubToken?: string\n claudeCode: boolean\n showToken: boolean\n proxyEnv: boolean\n history: boolean\n historyLimit: number\n tui: TuiMode\n autoCompact: boolean\n}\n\nexport async function runServer(options: RunServerOptions): Promise<void> {\n if (options.proxyEnv) {\n initProxyFromEnv()\n }\n\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.accountType = options.accountType\n if (options.accountType !== \"individual\") {\n consola.info(`Using ${options.accountType} plan GitHub account`)\n }\n\n state.manualApprove = options.manual\n state.rateLimitSeconds = options.rateLimit\n state.rateLimitWait = options.rateLimitWait\n state.showToken = options.showToken\n state.autoCompact = options.autoCompact\n\n if (options.autoCompact) {\n consola.info(\n \"Auto-compact enabled: will compress context when exceeding token limits\",\n )\n }\n\n // Initialize history recording if enabled\n initHistory(options.history, options.historyLimit)\n if (options.history) {\n const limitText =\n options.historyLimit === 0 ? 
\"unlimited\" : `max ${options.historyLimit}`\n consola.info(`History recording enabled (${limitText} entries)`)\n }\n\n // Initialize TUI for request logging\n initTui({ enabled: true, mode: options.tui })\n\n await ensurePaths()\n await cacheVSCodeVersion()\n\n if (options.githubToken) {\n state.githubToken = options.githubToken\n consola.info(\"Using provided GitHub token\")\n } else {\n await setupGitHubToken()\n }\n\n await setupCopilotToken()\n await cacheModels()\n\n consola.info(\n `Available models: \\n${state.models?.data.map((model) => `- ${model.id}`).join(\"\\n\")}`,\n )\n\n const displayHost = options.host ?? \"localhost\"\n const serverUrl = `http://${displayHost}:${options.port}`\n\n if (options.claudeCode) {\n invariant(state.models, \"Models should be loaded by now\")\n\n const selectedModel = await consola.prompt(\n \"Select a model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const selectedSmallModel = await consola.prompt(\n \"Select a small model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const command = generateEnvScript(\n {\n ANTHROPIC_BASE_URL: serverUrl,\n ANTHROPIC_AUTH_TOKEN: \"dummy\",\n ANTHROPIC_MODEL: selectedModel,\n ANTHROPIC_DEFAULT_SONNET_MODEL: selectedModel,\n ANTHROPIC_SMALL_FAST_MODEL: selectedSmallModel,\n ANTHROPIC_DEFAULT_HAIKU_MODEL: selectedSmallModel,\n DISABLE_NON_ESSENTIAL_MODEL_CALLS: \"1\",\n CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC: \"1\",\n },\n \"claude\",\n )\n\n try {\n clipboard.writeSync(command)\n consola.success(\"Copied Claude Code command to clipboard!\")\n } catch {\n consola.warn(\n \"Failed to copy to clipboard. Here is the Claude Code command:\",\n )\n consola.log(command)\n }\n }\n\n consola.box(\n `🌐 Usage Viewer: https://ericc-ch.github.io/copilot-api?endpoint=${serverUrl}/usage${options.history ? `\\n📜 History UI: ${serverUrl}/history` : \"\"}`,\n )\n\n serve({\n fetch: server.fetch as ServerHandler,\n port: options.port,\n hostname: options.host,\n })\n}\n\nexport const start = defineCommand({\n meta: {\n name: \"start\",\n description: \"Start the Copilot API server\",\n },\n args: {\n port: {\n alias: \"p\",\n type: \"string\",\n default: \"4141\",\n description: \"Port to listen on\",\n },\n host: {\n alias: \"H\",\n type: \"string\",\n description:\n \"Host/interface to bind to (e.g., 127.0.0.1 for localhost only, 0.0.0.0 for all interfaces)\",\n },\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"account-type\": {\n alias: \"a\",\n type: \"string\",\n default: \"individual\",\n description: \"Account type to use (individual, business, enterprise)\",\n },\n manual: {\n type: \"boolean\",\n default: false,\n description: \"Enable manual request approval\",\n },\n \"rate-limit\": {\n alias: \"r\",\n type: \"string\",\n description: \"Rate limit in seconds between requests\",\n },\n wait: {\n alias: \"w\",\n type: \"boolean\",\n default: false,\n description:\n \"Wait instead of error when rate limit is hit. 
Has no effect if rate limit is not set\",\n },\n \"github-token\": {\n alias: \"g\",\n type: \"string\",\n description:\n \"Provide GitHub token directly (must be generated using the `auth` subcommand)\",\n },\n \"claude-code\": {\n alias: \"c\",\n type: \"boolean\",\n default: false,\n description:\n \"Generate a command to launch Claude Code with Copilot API config\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub and Copilot tokens on fetch and refresh\",\n },\n \"proxy-env\": {\n type: \"boolean\",\n default: false,\n description: \"Initialize proxy from environment variables\",\n },\n history: {\n type: \"boolean\",\n default: false,\n description: \"Enable request history recording and Web UI at /history\",\n },\n \"history-limit\": {\n type: \"string\",\n default: \"1000\",\n description:\n \"Maximum number of history entries to keep in memory (0 = unlimited)\",\n },\n tui: {\n type: \"string\",\n default: \"console\",\n description:\n \"TUI mode: 'console' for simple log output, 'fullscreen' for interactive terminal UI with tabs\",\n },\n \"auto-compact\": {\n type: \"boolean\",\n default: false,\n description:\n \"Automatically compress conversation history when exceeding model token limits\",\n },\n },\n run({ args }) {\n const rateLimitRaw = args[\"rate-limit\"]\n const rateLimit =\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n rateLimitRaw === undefined ? undefined : Number.parseInt(rateLimitRaw, 10)\n\n return runServer({\n port: Number.parseInt(args.port, 10),\n host: args.host,\n verbose: args.verbose,\n accountType: args[\"account-type\"],\n manual: args.manual,\n rateLimit,\n rateLimitWait: args.wait,\n githubToken: args[\"github-token\"],\n claudeCode: args[\"claude-code\"],\n showToken: args[\"show-token\"],\n proxyEnv: args[\"proxy-env\"],\n history: args.history,\n historyLimit: Number.parseInt(args[\"history-limit\"], 10),\n tui: args.tui as TuiMode,\n autoCompact: args[\"auto-compact\"],\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand, runMain } from \"citty\"\nimport consola from \"consola\"\n\nimport { auth } from \"./auth\"\nimport { checkUsage } from \"./check-usage\"\nimport { debug } from \"./debug\"\nimport { logout } from \"./logout\"\nimport { start } from \"./start\"\n\n// Configure consola to show timestamps in log output\nconsola.options.formatOptions.date = true\n\nconst main = defineCommand({\n meta: {\n name: \"copilot-api\",\n description:\n \"A wrapper around GitHub Copilot API to make it OpenAI compatible, making it usable for other tools.\",\n },\n subCommands: { auth, logout, start, \"check-usage\": checkUsage, debug },\n})\n\nawait 
runMain(main)\n"],"x_google_ignoreList":[20],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,MAAM,UAAU,KAAK,KAAK,GAAG,SAAS,EAAE,UAAU,SAAS,cAAc;AAEzE,MAAM,oBAAoB,KAAK,KAAK,SAAS,eAAe;AAE5D,MAAa,QAAQ;CACnB;CACA;CACD;AAED,eAAsB,cAA6B;AACjD,OAAM,GAAG,MAAM,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAClD,OAAM,WAAW,MAAM,kBAAkB;;AAG3C,eAAe,WAAW,UAAiC;AACzD,KAAI;AACF,QAAM,GAAG,OAAO,UAAU,GAAG,UAAU,KAAK;AAI5C,QAFc,MAAM,GAAG,KAAK,SAAS,EACX,OAAO,SACb,IAClB,OAAM,GAAG,MAAM,UAAU,IAAM;SAE3B;AACN,QAAM,GAAG,UAAU,UAAU,GAAG;AAChC,QAAM,GAAG,MAAM,UAAU,IAAM;;;;;;ACPnC,MAAaA,QAAe;CAC1B,aAAa;CACb,eAAe;CACf,eAAe;CACf,WAAW;CACX,aAAa;CACd;;;;ACxBD,MAAa,yBAAyB;CACpC,gBAAgB;CAChB,QAAQ;CACT;AAED,MAAM,kBAAkB;AACxB,MAAM,wBAAwB,gBAAgB;AAC9C,MAAM,aAAa,qBAAqB;AAExC,MAAM,cAAc;AAEpB,MAAa,kBAAkB,YAC7BC,QAAM,gBAAgB,eACpB,kCACA,eAAeA,QAAM,YAAY;AACrC,MAAa,kBAAkB,SAAc,SAAkB,UAAU;CACvE,MAAMC,UAAkC;EACtC,eAAe,UAAUD,QAAM;EAC/B,gBAAgB,iBAAiB,CAAC;EAClC,0BAA0B;EAC1B,kBAAkB,UAAUA,QAAM;EAClC,yBAAyB;EACzB,cAAc;EACd,iBAAiB;EACjB,wBAAwB;EACxB,gBAAgB,YAAY;EAC5B,uCAAuC;EACxC;AAED,KAAI,OAAQ,SAAQ,4BAA4B;AAEhD,QAAO;;AAGT,MAAa,sBAAsB;AACnC,MAAa,iBAAiB,aAAkB;CAC9C,GAAG,iBAAiB;CACpB,eAAe,SAASA,QAAM;CAC9B,kBAAkB,UAAUA,QAAM;CAClC,yBAAyB;CACzB,cAAc;CACd,wBAAwB;CACxB,uCAAuC;CACxC;AAED,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,oBAAoB,CAAC,YAAY,CAAC,KAAK,IAAI;;;;AC9CxD,IAAa,YAAb,MAAa,kBAAkB,MAAM;CACnC;CACA;CAEA,YAAY,SAAiB,QAAgB,cAAsB;AACjE,QAAM,QAAQ;AACd,OAAK,SAAS;AACd,OAAK,eAAe;;CAGtB,aAAa,aACX,SACA,UACoB;EACpB,MAAM,OAAO,MAAM,SAAS,MAAM;AAClC,SAAO,IAAI,UAAU,SAAS,SAAS,QAAQ,KAAK;;;;AAaxD,SAAS,qBAAqB,SAGrB;CAEP,MAAM,QAAQ,QAAQ,MACpB,yDACD;AACD,KAAI,MACF,QAAO;EACL,SAAS,OAAO,SAAS,MAAM,IAAI,GAAG;EACtC,OAAO,OAAO,SAAS,MAAM,IAAI,GAAG;EACrC;AAEH,QAAO;;;AAIT,SAAS,sBAAsB,SAAiB,OAAe;CAC7D,MAAM,SAAS,UAAU;CACzB,MAAM,aAAa,KAAK,MAAO,SAAS,QAAS,IAAI;AAKrD,QAAO;EACL,MAAM;EACN,OAAO;GACL,MAAM;GACN,SACE,uBAAuB,QAAQ,YAAY,MAAM,YAC3C,OAAO,gBAAgB,WAAW;GAC3C;EACF;;AAIH,eAAsB,aAAa,GAAY,OAAgB;AAC7D,SAAQ,MAAM,mBAAmB,MAAM;AAEvC,KAAI,iBAAiB,WAAW;EAC9B,IAAIE;AACJ,MAAI;AACF,eAAY,KAAK,MAAM,MAAM,aAAa;UACpC;AACN,eAAY,MAAM;;AAEpB,UAAQ,MAAM,eAAe,UAAU;EAGvC,MAAM,eAAe;AACrB,MAAI,aAAa,OAAO,SAAS,oCAAoC;GACnE,MAAM,YAAY,qBAAqB,aAAa,MAAM,WAAW,GAAG;AACxE,OAAI,WAAW;IACb,MAAM,iBAAiB,sBACrB,UAAU,SACV,UAAU,MACX;AACD,YAAQ,MAAM,0CAA0C,eAAe;AACvE,WAAO,EAAE,KAAK,gBAAgB,IAA4B;;;AAI9D,SAAO,EAAE,KACP,EACE,OAAO;GACL,SAAS,MAAM;GACf,MAAM;GACP,EACF,EACD,MAAM,OACP;;AAGH,QAAO,EAAE,KACP,EACE,OAAO;EACL,SAAU,MAAgB;EAC1B,MAAM;EACP,EACF,EACD,IACD;;;;;AC/GH,MAAa,kBAAkB,YAAY;CACzC,MAAM,WAAW,MAAM,MACrB,GAAG,oBAAoB,6BACvB,EACE,SAAS,cAAc,MAAM,EAC9B,CACF;AAED,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAE7E,QAAQ,MAAM,SAAS,MAAM;;;;;ACP/B,eAAsB,gBAA6C;CACjE,MAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,qBAAqB;EACnE,QAAQ;EACR,SAAS,iBAAiB;EAC1B,MAAM,KAAK,UAAU;GACnB,WAAW;GACX,OAAO;GACR,CAAC;EACH,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,6BAA6B,SAAS;AAE3E,QAAQ,MAAM,SAAS,MAAM;;;;;ACjB/B,eAAsB,gBAAgB;CACpC,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,QAAQ,EAC1D,SAAS;EACP,eAAe,SAAS,MAAM;EAC9B,GAAG,iBAAiB;EACrB,EACF,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,6BAA6B,SAAS;AAE3E,QAAQ,MAAM,SAAS,MAAM;;;;;ACX/B,MAAa,YAAY,YAAY;CACnC,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,UAAU,EAC9D,SAAS,eAAe,MAAM,EAC/B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,wBAAwB,SAAS;AAEtE,QAAQ,MAAM,SAAS,MAAM;;;;;ACZ/B,MAAM,WAAW;AAGjB,MAAM,iBACJ;AAMF,eAAsB,mBAAmB;CACvC,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB;AAC/B,aAAW,OAAO;IACjB,IAAK;AAER,KAAI;EACF,MAAM,WAAW,MAAM,MAAM,gBAAgB;GAC3C,QAAQ,WAAW;GACnB,SAAS;IACP,QAAQ;IACR,cAAc;IACf;GACF,CAAC;AAEF,MAAI,CAAC,SAAS,GACZ,Q
AAO;EAKT,MAAM,WAFW,MAAM,SAAS,MAAM,EAEd;AACxB,MAAI,WAAW,kBAAkB,KAAK,QAAQ,CAC5C,QAAO;AAGT,SAAO;SACD;AACN,SAAO;WACC;AACR,eAAa,QAAQ;;;;;;ACjCzB,MAAa,SAAS,OACpB,IAAI,SAAS,YAAY;AACvB,YAAW,SAAS,GAAG;EACvB;AAEJ,MAAa,aAAa,UACxB,UAAU,QAAQ,UAAU;AAE9B,eAAsB,cAA6B;AAEjD,OAAM,SADS,MAAM,WAAW;;AAIlC,MAAa,qBAAqB,YAAY;CAC5C,MAAM,WAAW,MAAM,kBAAkB;AACzC,OAAM,gBAAgB;AAEtB,SAAQ,KAAK,yBAAyB,WAAW;;;;;ACbnD,eAAsB,gBACpB,YACiB;CAGjB,MAAM,iBAAiB,WAAW,WAAW,KAAK;AAClD,SAAQ,MAAM,yCAAyC,cAAc,IAAI;CAGzE,MAAM,YAAY,KAAK,KAAK,GAAG,WAAW,aAAa;AAEvD,QAAO,KAAK,KAAK,GAAG,WAAW;EAC7B,MAAM,WAAW,MAAM,MACrB,GAAG,gBAAgB,4BACnB;GACE,QAAQ;GACR,SAAS,iBAAiB;GAC1B,MAAM,KAAK,UAAU;IACnB,WAAW;IACX,aAAa,WAAW;IACxB,YAAY;IACb,CAAC;GACH,CACF;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,SAAM,MAAM,cAAc;AAC1B,WAAQ,MAAM,gCAAgC,MAAM,SAAS,MAAM,CAAC;AAEpE;;EAGF,MAAM,OAAQ,MAAM,SAAS,MAAM;AACnC,UAAQ,MAAM,kCAAkC,KAAK;EAErD,MAAM,EAAE,iBAAiB;AAEzB,MAAI,aACF,QAAO;MAEP,OAAM,MAAM,cAAc;;AAI9B,OAAM,IAAI,MACR,iEACD;;;;;AC7CH,MAAM,wBAAwB,GAAG,SAAS,MAAM,mBAAmB,OAAO;AAE1E,MAAM,oBAAoB,UACxB,GAAG,UAAU,MAAM,mBAAmB,MAAM;AAE9C,MAAa,oBAAoB,YAAY;CAC3C,MAAM,EAAE,OAAO,eAAe,MAAM,iBAAiB;AACrD,OAAM,eAAe;AAGrB,SAAQ,MAAM,6CAA6C;AAC3D,KAAI,MAAM,UACR,SAAQ,KAAK,kBAAkB,MAAM;CAGvC,MAAM,mBAAmB,aAAa,MAAM;AAC5C,aAAY,YAAY;AACtB,UAAQ,MAAM,2BAA2B;AACzC,MAAI;GACF,MAAM,EAAE,mBAAU,MAAM,iBAAiB;AACzC,SAAM,eAAeC;AACrB,WAAQ,MAAM,0BAA0B;AACxC,OAAI,MAAM,UACR,SAAQ,KAAK,4BAA4BA,QAAM;WAE1C,OAAO;AAId,WAAQ,MACN,kEACA,MACD;;IAEF,gBAAgB;;AAOrB,eAAsB,iBACpB,SACe;AACf,KAAI;EACF,MAAM,cAAc,MAAM,iBAAiB;AAE3C,MAAI,eAAe,CAAC,SAAS,OAAO;AAClC,SAAM,cAAc;AACpB,OAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,YAAY;AAE5C,SAAM,SAAS;AAEf;;AAGF,UAAQ,KAAK,0CAA0C;EACvD,MAAM,WAAW,MAAM,eAAe;AACtC,UAAQ,MAAM,yBAAyB,SAAS;AAEhD,UAAQ,KACN,0BAA0B,SAAS,UAAU,OAAO,SAAS,mBAC9D;EAED,MAAM,QAAQ,MAAM,gBAAgB,SAAS;AAC7C,QAAM,iBAAiB,MAAM;AAC7B,QAAM,cAAc;AAEpB,MAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,MAAM;AAEtC,QAAM,SAAS;UACR,OAAO;AACd,MAAI,iBAAiB,WAAW;AAC9B,WAAQ,MAAM,+BAA+B,MAAM,aAAa;AAChE,SAAM;;AAGR,UAAQ,MAAM,+BAA+B,MAAM;AACnD,QAAM;;;AAIV,eAAe,UAAU;CACvB,MAAM,OAAO,MAAM,eAAe;AAClC,SAAQ,KAAK,gBAAgB,KAAK,QAAQ;;;;;ACpF5C,eAAsB,QAAQ,SAAwC;AACpE,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,iBAAiB,EAAE,OAAO,MAAM,CAAC;AACvC,SAAQ,QAAQ,2BAA2B,MAAM,kBAAkB;;AAGrE,MAAa,OAAO,cAAc;CAChC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,QAAQ;GACb,SAAS,KAAK;GACd,WAAW,KAAK;GACjB,CAAC;;CAEL,CAAC;;;;AC/CF,MAAa,kBAAkB,YAA2C;CACxE,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,yBAAyB,EAC3E,SAAS,cAAc,MAAM,EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAG7E,QAAQ,MAAM,SAAS,MAAM;;;;;ACH/B,MAAa,aAAa,cAAc;CACtC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,MAAM;AACV,QAAM,aAAa;AACnB,QAAM,kBAAkB;AACxB,MAAI;GACF,MAAM,QAAQ,MAAM,iBAAiB;GACrC,MAAM,UAAU,MAAM,gBAAgB;GACtC,MAAM,eAAe,QAAQ;GAC7B,MAAM,cAAc,eAAe,QAAQ;GAC3C,MAAM,qBACJ,eAAe,IAAK,cAAc,eAAgB,MAAM;GAC1D,MAAM,0BAA0B,QAAQ;GAGxC,SAAS,eAAe,MAAc,MAA+B;AACnE,QAAI,CAAC,KAAM,QAAO,GAAG,KAAK;IAC1B,MAAM,QAAQ,KAAK;IACnB,MAAM,OAAO,QAAQ,KAAK;IAC1B,MAAM,cAAc,QAAQ,IAAK,OAAO,QAAS,MAAM;IACvD,MAAM,mBAAmB,KAAK;AAC9B,WAAO,GAAG,KAAK,IAAI,KAAK,GAAG,MAAM,SAAS,YAAY,QAAQ,EAAE,CAAC,UAAU,iBAAiB,QAAQ,EAAE,CAAC;;GAGzG,MAAM,cAAc,YAAY,YAAY,GAAG,aAAa,SAAS,mBAAmB,QAAQ,EAAE,CAAC,UAAU,wBAAwB,QAAQ,EAAE,CAAC;GAChJ,MAAM,WAAW,eAAe,QAAQ,MAAM,gBAAgB,KAAK;GACnE,MAAM,kBAAkB,eACtB,eACA,MAAM,gBAAgB,YACvB;AAED,WAAQ,IACN,wBAAwB,MAAM,aAAa,mBACtB,MAAM,iBAAiB,iBAEnC,YAAY,MACZ,SAAS,MACT,kBACV;WACM,KAAK;AACZ,WAAQ,MAAM,kCAAkC,IAAI;AAC
pD,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC7BF,eAAe,oBAAqC;AAClD,KAAI;EACF,MAAM,kBAAkB,IAAI,IAAI,mBAAmB,OAAO,KAAK,IAAI,CAAC;AAMpE,SAHoB,KAAK,MAAM,MAAM,GAAG,SAAS,gBAAgB,CAAC,CAG/C;SACb;AACN,SAAO;;;AAIX,SAAS,iBAAiB;CACxB,MAAM,QAAQ,OAAO,QAAQ;AAE7B,QAAO;EACL,MAAM,QAAQ,QAAQ;EACtB,SAAS,QAAQ,IAAI,UAAU,QAAQ,QAAQ,MAAM,EAAE;EACvD,UAAU,GAAG,UAAU;EACvB,MAAM,GAAG,MAAM;EAChB;;AAGH,eAAe,mBAAqC;AAClD,KAAI;AAEF,MAAI,EADU,MAAM,GAAG,KAAK,MAAM,kBAAkB,EACzC,QAAQ,CAAE,QAAO;AAG5B,UADgB,MAAM,GAAG,SAAS,MAAM,mBAAmB,OAAO,EACnD,MAAM,CAAC,SAAS;SACzB;AACN,SAAO;;;AAIX,eAAe,eAAmC;CAChD,MAAM,CAAC,SAAS,eAAe,MAAM,QAAQ,IAAI,CAC/C,mBAAmB,EACnB,kBAAkB,CACnB,CAAC;AAEF,QAAO;EACL;EACA,SAAS,gBAAgB;EACzB,OAAO;GACL,SAAS,MAAM;GACf,mBAAmB,MAAM;GAC1B;EACD;EACD;;AAGH,SAAS,oBAAoB,MAAuB;AAClD,SAAQ,KAAK;;WAEJ,KAAK,QAAQ;WACb,KAAK,QAAQ,KAAK,GAAG,KAAK,QAAQ,QAAQ,IAAI,KAAK,QAAQ,SAAS,GAAG,KAAK,QAAQ,KAAK;;;aAGvF,KAAK,MAAM,QAAQ;uBACT,KAAK,MAAM,kBAAkB;;gBAEpC,KAAK,cAAc,QAAQ,OAAO;;AAGlD,SAAS,mBAAmB,MAAuB;AACjD,SAAQ,IAAI,KAAK,UAAU,MAAM,MAAM,EAAE,CAAC;;AAG5C,eAAsB,SAAS,SAAyC;CACtE,MAAM,YAAY,MAAM,cAAc;AAEtC,KAAI,QAAQ,KACV,oBAAmB,UAAU;KAE7B,qBAAoB,UAAU;;AAIlC,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,EACJ,MAAM;EACJ,MAAM;EACN,SAAS;EACT,aAAa;EACd,EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,SAAS,EACd,MAAM,KAAK,MACZ,CAAC;;CAEL,CAAC;;;;ACtHF,eAAsB,YAA2B;AAC/C,KAAI;AACF,QAAM,GAAG,OAAO,MAAM,kBAAkB;AACxC,UAAQ,QAAQ,iDAAiD;UAC1D,OAAO;AACd,MAAK,MAAgC,SAAS,SAC5C,SAAQ,KAAK,sCAAsC;OAC9C;AACL,WAAQ,MAAM,2BAA2B,MAAM;AAC/C,SAAM;;;;AAKZ,MAAa,SAAS,cAAc;CAClC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;AACJ,SAAO,WAAW;;CAErB,CAAC;;;;AC1BF,SAASC,eAAqB;AAC5B,QAAO,KAAK,KAAK,CAAC,SAAS,GAAG,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;;AAwHzE,MAAaC,eAA6B;CACxC,SAAS;CACT,SAAS,EAAE;CACX,0BAAU,IAAI,KAAK;CACnB,kBAAkB;CAClB,YAAY;CACZ,kBAAkB,OAAU;CAC7B;AAED,SAAgB,YAAY,SAAkB,YAA0B;AACtE,cAAa,UAAU;AACvB,cAAa,aAAa;AAC1B,cAAa,UAAU,EAAE;AACzB,cAAa,2BAAW,IAAI,KAAK;AACjC,cAAa,mBAAmB,UAAUD,cAAY,GAAG;;AAG3D,SAAgB,mBAA4B;AAC1C,QAAO,aAAa;;AAItB,SAAS,kBAAkB,UAA0C;CACnE,MAAM,MAAM,KAAK,KAAK;AAGtB,KAAI,aAAa,kBAAkB;EACjC,MAAM,UAAU,aAAa,SAAS,IAAI,aAAa,iBAAiB;AACxE,MAAI,WAAW,MAAM,QAAQ,eAAe,aAAa,kBAAkB;AACzE,WAAQ,eAAe;AACvB,UAAO,aAAa;;;CAKxB,MAAM,YAAYA,cAAY;AAC9B,cAAa,mBAAmB;AAChC,cAAa,SAAS,IAAI,WAAW;EACnC,IAAI;EACJ,WAAW;EACX,cAAc;EACd,cAAc;EACd,kBAAkB;EAClB,mBAAmB;EACnB,QAAQ,EAAE;EACV;EACD,CAAC;AAEF,QAAO;;AAaT,SAAgB,cACd,UACA,SACQ;AACR,KAAI,CAAC,aAAa,QAChB,QAAO;CAGT,MAAM,YAAY,kBAAkB,SAAS;CAC7C,MAAM,UAAU,aAAa,SAAS,IAAI,UAAU;AACpD,KAAI,CAAC,QACH,QAAO;CAGT,MAAME,QAAsB;EAC1B,IAAIF,cAAY;EAChB;EACA,WAAW,KAAK,KAAK;EACrB;EACA,SAAS;GACP,OAAO,QAAQ;GACf,UAAU,QAAQ;GAClB,QAAQ,QAAQ;GAChB,OAAO,QAAQ;GACf,YAAY,QAAQ;GACpB,aAAa,QAAQ;GACrB,QAAQ,QAAQ;GACjB;EACF;AAED,cAAa,QAAQ,KAAK,MAAM;AAChC,SAAQ;AAER,KAAI,CAAC,QAAQ,OAAO,SAAS,QAAQ,MAAM,CACzC,SAAQ,OAAO,KAAK,QAAQ,MAAM;AAIpC,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,MAAI,CAAC,QAAQ,UACX,SAAQ,YAAY,EAAE;AAExB,OAAK,MAAM,QAAQ,QAAQ,MACzB,KAAI,CAAC,QAAQ,UAAU,SAAS,KAAK,KAAK,CACxC,SAAQ,UAAU,KAAK,KAAK,KAAK;;AAMvC,QACE,aAAa,aAAa,KACvB,aAAa,QAAQ,SAAS,aAAa,YAC9C;EACA,MAAM,UAAU,aAAa,QAAQ,OAAO;AAE5C,MAAI,SAIF;OAHuB,aAAa,QAAQ,QACzC,MAAM,EAAE,cAAc,QAAQ,UAChC,CACkB,WAAW,EAC5B,cAAa,SAAS,OAAO,QAAQ,UAAU;;;AAKrD,QAAO,MAAM;;AAqBf,SAAgB,eACd,IACA,UACA,YACM;AACN,KAAI,CAAC,aAAa,WAAW,CAAC,GAC5B;CAGF,MAAM,QAAQ,aAAa,QAAQ,MAAM,MAAM,EAAE,OAAO,GAAG;AAC3D,KAAI,OAAO;AACT,QAAM,WAAW;AACjB,QAAM,aAAa;EAGnB,MAAM,UAAU,aAAa,SAAS,IAAI,MAAM,UAAU;AAC1D,MAAI,SAAS;AACX,WAAQ,oBAAoB,SAAS,MAAM;AAC3C,WAAQ,qBAAqB,SAAS,MAAM;AAC5C,WAAQ,eAAe,KAAK,KAAK;;;;AAKvC,SAAgB,WAAW,UAAwB,EAAE,EAAiB;CACpE,MAAM,EACJ,OAAO,GACP,QAAQ,IACR,OACA
,UACA,SACA,MACA,IACA,QACA,cACE;CAEJ,IAAI,WAAW,CAAC,GAAG,aAAa,QAAQ;AAGxC,KAAI,UACF,YAAW,SAAS,QAAQ,MAAM,EAAE,cAAc,UAAU;AAG9D,KAAI,OAAO;EACT,MAAM,aAAa,MAAM,aAAa;AACtC,aAAW,SAAS,QACjB,MACC,EAAE,QAAQ,MAAM,aAAa,CAAC,SAAS,WAAW,IAC/C,EAAE,UAAU,MAAM,aAAa,CAAC,SAAS,WAAW,CAC1D;;AAGH,KAAI,SACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,SAAS;AAG5D,KAAI,YAAY,OACd,YAAW,SAAS,QAAQ,MAAM,EAAE,UAAU,YAAY,QAAQ;AAGpE,KAAI,KACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,KAAK;AAGxD,KAAI,GACF,YAAW,SAAS,QAAQ,MAAM,EAAE,aAAa,GAAG;AAGtD,KAAI,QAAQ;EACV,MAAM,cAAc,OAAO,aAAa;AACxC,aAAW,SAAS,QAAQ,MAAM;GAEhC,MAAM,WAAW,EAAE,QAAQ,SAAS,MAAM,MAAM;AAC9C,QAAI,OAAO,EAAE,YAAY,SACvB,QAAO,EAAE,QAAQ,aAAa,CAAC,SAAS,YAAY;AAEtD,QAAI,MAAM,QAAQ,EAAE,QAAQ,CAC1B,QAAO,EAAE,QAAQ,MACd,MAAM,EAAE,QAAQ,EAAE,KAAK,aAAa,CAAC,SAAS,YAAY,CAC5D;AAEH,WAAO;KACP;GAGF,MAAM,YACJ,EAAE,UAAU,WACT,OAAO,EAAE,SAAS,QAAQ,YAAY,YACtC,EAAE,SAAS,QAAQ,QAAQ,aAAa,CAAC,SAAS,YAAY;GAGnE,MAAM,YAAY,EAAE,UAAU,WAAW,MAAM,MAC7C,EAAE,KAAK,aAAa,CAAC,SAAS,YAAY,CAC3C;GAGD,MAAM,WAAW,EAAE,QAAQ,QAAQ,aAAa,CAAC,SAAS,YAAY;AAEtE,UAAO,YAAY,aAAa,aAAa;IAC7C;;AAIJ,UAAS,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU;CAElD,MAAM,QAAQ,SAAS;CACvB,MAAM,aAAa,KAAK,KAAK,QAAQ,MAAM;CAC3C,MAAMG,WAAS,OAAO,KAAK;AAG3B,QAAO;EACL,SAHc,SAAS,MAAMA,SAAOA,UAAQ,MAAM;EAIlD;EACA;EACA;EACA;EACD;;AAGH,SAAgB,SAAS,IAAsC;AAC7D,QAAO,aAAa,QAAQ,MAAM,MAAM,EAAE,OAAO,GAAG;;AAGtD,SAAgB,cAA6B;CAC3C,MAAM,WAAW,MAAM,KAAK,aAAa,SAAS,QAAQ,CAAC,CAAC,MACzD,GAAG,MAAM,EAAE,eAAe,EAAE,aAC9B;AAED,QAAO;EACL;EACA,OAAO,SAAS;EACjB;;AAGH,SAAgB,WAAW,IAAiC;AAC1D,QAAO,aAAa,SAAS,IAAI,GAAG;;AAGtC,SAAgB,kBAAkB,WAAwC;AACxE,QAAO,aAAa,QACjB,QAAQ,MAAM,EAAE,cAAc,UAAU,CACxC,MAAM,GAAG,MAAM,EAAE,YAAY,EAAE,UAAU;;AAG9C,SAAgB,eAAqB;AACnC,cAAa,UAAU,EAAE;AACzB,cAAa,2BAAW,IAAI,KAAK;AACjC,cAAa,mBAAmBH,cAAY;;AAG9C,SAAgB,cAAc,WAA4B;AACxD,KAAI,CAAC,aAAa,SAAS,IAAI,UAAU,CACvC,QAAO;AAGT,cAAa,UAAU,aAAa,QAAQ,QACzC,MAAM,EAAE,cAAc,UACxB;AACD,cAAa,SAAS,OAAO,UAAU;AAEvC,KAAI,aAAa,qBAAqB,UACpC,cAAa,mBAAmBA,cAAY;AAG9C,QAAO;;AAGT,SAAgB,WAAyB;CACvC,MAAM,UAAU,aAAa;CAE7B,MAAMI,YAAoC,EAAE;CAC5C,MAAMC,eAAuC,EAAE;CAC/C,MAAMC,iBAAyC,EAAE;CAEjD,IAAI,aAAa;CACjB,IAAI,cAAc;CAClB,IAAI,gBAAgB;CACpB,IAAI,gBAAgB;CACpB,IAAI,eAAe;CACnB,IAAI,YAAY;AAEhB,MAAK,MAAM,SAAS,SAAS;EAE3B,MAAM,QAAQ,MAAM,UAAU,SAAS,MAAM,QAAQ;AACrD,YAAU,UAAU,UAAU,UAAU,KAAK;AAG7C,eAAa,MAAM,aAAa,aAAa,MAAM,aAAa,KAAK;EAGrE,MAAM,OAAO,IAAI,KAAK,MAAM,UAAU,CAAC,aAAa,CAAC,MAAM,GAAG,GAAG;AACjE,iBAAe,SAAS,eAAe,SAAS,KAAK;AAErD,MAAI,MAAM,UAAU;AAClB,OAAI,MAAM,SAAS,QACjB;OAEA;AAGF,iBAAc,MAAM,SAAS,MAAM;AACnC,kBAAe,MAAM,SAAS,MAAM;;AAGtC,MAAI,MAAM,YAAY;AACpB,oBAAiB,MAAM;AACvB;;;CAKJ,MAAM,iBAAiB,OAAO,QAAQ,eAAe,CAClD,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,EAAE,CAAC,CACtC,MAAM,IAAI,CACV,KAAK,CAAC,MAAM,YAAY;EAAE;EAAM;EAAO,EAAE;CAG5C,MAAM,MAAM,KAAK,KAAK;CACtB,IAAI,iBAAiB;AACrB,MAAK,MAAM,WAAW,aAAa,SAAS,QAAQ,CAClD,KAAI,MAAM,QAAQ,eAAe,aAAa,iBAC5C;AAIJ,QAAO;EACL,eAAe,QAAQ;EACvB,oBAAoB;EACpB,gBAAgB;EAChB,kBAAkB;EAClB,mBAAmB;EACnB,mBAAmB,gBAAgB,IAAI,gBAAgB,gBAAgB;EACvE,mBAAmB;EACnB,sBAAsB;EACtB;EACA;EACD;;AAGH,SAAgB,cAAc,SAAyB,QAAgB;AACrE,KAAI,WAAW,OACb,QAAO,KAAK,UACV;EACE,UAAU,MAAM,KAAK,aAAa,SAAS,QAAQ,CAAC;EACpD,SAAS,aAAa;EACvB,EACD,MACA,EACD;CAIH,MAAM,UAAU;EACd;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD;CAED,MAAM,OAAO,aAAa,QAAQ,KAAK,MAAM;EAC3C,EAAE;EACF,EAAE;EACF,IAAI,KAAK,EAAE,UAAU,CAAC,aAAa;EACnC,EAAE;EACF,EAAE,QAAQ;EACV,EAAE,QAAQ,SAAS;EACnB,EAAE,QAAQ;EACV,EAAE,UAAU,WAAW;EACvB,EAAE,UAAU,SAAS;EACrB,EAAE,UAAU,MAAM,gBAAgB;EAClC,EAAE,UAAU,MAAM,iBAAiB;EACnC,EAAE,cAAc;EAChB,EAAE,UAAU,eAAe;EAC3B,EAAE,UAAU,SAAS;EACtB,CAAC;AAEF,QAAO,CAAC,QAAQ,KAAK,IAAI,EAAE,GAAG,KAAK,KAAK,MAAM,EAAE,KAA
K,IAAI,CAAC,CAAC,CAAC,KAAK,KAAK;;;;;AC5iBxE,SAAgB,mBAAyB;AACvC,KAAI,OAAO,QAAQ,YAAa;AAEhC,KAAI;EACF,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,0BAAU,IAAI,KAAyB;AAmD7C,sBA7CmB;GACjB,SACE,SACA,SACA;AACA,QAAI;KACF,MAAM,SACJ,OAAO,QAAQ,WAAW,WACxB,IAAI,IAAI,QAAQ,OAAO,GACtB,QAAQ;KAIb,MAAM,MAHM,eAGI,OAAO,UAAU,CAAC;KAClC,MAAM,WAAW,OAAO,IAAI,SAAS,IAAI,MAAM;AAC/C,SAAI,CAAC,UAAU;AACb,cAAQ,MAAM,sBAAsB,OAAO,WAAW;AACtD,aAAQ,OAAiC,SAAS,SAAS,QAAQ;;KAErE,IAAI,QAAQ,QAAQ,IAAI,SAAS;AACjC,SAAI,CAAC,OAAO;AACV,cAAQ,IAAI,WAAW,SAAS;AAChC,cAAQ,IAAI,UAAU,MAAM;;KAE9B,IAAI,QAAQ;AACZ,SAAI;MACF,MAAM,IAAI,IAAI,IAAI,SAAS;AAC3B,cAAQ,GAAG,EAAE,SAAS,IAAI,EAAE;aACtB;AAGR,aAAQ,MAAM,qBAAqB,OAAO,SAAS,OAAO,QAAQ;AAClE,YAAQ,MAAgC,SAAS,SAAS,QAAQ;YAC5D;AACN,YAAQ,OAAiC,SAAS,SAAS,QAAQ;;;GAGvE,QAAQ;AACN,WAAO,OAAO,OAAO;;GAEvB,UAAU;AACR,WAAO,OAAO,SAAS;;GAE1B,CAEuD;AACxD,UAAQ,MAAM,mDAAmD;UAC1D,KAAK;AACZ,UAAQ,MAAM,wBAAwB,IAAI;;;;;;ACzD9C,SAAS,WAAsB;CAC7B,MAAM,EAAE,UAAU,MAAM,eAAQC;AAEhC,KAAI,aAAa,SAAS;AACxB,MAAI;GACF,MAAM,UAAU,oDAAoD,KAAK;AAGzE,OAFsB,SAAS,SAAS,EAAE,OAAO,QAAQ,CAAC,CAAC,UAAU,CAEnD,aAAa,CAAC,SAAS,iBAAiB,CACxD,QAAO;UAEH;AACN,UAAO;;AAGT,SAAO;QACF;EACL,MAAM,YAAYC,MAAI;AACtB,MAAI,WAAW;AACb,OAAI,UAAU,SAAS,MAAM,CAAE,QAAO;AACtC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;AACvC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;;AAGzC,SAAO;;;;;;;;;;AAWX,SAAgB,kBACd,SACA,eAAuB,IACf;CACR,MAAM,QAAQ,UAAU;CACxB,MAAM,kBAAkB,OAAO,QAAQ,QAAQ,CAAC,QAC7C,GAAG,WAAW,UAAU,OAC1B;CAED,IAAIC;AAEJ,SAAQ,OAAR;EACE,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,QAAQ,IAAI,MAAM,MAAM,WAAW,MAAK,MAAK,CAAC,GAAG,CACvE,KAAK,KAAK;AACb;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,OAAO,IAAI,GAAG,QAAQ,CAC5C,KAAK,MAAM;AACd;EAEF,KAAK;AACH,kBAAe,gBACZ,KACE,CAAC,KAAK,WACL,WAAW,IAAI,IAAI,MAAM,WAAW,MAAK,OAAO,GAAG,KAAK,CAAC,GAC5D,CACA,KAAK,KAAK;AACb;EAEF,SAAS;GAEP,MAAM,cAAc,gBACjB,KACE,CAAC,KAAK,WAAW,GAAG,IAAI,IAAI,MAAM,WAAW,MAAK,OAAO,GAAG,KAAK,CAAC,GACpE,CACA,KAAK,IAAI;AACZ,kBAAe,gBAAgB,SAAS,IAAI,UAAU,gBAAgB;AACtE;;;AAIJ,KAAI,gBAAgB,aAElB,QAAO,GAAG,eADQ,UAAU,QAAQ,QAAQ,SACP;AAGvC,QAAO,gBAAgB;;;;;;CC3FzB,IAAI,IAAI,WAAW,EAAE,EAAE,OAAO,EAAE,QAAQ,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE;CAC7D,IAAI,mBACH,EAAE,CAAC,CAAC,IAAI,YAAY,KAAK,SAAS,aAAa,MAC9C,CAAC,CAAC,IAAI,eAAe,KAAK,SAAS,UAAU,IAAI,EAAE,aAAa,YAAa,EAAE,UAAU,EAAE,EAAE,SAAS,IAAI,SAAS,UAAW,CAAC,CAAC,IAAI;CAEtI,IAAI,aAAa,MAAM,OAAO,UAAU,UACvC,UAAS;EACR,IAAI,SAAS,KAAK,OAAO,QAAQ,OAAO,QAAQ,OAAO,KAAK,OAAO;AACnE,SAAO,CAAC,QAAQ,OAAO,aAAa,QAAQ,OAAO,SAAS,MAAM,GAAG,QAAQ,OAAO,SAAS;;CAG/F,IAAI,gBAAgB,QAAQ,OAAO,SAAS,UAAU;EACrD,IAAI,SAAS,IAAI,SAAS;AAC1B,KAAG;AACF,aAAU,OAAO,UAAU,QAAQ,MAAM,GAAG;AAC5C,YAAS,QAAQ,MAAM;AACvB,WAAQ,OAAO,QAAQ,OAAO,OAAO;WAC7B,CAAC;AACV,SAAO,SAAS,OAAO,UAAU,OAAO;;CAGzC,IAAI,gBAAgB,UAAU,qBAAqB;EAClD,IAAI,IAAI,UAAU,kBAAkB;AACpC,SAAO;GACN,kBAAkB;GAClB,OAAO,EAAE,WAAW,UAAU;GAC9B,MAAM,EAAE,WAAW,YAAY,kBAAkB;GACjD,KAAK,EAAE,WAAW,YAAY,kBAAkB;GAChD,QAAQ,EAAE,WAAW,WAAW;GAChC,WAAW,EAAE,WAAW,WAAW;GACnC,SAAS,EAAE,WAAW,WAAW;GACjC,QAAQ,EAAE,WAAW,WAAW;GAChC,eAAe,EAAE,WAAW,WAAW;GAEvC,OAAO,EAAE,YAAY,WAAW;GAChC,KAAK,EAAE,YAAY,WAAW;GAC9B,OAAO,EAAE,YAAY,WAAW;GAChC,QAAQ,EAAE,YAAY,WAAW;GACjC,MAAM,EAAE,YAAY,WAAW;GAC/B,SAAS,EAAE,YAAY,WAAW;GAClC,MAAM,EAAE,YAAY,WAAW;GAC/B,OAAO,EAAE,YAAY,WAAW;GAChC,MAAM,EAAE,YAAY,WAAW;GAE/B,SAAS,EAAE,YAAY,WAAW;GAClC,OAAO,EAAE,YAAY,WAAW;GAChC,SAAS,EAAE,YAAY,WAAW;GAClC,UAAU,EAAE,YAAY,WAAW;GACnC,QAAQ,EAAE,YAAY,WAAW;GACjC,WAAW,EAAE,YAAY,WAAW;GACpC,QAAQ,EAAE,YAAY,WAAW;GACjC,SAAS,EAAE,YAAY,WAAW;GAElC,aAAa,EAAE,YAAY,WAAW;GACtC,WAAW,EAAE,YAAY,WAAW;GACpC,aAAa,EAAE,YAAY,WAAW;GACtC,cAAc,EAAE,YAAY,WAAW;GACvC,YAAY,EAAE,YAAY,WAAW;GACrC,eAAe,EAAE,YAAY,WAAW;GACxC,YAAY,EAAE,YAAY,WAAW;GACrC,aAAa
,EAAE,YAAY,WAAW;GAEtC,eAAe,EAAE,aAAa,WAAW;GACzC,aAAa,EAAE,aAAa,WAAW;GACvC,eAAe,EAAE,aAAa,WAAW;GACzC,gBAAgB,EAAE,aAAa,WAAW;GAC1C,cAAc,EAAE,aAAa,WAAW;GACxC,iBAAiB,EAAE,aAAa,WAAW;GAC3C,cAAc,EAAE,aAAa,WAAW;GACxC,eAAe,EAAE,aAAa,WAAW;GACzC;;AAGF,QAAO,UAAU,cAAc;AAC/B,QAAO,QAAQ,eAAe;;;;;;ACjE9B,MAAM,aAAa;AAEnB,SAASC,iBAAe,IAAoB;AAC1C,KAAI,KAAK,IAAM,QAAO,GAAG,GAAG;AAC5B,QAAO,IAAI,KAAK,KAAM,QAAQ,EAAE,CAAC;;AAGnC,SAASC,eAAa,GAAmB;AACvC,KAAI,KAAK,IAAS,QAAO,IAAI,IAAI,KAAS,QAAQ,EAAE,CAAC;AACrD,KAAI,KAAK,IAAM,QAAO,IAAI,IAAI,KAAM,QAAQ,EAAE,CAAC;AAC/C,QAAO,OAAO,EAAE;;AAGlB,SAASC,eAAa,OAAgB,QAAyB;AAC7D,KAAI,UAAU,UAAa,WAAW,OAAW,QAAO;AACxD,QAAO,GAAGD,eAAa,MAAM,CAAC,GAAGA,eAAa,OAAO;;;;;;;;;;;;;;;AAgBvD,IAAa,kBAAb,MAAoD;CAClD,AAAQ,iCAA8C,IAAI,KAAK;CAC/D,AAAQ;CACR,AAAQ,gBAAgB;CACxB,AAAQ;CAER,YAAY,SAAoC;AAC9C,OAAK,aAAa,SAAS,cAAc;AAEzC,OAAK,QAAQ,QAAQ,OAAO;;;;;CAM9B,AAAQ,gBAAwB;EAC9B,MAAM,cAAc,KAAK,eAAe;AACxC,MAAI,gBAAgB,EAAG,QAAO;EAC9B,MAAM,SAAS,gBAAgB,IAAI,KAAK;AACxC,SAAOE,0BAAG,IAAI,UAAU,YAAY,UAAU,OAAO,iBAAiB;;;;;;CAOxE,AAAQ,eAAqB;AAC3B,MAAI,CAAC,KAAK,MAAO;EAEjB,MAAM,aAAa,KAAK,eAAe;AACvC,MAAI,YAAY;AACd,WAAQ,OAAO,MAAM,aAAa,WAAW;AAC7C,QAAK,gBAAgB;aACZ,KAAK,eAAe;AAC7B,WAAQ,OAAO,MAAM,WAAW;AAChC,QAAK,gBAAgB;;;;;;CAOzB,AAAQ,oBAA0B;AAChC,MAAI,KAAK,iBAAiB,KAAK,OAAO;AACpC,WAAQ,OAAO,MAAM,WAAW;AAChC,QAAK,gBAAgB;;;;;;;;;CAUzB,AAAQ,SAAS,SAAiB,SAAS,OAAa;AACtD,OAAK,mBAAmB;AAGxB,MAAI,OACF,SAAQ,IAAIA,0BAAG,IAAI,QAAQ,CAAC;MAE5B,SAAQ,IAAI,QAAQ;AAItB,OAAK,cAAc;;CAGrB,eAAe,SAA+B;AAC5C,OAAK,eAAe,IAAI,QAAQ,IAAI,QAAQ;AAE5C,MAAI,KAAK,YAAY;GACnB,MAAM,YAAY,QAAQ,QAAQ,IAAI,QAAQ,UAAU;GACxD,MAAM,YACJ,QAAQ,kBAAkB,UAAa,QAAQ,gBAAgB,IAC7D,OAAO,QAAQ,cAAc,KAC7B;GACJ,MAAM,UAAU,UAAU,QAAQ,OAAO,GAAG,QAAQ,OAAO,YAAY;AACvE,QAAK,SAAS,SAAS,QAAQ,gBAAgB;;;CAInD,gBAAgB,IAAY,QAA6B;EACvD,MAAM,UAAU,KAAK,eAAe,IAAI,GAAG;AAC3C,MAAI,CAAC,QAAS;AAGd,SAAO,OAAO,SAAS,OAAO;AAG9B,MAAI,KAAK,cAAc,OAAO,WAAW,aAAa;GACpD,MAAM,YAAY,QAAQ,QAAQ,IAAI,QAAQ,UAAU;GACxD,MAAM,UAAU,UAAU,QAAQ,OAAO,GAAG,QAAQ,OAAO,UAAU;AACrE,QAAK,SAAS,SAAS,QAAQ,gBAAgB;;;CAInD,kBAAkB,SAA+B;AAC/C,OAAK,eAAe,OAAO,QAAQ,GAAG;EAEtC,MAAM,SAAS,QAAQ,cAAc;EACrC,MAAM,WAAWH,iBAAe,QAAQ,cAAc,EAAE;EACxD,MAAM,SACJ,QAAQ,QACNE,eAAa,QAAQ,aAAa,QAAQ,aAAa,GACvD;EACJ,MAAM,YAAY,QAAQ,QAAQ,IAAI,QAAQ,UAAU;EAExD,MAAM,UAAU,QAAQ,WAAW,WAAW,UAAU;EACxD,MAAM,SAAS,UAAU,WAAW;EACpC,MAAM,aAAa,SAAS,IAAI,WAAW;EAC3C,IAAI,UAAU,GAAG,OAAO,GAAG,QAAQ,OAAO,GAAG,QAAQ,KAAK,GAAG,OAAO,GAAG,WAAW,aAAa;AAE/F,MAAI,SAAS;GACX,MAAM,YAAY,QAAQ,QAAQ,KAAK,QAAQ,UAAU;AACzD,cAAW;;AAGb,OAAK,SAAS,SAAS,QAAQ,gBAAgB;;CAGjD,UAAgB;AACd,MAAI,KAAK,iBAAiB,KAAK,OAAO;AACpC,WAAQ,OAAO,MAAM,WAAW;AAChC,QAAK,gBAAgB;;AAEvB,OAAK,eAAe,OAAO;;;;;;ACtJ/B,MAAME,WAAqB;CACzB,gCAAgB,IAAI,KAAK;CACzB,mBAAmB,EAAE;CACrB,eAAe,EAAE;CAClB;AAID,MAAMC,YAAkC,EAAE;AAC1C,SAAS,kBAAwB;AAC/B,MAAK,MAAM,YAAY,UACrB,WAAU;;AAId,SAAS,eAAe,IAAoB;AAC1C,KAAI,KAAK,IAAM,QAAO,GAAG,GAAG;AAC5B,QAAO,IAAI,KAAK,KAAM,QAAQ,EAAE,CAAC;;AAGnC,SAAS,aAAa,GAAmB;AACvC,KAAI,KAAK,IAAS,QAAO,IAAI,IAAI,KAAS,QAAQ,EAAE,CAAC;AACrD,KAAI,KAAK,IAAM,QAAO,IAAI,IAAI,KAAM,QAAQ,EAAE,CAAC;AAC/C,QAAO,OAAO,EAAE;;AAGlB,SAAS,aAAa,OAAgB,QAAyB;AAC7D,KAAI,UAAU,UAAa,WAAW,OAAW,QAAO;AACxD,QAAO,GAAG,aAAa,MAAM,CAAC,GAAG,aAAa,OAAO;;AAGvD,SAAS,eAAe,WAA2B;AACjD,QAAO,eAAe,KAAK,KAAK,GAAG,UAAU;;AAI/C,SAAS,UAAU,EACjB,YACA,UAIqB;CACrB,MAAMC,OAA8D;EAClE;GAAE,KAAK;GAAU,OAAO;GAAU,OAAO,OAAO;GAAQ;EACxD;GAAE,KAAK;GAAa,OAAO;GAAa,OAAO,OAAO;GAAW;EACjE;GAAE,KAAK;GAAU,OAAO;GAAU,OAAO,OAAO;GAAQ;EACzD;AAED,QACE,qBAAC;EAAI,aAAY;EAAS,UAAU;aACjC,KAAK,KAAK,KAAK,QACd,qBAAC,MAAM,uBACJ,MAAM,KAAK,oBAAC,kBAAK,QAAU,EAC5B,qBAAC;GACC,MAAM,eAAe,IAAI;GACzB,OAAO,eAAe,IAAI,MAAM,SAAS;GACzC,SAAS,eAAe,IAAI;;IAE
3B;IAAI;IACH,MAAM;IAAE;IAAG,IAAI;IAAM;IAAG,IAAI;IAAM;IAAE;;IACjC,KATY,IAAI,IAUR,CACjB,EACF,oBAAC;GAAK;aAAS;IAA+C;GAC1D;;AAIV,SAAS,eAAe,QAAwB;AAC9C,KAAI,WAAW,YAAa,QAAO;AACnC,KAAI,WAAW,SAAU,QAAO;AAChC,QAAO;;AAGT,SAAS,cAAc,QAAwB;AAC7C,KAAI,WAAW,YAAa,QAAO;AACnC,KAAI,WAAW,SAAU,QAAO;AAChC,QAAO;;AAIT,SAAS,iBAAiB,EACxB,WAGqB;CACrB,MAAM,GAAG,WAAW,SAAS,EAAE;AAG/B,iBAAgB;EACd,MAAM,WAAW,kBAAkB,SAAS,MAAM,IAAI,EAAE,EAAE,IAAK;AAC/D,eAAa,cAAc,SAAS;IACnC,EAAE,CAAC;CAEN,MAAM,cAAc,eAAe,QAAQ,OAAO;CAClD,MAAM,aAAa,cAAc,QAAQ,OAAO;AAEhD,QACE,qBAAC;EACC,qBAAC;GAAK,OAAO;cAAc,YAAW;IAAQ;EAC9C,oBAAC;GAAK;aAAM,QAAQ;IAAc;EAClC,qBAAC;GAAK;GAAE,QAAQ;GAAK;MAAQ;EAC7B,qBAAC;GAAK;cAAU,eAAe,QAAQ,UAAU,EAAC;IAAQ;EACzD,QAAQ,kBAAkB,UAAa,QAAQ,gBAAgB,KAC9D,qBAAC;GAAK,OAAM;;IAAO;IAAS,QAAQ;IAAc;;IAAS;EAE7D,oBAAC;GAAK,OAAM;aAAW,QAAQ;IAAa;KACxC;;AAKV,SAAS,oBAAoB,EAC3B,WAGqB;CACrB,MAAM,UAAU,QAAQ,WAAW,YAAY,QAAQ,cAAc,MAAM;AAE3E,QACE,qBAAC;EACC,qBAAC;GAAK,OAAO,UAAU,QAAQ;cAAU,UAAU,MAAM,KAAI;IAAQ;EACrE,oBAAC;GAAK;aAAM,QAAQ;IAAc;EAClC,qBAAC;GAAK;GAAE,QAAQ;GAAK;MAAQ;EAC7B,qBAAC;GAAK,OAAO,UAAU,QAAQ;cAC5B,QAAQ,cAAc,KAAK;IACvB;EACP,qBAAC;GAAK;cAAU,eAAe,QAAQ,cAAc,EAAE,EAAC;IAAQ;EAChE,qBAAC,mBAAM,aAAa,QAAQ,aAAa,QAAQ,aAAa,EAAC,OAAQ;EACvE,oBAAC;GAAK,OAAM;aAAW,QAAQ;IAAa;KACxC;;AAKV,SAAS,gBAAgB,EACvB,WAGqB;AACrB,QACE,qBAAC;EAAI,eAAc;aACjB,qBAAC;GACC,oBAAC;IAAK,OAAM;cAAM;KAAS;GAC3B,oBAAC;IAAK;cAAM,QAAQ;KAAc;GAClC,qBAAC;IAAK;IAAE,QAAQ;IAAK;OAAQ;GAC7B,qBAAC;IAAK,OAAM;eAAO,QAAQ,cAAc,KAAI;KAAQ;GACrD,qBAAC;IAAK;eAAU,eAAe,QAAQ,cAAc,EAAE,EAAC;KAAQ;GAChE,oBAAC;IAAK,OAAM;cAAW,QAAQ;KAAa;MACxC,EACL,QAAQ,SACP,oBAAC;GAAI,YAAY;aACf,qBAAC;IAAK,OAAM;IAAM;eAAS,OACrB,QAAQ;KACP;IACH;GAEJ;;AAKV,SAAS,aAAa,EACpB,YACA,YACA,eACA,WACA,iBAOqB;AACrB,KAAI,eAAe,UAAU;AAC3B,MAAI,WAAW,WAAW,EACxB,QAAO,oBAAC;GAAK;aAAS;IAAyB;AAEjD,SACE,0CACG,WAAW,MAAM,GAAG,cAAc,CAAC,KAAK,QACvC,oBAAC,oBAA8B,SAAS,OAAjB,IAAI,GAAoB,CAC/C,GACD;;AAIP,KAAI,eAAe,aAAa;AAC9B,MAAI,cAAc,WAAW,EAC3B,QAAO,oBAAC;GAAK;aAAS;IAA4B;AAEpD,SACE,0CACG,cACE,MAAM,CAAC,cAAc,CACrB,SAAS,CACT,KAAK,QACJ,oBAAC,uBAAiC,SAAS,OAAjB,IAAI,GAAoB,CAClD,GACH;;AAKP,KAAI,UAAU,WAAW,EACvB,QAAO,oBAAC;EAAK;YAAS;GAAgB;AAExC,QACE,0CACG,UACE,MAAM,CAAC,cAAc,CACrB,SAAS,CACT,KAAK,QACJ,oBAAC,mBAA6B,SAAS,OAAjB,IAAI,GAAoB,CAC9C,GACH;;AAKP,SAAS,SAA6B;CACpC,MAAM,CAAC,YAAY,iBAAiB,SAAkB,SAAS;CAC/D,MAAM,GAAG,eAAe,SAAS,EAAE;CACnC,MAAM,EAAE,WAAW,WAAW;AAG9B,iBAAgB;EACd,MAAM,iBAAuB,aAAa,MAAM,IAAI,EAAE;AACtD,YAAU,KAAK,SAAS;AACxB,eAAa;GACX,MAAM,MAAM,UAAU,QAAQ,SAAS;AACvC,OAAI,QAAQ,GAAI,WAAU,OAAO,KAAK,EAAE;;IAEzC,EAAE,CAAC;AAGN,WAAU,OAAO,QAAQ;AACvB,UAAQ,OAAR;GACE,KAAK;AACH,kBAAc,SAAS;AACvB;GAEF,KAAK;AACH,kBAAc,YAAY;AAC1B;GAEF,KAAK;AACH,kBAAc,SAAS;AACvB;GAEF,QACE,KAAI,UAAU,OAAQ,IAAI,QAAQ,UAAU,IAC1C,SAAQ,KAAK,EAAE;;GAIrB;CAEF,MAAM,aAAa,MAAM,KAAK,SAAS,eAAe,QAAQ,CAAC;CAC/D,MAAM,gBAAgB,SAAS;CAC/B,MAAM,YAAY,SAAS;CAE3B,MAAM,SAAS;EACb,QAAQ,WAAW;EACnB,WAAW,cAAc;EACzB,QAAQ,UAAU;EACnB;CAGD,MAAM,iBAAiB,OAAO,QAAQ;CAGtC,MAAM,gBAAgB,iBAFD,IACA,IACgD;AAErE,QACE,qBAAC;EAAI,eAAc;EAAS,QAAQ;;GAClC,oBAAC;IAAsB;IAAoB;KAAU;GACrD,oBAAC;IACC,eAAc;IACd,QAAQ;IACR,aAAY;IACZ,UAAU;IACV,UAAS;cAET,oBAAC;KACa;KACA;KACG;KACJ;KACI;MACf;KACE;GACN,oBAAC;IAAI,UAAU;cACb,qBAAC;KAAK;;MAAS;MACU,OAAO;MAAO;MAAe,OAAO;MAAW;MAAI;MAC/D,OAAO;;MACb;KACH;;GACF;;;;;;AAQV,IAAa,qBAAb,MAAuD;CACrD,AAAQ,cAAgD;CACxD,AAAQ,aAAa;CAErB,YAAY,SAAmC;AAC7C,MAAI,SAAS,eAAe,OAC1B,MAAK,aAAa,QAAQ;;CAI9B,QAAc;AACZ,MAAI,KAAK,YAAa;AAEtB,OAAK,cAAc,OAAO,oBAAC,WAAS,EAAE,EAErC,CAAC;;CAGJ,eAAe,SAA+B;AAC5C,WAAS,eAAe,IAAI,QAAQ,IAAI,EAAE,GAAG,SAAS,CAAC;AACvD,mBAAiB;;CAGnB,gBAAgB,IAAY,QAA6B;EACvD,MAAM,UAAU,SAAS,eAAe,IAAI,GAAG;AAC/C,MAAI,CAAC,QAAS;AAEd,SA
AO,OAAO,SAAS,OAAO;AAC9B,mBAAiB;;CAGnB,kBAAkB,SAA+B;AAC/C,WAAS,eAAe,OAAO,QAAQ,GAAG;AAK1C,MAFE,QAAQ,WAAW,YAAY,QAAQ,cAAc,MAAM,KAEhD;AACX,YAAS,cAAc,KAAK,EAAE,GAAG,SAAS,CAAC;AAE3C,UAAO,SAAS,cAAc,SAAS,KAAK,WAC1C,UAAS,cAAc,OAAO;;AAIlC,WAAS,kBAAkB,KAAK,EAAE,GAAG,SAAS,CAAC;AAE/C,SAAO,SAAS,kBAAkB,SAAS,KAAK,WAC9C,UAAS,kBAAkB,OAAO;AAGpC,mBAAiB;;CAGnB,UAAgB;AACd,MAAI,KAAK,aAAa;AACpB,QAAK,YAAY,SAAS;AAC1B,QAAK,cAAc;;AAErB,WAAS,eAAe,OAAO;AAC/B,WAAS,oBAAoB,EAAE;AAC/B,WAAS,gBAAgB,EAAE;;;;;;AC7X/B,SAAS,aAAqB;AAC5B,QAAO,KAAK,KAAK,CAAC,SAAS,GAAG,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;;AAUzE,IAAM,iBAAN,MAAqB;CACnB,AAAQ,2BAAwC,IAAI,KAAK;CACzD,AAAQ,WAA+B;CACvC,AAAQ,iBAAwC,EAAE;CAClD,AAAQ,cAAc;CACtB,AAAQ,qBAAqB;CAE7B,YAAY,UAAoC;AAC9C,OAAK,WAAW;;CAGlB,WAAW,SAGF;AACP,MAAI,QAAQ,gBAAgB,OAC1B,MAAK,cAAc,QAAQ;AAE7B,MAAI,QAAQ,uBAAuB,OACjC,MAAK,qBAAqB,QAAQ;;;;;;CAQtC,aAAa,SAAsC;EACjD,MAAM,KAAK,YAAY;EACvB,MAAMC,UAA0B;GAC9B;GACA,QAAQ,QAAQ;GAChB,MAAM,QAAQ;GACd,OAAO,QAAQ;GACf,WAAW,KAAK,KAAK;GACrB,QAAQ;GACR,iBAAiB,QAAQ;GAC1B;AAED,OAAK,SAAS,IAAI,IAAI,QAAQ;AAC9B,OAAK,UAAU,eAAe,QAAQ;AAEtC,SAAO;;;;;CAMT,cAAc,IAAY,QAA6B;EACrD,MAAM,UAAU,KAAK,SAAS,IAAI,GAAG;AACrC,MAAI,CAAC,QAAS;AAEd,MAAI,OAAO,WAAW,OAAW,SAAQ,SAAS,OAAO;AACzD,MAAI,OAAO,eAAe,OAAW,SAAQ,aAAa,OAAO;AACjE,MAAI,OAAO,eAAe,OAAW,SAAQ,aAAa,OAAO;AACjE,MAAI,OAAO,gBAAgB,OACzB,SAAQ,cAAc,OAAO;AAC/B,MAAI,OAAO,iBAAiB,OAC1B,SAAQ,eAAe,OAAO;AAChC,MAAI,OAAO,UAAU,OAAW,SAAQ,QAAQ,OAAO;AACvD,MAAI,OAAO,kBAAkB,OAC3B,SAAQ,gBAAgB,OAAO;AAEjC,OAAK,UAAU,gBAAgB,IAAI,OAAO;;;;;CAM5C,gBACE,IACA,YACA,OACM;EACN,MAAM,UAAU,KAAK,SAAS,IAAI,GAAG;AACrC,MAAI,CAAC,QAAS;AAEd,UAAQ,SACN,cAAc,OAAO,aAAa,MAAM,cAAc;AACxD,UAAQ,aAAa;AACrB,UAAQ,aAAa,KAAK,KAAK,GAAG,QAAQ;AAE1C,MAAI,OAAO;AACT,WAAQ,cAAc,MAAM;AAC5B,WAAQ,eAAe,MAAM;;AAG/B,OAAK,UAAU,kBAAkB,QAAQ;AAGzC,OAAK,SAAS,OAAO,GAAG;AACxB,OAAK,eAAe,KAAK,QAAQ;AAGjC,SAAO,KAAK,eAAe,SAAS,KAAK,YACvC,MAAK,eAAe,OAAO;AAI7B,mBAAiB;GACf,MAAM,MAAM,KAAK,eAAe,QAAQ,QAAQ;AAChD,OAAI,QAAQ,GACV,MAAK,eAAe,OAAO,KAAK,EAAE;KAEnC,KAAK,mBAAmB;;;;;CAM7B,YAAY,IAAY,OAAqB;EAC3C,MAAM,UAAU,KAAK,SAAS,IAAI,GAAG;AACrC,MAAI,CAAC,QAAS;AAEd,UAAQ,SAAS;AACjB,UAAQ,QAAQ;AAChB,UAAQ,aAAa,KAAK,KAAK,GAAG,QAAQ;AAE1C,OAAK,UAAU,kBAAkB,QAAQ;AAEzC,OAAK,SAAS,OAAO,GAAG;AACxB,OAAK,eAAe,KAAK,QAAQ;AAEjC,SAAO,KAAK,eAAe,SAAS,KAAK,YACvC,MAAK,eAAe,OAAO;;;;;CAO/B,oBAA2C;AACzC,SAAO,MAAM,KAAK,KAAK,SAAS,QAAQ,CAAC;;;;;CAM3C,uBAA8C;AAC5C,SAAO,CAAC,GAAG,KAAK,eAAe;;;;;CAMjC,WAAW,IAAwC;AACjD,SAAO,KAAK,SAAS,IAAI,GAAG;;;;;CAM9B,QAAc;AACZ,OAAK,SAAS,OAAO;AACrB,OAAK,iBAAiB,EAAE;;;AAK5B,MAAa,iBAAiB,IAAI,gBAAgB;;;;;;;;;;;ACjKlD,SAAgB,YAA+B;AAC7C,QAAO,OAAO,GAAY,SAAe;EACvC,MAAM,SAAS,EAAE,IAAI;EACrB,MAAMC,SAAO,EAAE,IAAI;EAGnB,MAAM,kBAAkBA,OAAK,WAAW,WAAW;EAGnD,MAAM,aAAa,eAAe,aAAa;GAC7C;GACA;GACA,OAAO;GACP;GACD,CAAC;AAGF,IAAE,IAAI,cAAc,WAAW;AAE/B,MAAI;AACF,SAAM,MAAM;AAQZ,QALoB,EAAE,IAAI,QAAQ,IAAI,eAAe,IAAI,IACzB,SAAS,oBAAoB,CAK3D;GAIF,MAAM,SAAS,EAAE,IAAI;GAGrB,MAAM,cAAc,EAAE,IAAI,QAAQ,IAAI,iBAAiB;GACvD,MAAM,eAAe,EAAE,IAAI,QAAQ,IAAI,kBAAkB;GACzD,MAAM,QAAQ,EAAE,IAAI,QAAQ,IAAI,UAAU;AAG1C,OAAI,OAAO;IACT,MAAM,UAAU,eAAe,WAAW,WAAW;AACrD,QAAI,QACF,SAAQ,QAAQ;;AAIpB,kBAAe,gBACb,YACA,QACA,eAAe,eACb;IACE,aAAa,OAAO,SAAS,aAAa,GAAG;IAC7C,cAAc,OAAO,SAAS,cAAc,GAAG;IAChD,GACD,OACH;WACM,OAAO;AACd,kBAAe,YACb,YACA,iBAAiB,QAAQ,MAAM,UAAU,gBAC1C;AACD,SAAM;;;;;;;;;;;ACnDZ,SAAgB,QAAQ,SAAiD;CACvE,MAAM,UAAU,SAAS,WAAW,QAAQ,OAAO;CACnD,MAAM,OAAO,SAAS,QAAQ;AAE9B,KAAI,QACF,KAAI,SAAS,cAAc;EACzB,MAAM,WAAW,IAAI,mBAAmB,EACtC,YAAY,SAAS,eAAe,KACrC,CAAC;AACF,iBAAe,YAAY,SAAS;AACpC,WAAS,OAAO;QACX;EACL,MAAM,WAAW,IAAI,iBAAiB;AACtC,iBAAe,YAAY,SAAS;;AAIxC,KACE,SAAS,gBAAgB,UACtB,SAAS,uBAAuB,OAE
nC,gBAAe,WAAW;EACxB,aAAa,QAAQ;EACrB,oBAAoB,QAAQ;EAC7B,CAAC;;;;;AC9CN,MAAa,gBAAgB,YAAY;AAKvC,KAAI,CAJa,MAAM,QAAQ,OAAO,4BAA4B,EAChE,MAAM,WACP,CAAC,CAGA,OAAM,IAAI,UACR,oBACA,KACA,KAAK,UAAU,EAAE,SAAS,oBAAoB,CAAC,CAChD;;;;;ACJL,MAAM,eAAe;CACnB,kBAAkB,OAAO;CACzB,mBAAmB,OAAO;CAC1B,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACzB;AAUD,MAAM,gCAAgB,IAAI,KAAsB;;;;AAKhD,MAAM,4BACJ,WACA,SACA,cACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,YAAY,WAAW;AAChC,YAAU,UAAU;AACpB,YAAU,QAAQ,OAAO,KAAK,UAAU,SAAS,CAAC,CAAC;;AAErD,WAAU,UAAU;AACpB,QAAO;;;;;AAMT,MAAM,+BACJ,cACA,YACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,SAAS,YAGhB,WAAU,QAAQ,OAAO,KAAK,UAAU,IAAI,CAAC,SAAS;UAC7C,KAAK,KACd,WAAU,QAAQ,OAAO,KAAK,KAAK,CAAC;AAGxC,QAAO;;;;;AAMT,MAAM,0BACJ,SACA,SACA,cACW;CAGX,MAAM,mBAAmB;CAEzB,MAAM,gBAAgB;CACtB,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,EAAE;AAClD,MAAI,OAAO,UAAU,SACnB,WAAU,QAAQ,OAAO,MAAM,CAAC;AAElC,MAAI,QAAQ,OACV,WAAU;AAEZ,MAAI,QAAQ,aACV,WAAU,yBACR,OACA,SACA,UACD;AAEH,MAAI,QAAQ,aAAa,MAAM,QAAQ,MAAM,CAC3C,WAAU,4BACR,OACA,QACD;;AAGL,QAAO;;;;;AAMT,MAAM,mBACJ,UACA,SACA,cACW;AACX,KAAI,SAAS,WAAW,EACtB,QAAO;CAET,IAAI,YAAY;AAChB,MAAK,MAAM,WAAW,SACpB,cAAa,uBAAuB,SAAS,SAAS,UAAU;AAGlE,cAAa;AACb,QAAO;;;;;AAMT,MAAM,wBAAwB,OAAO,aAAuC;AAC1E,KAAI,cAAc,IAAI,SAAS,EAAE;EAC/B,MAAM,SAAS,cAAc,IAAI,SAAS;AAC1C,MAAI,OACF,QAAO;;CAIX,MAAM,oBAAoB;AAC1B,KAAI,EAAE,qBAAqB,eAAe;EACxC,MAAM,iBAAkB,MAAM,aAAa,YAAY;AACvD,gBAAc,IAAI,UAAU,eAAe;AAC3C,SAAO;;CAGT,MAAM,iBAAkB,MAAM,aAAa,oBAAoB;AAC/D,eAAc,IAAI,UAAU,eAAe;AAC3C,QAAO;;;;;AAMT,MAAa,yBAAyB,UAAyB;AAC7D,QAAO,MAAM,aAAa,aAAa;;;;;;;;;;;;AAazC,MAAM,qBAAqB,UAAiB;AAC1C,QAAO,MAAM,OAAO,mBAAmB,MAAM,OAAO,UAChD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV,GACD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV;;;;;AAMP,MAAM,4BACJ,KACA,MACA,YAIW;CACX,MAAM,EAAE,SAAS,cAAc;CAC/B,IAAI,SAAS,UAAU;AAGvB,KAAI,OAAO,SAAS,YAAY,SAAS,KACvC,QAAO;CAIT,MAAM,QAAQ;CAOd,MAAM,YAAY;CAClB,MAAM,YAAY,MAAM,QAAQ;CAChC,IAAI,YAAY,MAAM,eAAe;AAGrC,KAAI,MAAM,QAAQ,MAAM,QAAQ,MAAM,KAAK,EAAE;AAC3C,YAAU,UAAU;AACpB,OAAK,MAAM,QAAQ,MAAM,MAAM;AAC7B,aAAU,UAAU;AACpB,aAAU,QAAQ,OAAO,OAAO,KAAK,CAAC,CAAC;;;AAK3C,KAAI,UAAU,SAAS,IAAI,CACzB,aAAY,UAAU,MAAM,GAAG,GAAG;CAIpC,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG;AAC1C,WAAU,QAAQ,OAAO,KAAK,CAAC;CAG/B,MAAM,eAAe,IAAI,IAAI;EAAC;EAAQ;EAAe;EAAO,CAAC;AAC7D,MAAK,MAAM,gBAAgB,OAAO,KAAK,MAAM,CAC3C,KAAI,CAAC,aAAa,IAAI,aAAa,EAAE;EACnC,MAAM,gBAAgB,MAAM;EAC5B,MAAM,eACJ,OAAO,kBAAkB,WAAW,gBAClC,KAAK,UAAU,cAAc;AAEjC,YAAU,QAAQ,OAAO,GAAG,aAAa,GAAG,eAAe,CAAC;;AAIhE,QAAO;;;;;AAMT,MAAM,6BACJ,YACA,SACA,cACW;AACX,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;CAGT,MAAM,SAAS;CACf,IAAI,SAAS;AAEb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,QAAQ,cAAc;EACxB,MAAM,aAAa;AACnB,MAAI,OAAO,KAAK,WAAW,CAAC,SAAS,GAAG;AACtC,aAAU,UAAU;AACpB,QAAK,MAAM,WAAW,OAAO,KAAK,WAAW,CAC3C,WAAU,yBAAyB,SAAS,WAAW,UAAU;IAC/D;IACA;IACD,CAAC;;QAGD;EACL,MAAM,YACJ,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,MAAM;AAC3D,YAAU,QAAQ,OAAO,GAAG,IAAI,GAAG,YAAY,CAAC;;AAIpD,QAAO;;;;;AAMT,MAAM,uBACJ,MACA,SACA,cACW;CACX,IAAI,SAAS,UAAU;CACvB,MAAM,OAAO,KAAK;CAClB,MAAM,QAAQ,KAAK;CACnB,IAAI,QAAQ,KAAK,eAAe;AAChC,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,MAAM,GAAG,GAAG;CAE5B,MAAM,OAAO,QAAQ,MAAM;AAC3B,WAAU,QAAQ,OAAO,KAAK,CAAC;AAC/B,KACE,OAAO,KAAK,eAAe,YACxB,KAAK,eAAe,KAEvB,WAAU,0BAA0B,KAAK,YAAY,SAAS,UAAU;AAE1E,QAAO;;;;;AAMT,MAAa,qBACX,OACA,SACA,cACW;CACX,IAAI,iBAAiB;AACrB,MAAK,MAAM,QAAQ,MACjB,mBAAkB,oBAAoB,MAAM,SAAS,UAAU;AAEjE,mBAAkB,UAAU;AAC5B,QAAO;;;;;AAMT,MAAa,gBAAgB,OAC3B,SACA,UAC+C;CAE/C,MAAM,YAAY,sBAAsB,MAAM;CAG9C,MAAM,UAAU,MAAM,sBAAsB,UAAU;CAEtD,MAAM,qBAAqB,QAAQ;C
ACnC,MAAM,gBAAgB,mBAAmB,QACtC,QAAQ,IAAI,SAAS,YACvB;CACD,MAAM,iBAAiB,mBAAmB,QACvC,QAAQ,IAAI,SAAS,YACvB;CAED,MAAM,YAAY,kBAAkB,MAAM;CAC1C,IAAI,cAAc,gBAAgB,eAAe,SAAS,UAAU;AACpE,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,EAC1C,gBAAe,kBAAkB,QAAQ,OAAO,SAAS,UAAU;CAErE,MAAM,eAAe,gBAAgB,gBAAgB,SAAS,UAAU;AAExE,QAAO;EACL,OAAO;EACP,QAAQ;EACT;;;;;ACpVH,MAAMC,iBAAoC;CACxC,cAAc;CACd,qBAAqB;CACtB;;;;;AAoBD,eAAsB,qBACpB,SACA,OACA,sBAAsB,IAC8C;CAEpE,MAAM,iBADa,MAAM,cAAc,SAAS,MAAM,EACrB;CACjC,MAAM,WAAW,MAAM,aAAa,OAAO,qBAAqB;CAEhE,MAAM,QAAQ,KAAK,MAAM,YAAY,IAAI,sBAAsB,KAAK;AAEpE,QAAO;EACL,QAAQ,gBAAgB;EACxB;EACA;EACD;;;;;;AAOH,SAAS,sBAAsB,SAA0B;CACvD,IAAI,OAAO;AACX,KAAI,OAAO,QAAQ,YAAY,SAC7B,QAAO,QAAQ;UACN,MAAM,QAAQ,QAAQ,QAAQ,EACvC;OAAK,MAAM,QAAQ,QAAQ,QACzB,KAAI,KAAK,SAAS,OAChB,SAAQ,KAAK;WACJ,eAAe,KAExB,SAAQ,KAAK,UAAU;;AAM7B,KAAI,QAAQ,WACV,SAAQ,KAAK,UAAU,QAAQ,WAAW;AAI5C,QAAO,KAAK,KAAK,KAAK,SAAS,EAAE,GAAG;;;;;AAMtC,SAAS,sBAAsB,UAG7B;CACA,MAAMC,iBAAiC,EAAE;CACzC,IAAI,IAAI;AAER,QAAO,IAAI,SAAS,QAAQ;EAC1B,MAAM,MAAM,SAAS;AACrB,MAAI,IAAI,SAAS,YAAY,IAAI,SAAS,aAAa;AACrD,kBAAe,KAAK,IAAI;AACxB;QAEA;;AAIJ,QAAO;EACL;EACA,mBAAmB,SAAS,MAAM,EAAE;EACrC;;;;;;AAOH,SAAS,kBACP,UACA,cACA,cACQ;CACR,MAAM,kBAAkB,eAAe,eAAe;CAEtD,IAAI,oBAAoB;AAGxB,MAAK,IAAI,IAAI,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;EAC7C,MAAM,YAAY,sBAAsB,SAAS,GAAG;AACpD,MAAI,oBAAoB,YAAY,gBAElC,QAAO,IAAI;AAEb,uBAAqB;;AAIvB,QAAO;;;;;AAMT,SAAS,qBAAqB,gBAAwC;AACpE,QAAO,eAAe,QACnB,KAAK,QAAQ,MAAM,sBAAsB,IAAI,EAC9C,EACD;;;;;AAMH,SAAS,uBAAuB,cAA+B;AAC7D,QAAO;EACL,MAAM;EACN,SAAS,uBAAuB,aAAa;EAC9C;;;;;;AAOH,eAAsB,YACpB,SACA,OACA,SAAqC,EAAE,EACX;CAC5B,MAAM,MAAM;EAAE,GAAG;EAAgB,GAAG;EAAQ;CAI5C,MAAM,kBADa,MAAM,cAAc,SAAS,MAAM,EACpB;CAClC,MAAM,WAAW,MAAM,aAAa,OAAO,qBAAqB;CAChE,MAAM,QAAQ,KAAK,MAAM,YAAY,IAAI,IAAI,sBAAsB,KAAK;AAGxE,KAAI,kBAAkB,MACpB,QAAO;EACL;EACA,cAAc;EACd;EACA,iBAAiB;EACjB,qBAAqB;EACtB;AAGH,SAAQ,KACN,iBAAiB,eAAe,2BAA2B,MAAM,iBAClE;CAGD,MAAM,EAAE,gBAAgB,sBAAsB,sBAC5C,QAAQ,SACT;CAED,MAAM,eAAe,qBAAqB,eAAe;AACzD,SAAQ,MACN,iBAAiB,eAAe,OAAO,qBAAqB,aAAa,UAC1E;CAGD,MAAM,kBAAkB,KAAK,IAAI,IAAI,cAAc,MAAM;CAGzD,MAAM,gBAAgB,kBACpB,mBACA,iBACA,aACD;AAGD,KAAI,kBAAkB,GAAG;AACvB,UAAQ,KACN,gFACD;AACD,SAAO;GACL;GACA,cAAc;GACd;GACA,iBAAiB;GACjB,qBAAqB;GACtB;;CAGH,MAAM,kBAAkB,kBAAkB,MAAM,GAAG,cAAc;CACjE,MAAM,oBAAoB,kBAAkB,MAAM,cAAc;AAEhE,SAAQ,KACN,0BAA0B,gBAAgB,OAAO,qBAAqB,kBAAkB,SACzF;CAGD,MAAM,mBAAmB,uBAAuB,gBAAgB,OAAO;CAGvE,MAAMC,aAAqC;EACzC,GAAG;EACH,UAAU;GAAC,GAAG;GAAgB;GAAkB,GAAG;GAAkB;EACtE;CAGD,MAAM,gBAAgB,MAAM,cAAc,YAAY,MAAM;AAE5D,SAAQ,KACN,8BAA8B,eAAe,MAAM,cAAc,MAAM,SACxE;AAGD,KAAI,cAAc,QAAQ,OAAO;AAC/B,UAAQ,KACN,mCAAmC,cAAc,MAAM,KAAK,MAAM,sCACnE;EAGD,MAAM,mBAAmB,KAAK,MAAM,kBAAkB,GAAI;AAC1D,MAAI,mBAAmB,KAAO;AAC5B,WAAQ,MAAM,sDAAsD;AACpE,UAAO;IACL,SAAS;IACT,cAAc;IACd;IACA,iBAAiB,cAAc;IAC/B,qBAAqB,gBAAgB;IACtC;;AAGH,SAAO,YAAY,SAAS,OAAO;GACjC,GAAG;GACH,cAAc;GACf,CAAC;;AAGJ,QAAO;EACL,SAAS;EACT,cAAc;EACd;EACA,iBAAiB,cAAc;EAC/B,qBAAqB,gBAAgB;EACtC;;;;;AAMH,SAAgB,uBAAuB,QAAmC;AACxE,KAAI,CAAC,OAAO,aAAc,QAAO;CAEjC,MAAM,YAAY,OAAO,iBAAiB,OAAO;CACjD,MAAM,aAAa,KAAK,MAAO,YAAY,OAAO,iBAAkB,IAAI;AAExE,QACE,6BAA6B,OAAO,oBAAoB,qBACnD,OAAO,eAAe,KAAK,OAAO,gBAAgB,WAAW,WAAW;;;;;AC1RjF,IAAM,eAAN,MAAmB;CACjB,AAAQ,QAAuC,EAAE;CACjD,AAAQ,aAAa;CACrB,AAAQ,kBAAkB;CAE1B,MAAM,QACJ,SACA,kBACY;AACZ,SAAO,IAAI,SAAS,SAAS,WAAW;AACtC,QAAK,MAAM,KAAK;IACL;IACA;IACT;IACD,CAAC;AAEF,OAAI,KAAK,MAAM,SAAS,GAAG;IACzB,MAAM,WAAW,KAAK,MAAM;IAC5B,MAAM,WAAW,KAAK,MAAM,WAAW,KAAK,iBAAiB;AAE7D,KADY,WAAW,KAAK,QAAQ,OAAO,QAAQ,MAEjD,wCAAwC,SAAS,KAAK,SAAS,SAChE;;AAGH,GAAK,KAAK,aAAa,iBAAiB;IACxC;;CAGJ,MAAc,aAAa,kBAAyC;AAClE,MAAI,KAAK,WAAY;AACrB,OAAK,aAAa;AAElB,SAAO,KAA
K,MAAM,SAAS,GAAG;GAE5B,MAAM,YADM,KAAK,KAAK,GACE,KAAK;GAC7B,MAAM,aAAa,mBAAmB;AAEtC,OAAI,KAAK,kBAAkB,KAAK,YAAY,YAAY;IACtD,MAAM,SAAS,aAAa;IAC5B,MAAM,UAAU,KAAK,KAAK,SAAS,IAAK;AAExC,KADY,UAAU,KAAK,QAAQ,OAAO,QAAQ,MAC9C,uBAAuB,QAAQ,0BAA0B;AAC7D,UAAM,IAAI,SAAS,YAAY,WAAW,SAAS,OAAO,CAAC;;GAG7D,MAAM,UAAU,KAAK,MAAM,OAAO;AAClC,OAAI,CAAC,QAAS;AAEd,QAAK,kBAAkB,KAAK,KAAK;AAEjC,OAAI;IACF,MAAM,SAAS,MAAM,QAAQ,SAAS;AACtC,YAAQ,QAAQ,OAAO;YAChB,OAAO;AACd,YAAQ,OAAO,MAAM;;;AAIzB,OAAK,aAAa;;CAGpB,IAAI,SAAiB;AACnB,SAAO,KAAK,MAAM;;;AAItB,MAAM,eAAe,IAAI,cAAc;;;;;AAMvC,eAAsB,qBACpB,SACA,SACY;AAEZ,KAAIC,QAAM,qBAAqB,OAC7B,QAAO,SAAS;AAGlB,QAAO,aAAa,QAAQ,SAASA,QAAM,iBAAiB;;;;;ACvF9D,MAAa,wBAAwB,OACnC,YACG;AACH,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,QAAQ,SAAS,MACnC,MACC,OAAO,EAAE,YAAY,YAClB,EAAE,SAAS,MAAM,QAAMC,IAAE,SAAS,YAAY,CACpD;CAID,MAAM,cAAc,QAAQ,SAAS,MAAM,QACzC,CAAC,aAAa,OAAO,CAAC,SAAS,IAAI,KAAK,CACzC;CAGD,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,oBAAoB;EACxE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,qCAAqC,SAAS;AAC5D,QAAM,MAAM,UAAU,aACpB,qCACA,SACD;;AAGH,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;ACX/B,eAAsBC,mBAAiB,GAAY;CACjD,MAAM,YAAY,KAAK,KAAK;CAC5B,MAAM,kBAAkB,MAAM,EAAE,IAAI,MAA8B;AAClE,SAAQ,MAAM,oBAAoB,KAAK,UAAU,gBAAgB,CAAC,MAAM,KAAK,CAAC;CAG9E,MAAM,aAAa,EAAE,IAAI,aAAa;AACtC,sBAAmB,YAAY,gBAAgB,MAAM;CAerD,MAAMC,MAAuB;EAAE,WAZb,cAAc,UAAU;GACxC,OAAO,gBAAgB;GACvB,UAAU,sBAAsB,gBAAgB,SAAS;GACzD,QAAQ,gBAAgB,UAAU;GAClC,OAAO,gBAAgB,OAAO,KAAK,OAAO;IACxC,MAAM,EAAE,SAAS;IACjB,aAAa,EAAE,SAAS;IACzB,EAAE;GACH,YAAY,gBAAgB,cAAc;GAC1C,aAAa,gBAAgB,eAAe;GAC7C,CAAC;EAEwC;EAAY;EAAW;CAGjE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,gBAAgB,MACzC;AAGD,OAAM,cAAc,iBAAiB,cAAc;CAGnD,MAAM,EAAE,cAAc,kBAAkB,MAAMC,oBAC5C,iBACA,cACD;AACD,KAAI,cACF,KAAI,gBAAgB;CAGtB,MAAM,UACJ,UAAU,aAAa,WAAW,GAChC;EACE,GAAG;EACH,YAAY,eAAe,aAAa,OAAO;EAChD,GACD;AAEJ,KAAI,UAAU,gBAAgB,WAAW,CACvC,SAAQ,MAAM,sBAAsB,KAAK,UAAU,QAAQ,WAAW,CAAC;AAGzE,KAAI,MAAM,cAAe,OAAM,eAAe;AAE9C,KAAI;EACF,MAAM,WAAW,MAAM,qBAAqB,aAC1C,sBAAsB,QAAQ,CAC/B;AAED,MAAIC,iBAAe,SAAS,CAC1B,QAAOC,6BAA2B,GAAG,UAAU,IAAI;AAGrD,UAAQ,MAAM,qBAAqB;AACnC,wBAAoB,YAAY,YAAY;AAE5C,SAAO,UAAU,GAAG,OAAO,WAAW;AACpC,SAAMC,0BAAwB;IAAE;IAAQ;IAAU;IAAS;IAAK,CAAC;IACjE;UACK,OAAO;AACd,wBAAoB,KAAK,QAAQ,OAAO,MAAM;AAC9C,QAAM;;;AAKV,eAAeH,oBACb,SACA,OAIC;AACD,KAAI,CAAC,MAAM,eAAe,CAAC,OAAO;AAChC,MAAI,MAAM,eAAe,CAAC,MACxB,SAAQ,KACN,wBAAwB,QAAQ,MAAM,wCACvC;AAEH,SAAO;GAAE,cAAc;GAAS,eAAe;GAAM;;AAGvD,KAAI;EACF,MAAM,QAAQ,MAAM,qBAAqB,SAAS,MAAM;AACxD,UAAQ,MACN,uBAAuB,MAAM,cAAc,iBAAiB,MAAM,MAAM,YAAY,MAAM,SAC3F;AACD,MAAI,CAAC,MAAM,OACT,QAAO;GAAE,cAAc;GAAS,eAAe;GAAM;AAGvD,UAAQ,KACN,2BAA2B,MAAM,cAAc,YAAY,MAAM,MAAM,QACxE;EACD,MAAM,gBAAgB,MAAM,YAAY,SAAS,MAAM;AACvD,SAAO;GAAE,cAAc,cAAc;GAAS;GAAe;UACtD,OAAO;AACd,UAAQ,KACN,0DACA,MACD;AACD,SAAO;GAAE,cAAc;GAAS,eAAe;GAAM;;;AAKzD,eAAe,cACb,SACA,eACA;AACA,KAAI;AACF,MAAI,eAAe;GACjB,MAAM,aAAa,MAAM,cACvB,SACA,cACD;AACD,WAAQ,MAAM,wBAAwB,WAAW;QAEjD,SAAQ,MAAM,sDAAsD;UAE/D,OAAO;AACd,UAAQ,MAAM,oCAAoC,MAAM;;;AAK5D,SAASI,qBAAmB,YAAgC,OAAe;AACzE,KAAI,CAAC,WAAY;CACjB,MAAM,UAAU,eAAe,WAAW,WAAW;AACrD,KAAI,QAAS,SAAQ,QAAQ;;AAI/B,SAASC,sBACP,YACA,QACA;AACA,KAAI,CAAC,WAAY;AACjB,gBAAe,cAAc,YAAY,EAAE,QAAQ,CAAC;;AAItD,SAASC,sBACP,KACA,OACA,OACA;AACA,gBACE,IAAI,WACJ;EACE,SAAS;EACT;EACA,OAAO;GAAE,cAAc;GAAG,eAAe;GAAG;EAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;EAChD,SAAS;EACV,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAIH,SAASJ,6BACP,GACA,kBACA,KACA;AACA,SAAQ,MAAM,2BAA2B,KAAK,UAAU,iBAAiB,CAAC;CAG1E
,IAAI,WAAW;AACf,KAAI,IAAI,eAAe,gBAAgB,SAAS,QAAQ,IAAI,QAAQ,SAAS;EAC3E,MAAM,SAAS,uBAAuB,IAAI,cAAc;AACxD,aAAW;GACT,GAAG;GACH,SAAS,SAAS,QAAQ,KAAK,UAAQ,MACrC,MAAM,IACJ;IACE,GAAGK;IACH,SAAS;KACP,GAAGA,SAAO;KACV,UAAUA,SAAO,QAAQ,WAAW,MAAM;KAC3C;IACF,GACDA,SACH;GACF;;CAGH,MAAM,SAAS,SAAS,QAAQ;CAChC,MAAM,QAAQ,SAAS;AAEvB,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,SAAS;EAChB,OAAO;GACL,cAAc,OAAO,iBAAiB;GACtC,eAAe,OAAO,qBAAqB;GAC5C;EACD,aAAa,OAAO;EACpB,SAAS,qBAAqB,OAAO;EACrC,WAAW,iBAAiB,OAAO;EACpC,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;AAED,KAAI,IAAI,cAAc,MACpB,gBAAe,cAAc,IAAI,YAAY;EAC3C,aAAa,MAAM;EACnB,cAAc,MAAM;EACrB,CAAC;AAGJ,QAAO,EAAE,KAAK,SAAS;;AAIzB,SAAS,qBAAqB,QAA8C;AAC1E,QAAO;EACL,MAAM,OAAO,QAAQ;EACrB,SACE,OAAO,OAAO,QAAQ,YAAY,WAChC,OAAO,QAAQ,UACf,KAAK,UAAU,OAAO,QAAQ,QAAQ;EAC1C,YAAY,OAAO,QAAQ,YAAY,KAAK,QAAQ;GAClD,IAAI,GAAG;GACP,MAAM,GAAG;GACT,UAAU;IAAE,MAAM,GAAG,SAAS;IAAM,WAAW,GAAG,SAAS;IAAW;GACvE,EAAE;EACJ;;AAIH,SAAS,iBAAiB,QAA8C;AACtE,QAAO,OAAO,QAAQ,YAAY,KAAK,QAAQ;EAC7C,IAAI,GAAG;EACP,MAAM,GAAG,SAAS;EAClB,OAAO,GAAG,SAAS;EACpB,EAAE;;AAcL,SAAS,0BAA6C;AACpD,QAAO;EACL,OAAO;EACP,aAAa;EACb,cAAc;EACd,cAAc;EACd,SAAS;EACT,WAAW,EAAE;EACb,6BAAa,IAAI,KAAK;EACvB;;AAYH,eAAeJ,0BAAwB,MAAwB;CAC7D,MAAM,EAAE,QAAQ,UAAU,SAAS,QAAQ;CAC3C,MAAM,MAAM,yBAAyB;AAErC,KAAI;AACF,aAAW,MAAM,SAAS,UAAU;AAClC,WAAQ,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AACxD,oBAAiB,OAAO,IAAI;AAC5B,SAAM,OAAO,SAAS,MAAoB;;AAI5C,MAAI,IAAI,eAAe,cAAc;GACnC,MAAM,SAAS,uBAAuB,IAAI,cAAc;GACxD,MAAMK,cAAmC;IACvC,IAAI,kBAAkB,KAAK,KAAK;IAChC,QAAQ;IACR,SAAS,KAAK,MAAM,KAAK,KAAK,GAAG,IAAK;IACtC,OAAO,IAAI,SAAS,QAAQ;IAC5B,SAAS,CACP;KACE,OAAO;KACP,OAAO,EAAE,SAAS,QAAQ;KAC1B,eAAe;KACf,UAAU;KACX,CACF;IACF;AACD,SAAM,OAAO,SAAS;IACpB,MAAM,KAAK,UAAU,YAAY;IACjC,OAAO;IACR,CAAC;AACF,OAAI,WAAW;;AAGjB,sBAAoB,KAAK,QAAQ,OAAO,IAAI;AAC5C,qBAAiB,IAAI,YAAY,IAAI,aAAa,IAAI,aAAa;UAC5D,OAAO;AACd,oBAAkB;GAAE;GAAK,eAAe,QAAQ;GAAO;GAAK;GAAO,CAAC;AACpE,iBAAa,IAAI,YAAY,MAAM;AACnC,QAAM;;;AAKV,SAAS,iBAAiB,OAA0B,KAAwB;AAC1E,KAAI,CAAC,MAAM,QAAQ,MAAM,SAAS,SAAU;AAE5C,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,MAAM,KAAK;AACrC,kBAAgB,QAAQ,IAAI;AAC5B,kBAAgB,QAAQ,IAAI;AAC5B,mBAAiB,OAAO,QAAQ,IAAI,IAAI;SAClC;;AAKV,SAAS,gBAAgB,QAA6B,KAAwB;AAC5E,KAAI,OAAO,SAAS,CAAC,IAAI,MAAO,KAAI,QAAQ,OAAO;;AAGrD,SAAS,gBAAgB,QAA6B,KAAwB;AAC5E,KAAI,OAAO,OAAO;AAChB,MAAI,cAAc,OAAO,MAAM;AAC/B,MAAI,eAAe,OAAO,MAAM;;;AAIpC,SAAS,iBACP,QACA,KACA;AACA,KAAI,CAAC,OAAQ;AACb,KAAI,OAAO,MAAM,QAAS,KAAI,WAAW,OAAO,MAAM;AACtD,KAAI,OAAO,MAAM,WAAY,qBAAoB,OAAO,MAAM,YAAY,IAAI;AAC9E,KAAI,OAAO,cAAe,KAAI,eAAe,OAAO;;AAGtD,SAAS,oBACP,WAGA,KACA;AACA,KAAI,CAAC,UAAW;AAChB,MAAK,MAAM,MAAM,WAAW;EAC1B,MAAM,MAAM,GAAG;AACf,MAAI,CAAC,IAAI,YAAY,IAAI,IAAI,CAC3B,KAAI,YAAY,IAAI,KAAK;GACvB,IAAI,GAAG,MAAM;GACb,MAAM,GAAG,UAAU,QAAQ;GAC3B,WAAW;GACZ,CAAC;EAEJ,MAAM,OAAO,IAAI,YAAY,IAAI,IAAI;AACrC,MAAI,MAAM;AACR,OAAI,GAAG,GAAI,MAAK,KAAK,GAAG;AACxB,OAAI,GAAG,UAAU,KAAM,MAAK,OAAO,GAAG,SAAS;AAC/C,OAAI,GAAG,UAAU,UAAW,MAAK,aAAa,GAAG,SAAS;;;;AAMhE,SAAS,oBACP,KACA,eACA,KACA;AAEA,MAAK,MAAM,MAAM,IAAI,YAAY,QAAQ,CACvC,KAAI,GAAG,MAAM,GAAG,KAAM,KAAI,UAAU,KAAK,GAAG;CAG9C,MAAM,YAAY,IAAI,UAAU,KAAK,QAAQ;EAC3C,IAAI,GAAG;EACP,MAAM;EACN,UAAU;GAAE,MAAM,GAAG;GAAM,WAAW,GAAG;GAAW;EACrD,EAAE;AAEH,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,IAAI,SAAS;EACpB,OAAO;GAAE,cAAc,IAAI;GAAa,eAAe,IAAI;GAAc;EACzE,aAAa,IAAI,gBAAgB;EACjC,SAAS;GACP,MAAM;GACN,SAAS,IAAI;GACb,YAAY,UAAU,SAAS,IAAI,YAAY;GAChD;EACD,WACE,IAAI,UAAU,SAAS,IACrB,IAAI,UAAU,KAAK,QAAQ;GACzB,IAAI,GAAG;GACP,MAAM,GAAG;GACT,OAAO,GAAG;GACX,EAAE,GACH;EACL,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAIH,SAAS,kBAAkB,MAKxB;CACD,MAAM,EAAE,KAAK,eAAe,KAAK,UAAU;AAC3C,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,IAAI,SAAS;EAC
pB,OAAO;GAAE,cAAc;GAAG,eAAe;GAAG;EAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;EAChD,SAAS;EACV,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAIH,SAASC,mBACP,YACA,aACA,cACA;AACA,KAAI,CAAC,WAAY;AACjB,gBAAe,cAAc,YAAY;EAAE;EAAa;EAAc,CAAC;AACvE,gBAAe,gBAAgB,YAAY,KAAK;EAAE;EAAa;EAAc,CAAC;;AAIhF,SAASC,eAAa,YAAgC,OAAgB;AACpE,KAAI,CAAC,WAAY;AACjB,gBAAe,YACb,YACA,iBAAiB,QAAQ,MAAM,UAAU,eAC1C;;AAGH,MAAMT,oBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;AAG3E,SAAS,sBACP,UACuB;AACvB,QAAO,SAAS,KAAK,QAAQ;EAC3B,MAAMU,SAAyB;GAC7B,MAAM,IAAI;GACV,SACE,OAAO,IAAI,YAAY,WACrB,IAAI,UACJ,KAAK,UAAU,IAAI,QAAQ;GAChC;AAGD,MAAI,gBAAgB,OAAO,IAAI,WAC7B,QAAO,aAAa,IAAI,WAAW,KAAK,QAAQ;GAC9C,IAAI,GAAG;GACP,MAAM,GAAG;GACT,UAAU;IACR,MAAM,GAAG,SAAS;IAClB,WAAW,GAAG,SAAS;IACxB;GACF,EAAE;AAIL,MAAI,kBAAkB,OAAO,IAAI,aAC/B,QAAO,eAAe,IAAI;AAI5B,MAAI,UAAU,OAAO,IAAI,KACvB,QAAO,OAAO,IAAI;AAGpB,SAAO;GACP;;;;;AC/hBJ,MAAa,mBAAmB,IAAI,MAAM;AAE1C,iBAAiB,KAAK,KAAK,OAAO,MAAM;AACtC,KAAI;AACF,SAAO,MAAMC,mBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,mBAAmB,OAAO,YAA8B;AACnE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,cAAc;EAClE,QAAQ;EACR,SAAS,eAAe,MAAM;EAC9B,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,MAAM,UAAU,aAAa,+BAA+B,SAAS;AAE7E,QAAQ,MAAM,SAAS,MAAM;;;;;ACR/B,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;EACF,MAAM,UAAU,MAAM,EAAE,IAAI,MAAwB;EACpD,MAAM,WAAW,MAAM,iBAAiB,QAAQ;AAEhD,SAAO,EAAE,KAAK,SAAS;UAChB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,qBAAqB,IAAI,MAAM;AAI5C,mBAAmB,KAAK,WAAW,MAAM;AACvC,QAAO,EAAE,KAAK,MAAM,IAAI;EACxB;;;;ACQF,SAAgB,iBAAiB,GAAY;AAC3C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,QAAQ,EAAE,IAAI,OAAO;CAC3B,MAAMC,UAAwB;EAC5B,MAAM,MAAM,OAAO,OAAO,SAAS,MAAM,MAAM,GAAG,GAAG;EACrD,OAAO,MAAM,QAAQ,OAAO,SAAS,MAAM,OAAO,GAAG,GAAG;EACxD,OAAO,MAAM,SAAS;EACtB,UAAU,MAAM;EAChB,SAAS,MAAM,UAAU,MAAM,YAAY,SAAS;EACpD,MAAM,MAAM,OAAO,OAAO,SAAS,MAAM,MAAM,GAAG,GAAG;EACrD,IAAI,MAAM,KAAK,OAAO,SAAS,MAAM,IAAI,GAAG,GAAG;EAC/C,QAAQ,MAAM,UAAU;EACxB,WAAW,MAAM,aAAa;EAC/B;CAED,MAAM,SAAS,WAAW,QAAQ;AAClC,QAAO,EAAE,KAAK,OAAO;;AAGvB,SAAgB,eAAe,GAAY;AACzC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;CAC5B,MAAM,QAAQ,SAAS,GAAG;AAE1B,KAAI,CAAC,MACH,QAAO,EAAE,KAAK,EAAE,OAAO,mBAAmB,EAAE,IAAI;AAGlD,QAAO,EAAE,KAAK,MAAM;;AAGtB,SAAgB,oBAAoB,GAAY;AAC9C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;AAGnE,eAAc;AACd,QAAO,EAAE,KAAK;EAAE,SAAS;EAAM,SAAS;EAAmB,CAAC;;AAG9D,SAAgB,eAAe,GAAY;AACzC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,QAAQ,UAAU;AACxB,QAAO,EAAE,KAAK,MAAM;;AAGtB,SAAgB,aAAa,GAAY;AACvC,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,SAAU,EAAE,IAAI,MAAM,SAAS,IAAI;CACzC,MAAM,OAAO,cAAc,OAAO;AAElC,KAAI,WAAW,OAAO;AACpB,IAAE,OAAO,gBAAgB,WAAW;AACpC,IAAE,OAAO,uBAAuB,mCAAmC;QAC9D;AACL,IAAE,OAAO,gBAAgB,mBAAmB;AAC5C,IAAE,OAAO,uBAAuB,oCAAoC;;AAGtE,QAAO,EAAE,KAAK,KAAK;;AAIrB,SAAgB,kBAAkB,GAAY;AAC5C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,SAAS,aAAa;AAC5B,QAAO,EAAE,KAAK,OAAO;;AAGvB,SAAgB,iBAAiB,GAAY;AAC3C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;CAC5B,MAAM,UAAU,WAAW,GAAG;AAE9B,KAAI,CAAC,QACH,QAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,EAAE,IAAI;CAIpD,MAAM,UAAU,kBAAkB,GAAG;AAErC,QAAO,EAAE,KAAK;EACZ,GAAG;EACH;EACD,CAAC;;AAGJ,SAAgB,oBAAoB,GAAY;AAC9C,KAAI,CAAC,kBAAkB,CACrB,QAAO,EAAE,KAAK,EAAE,OAAO,oCAAoC,EAAE,IAAI;CAGnE,MAAM,KAAK,EAAE,IAAI,MAAM,KAAK;AAG5B,KAAI,CAFY
,cAAc,GAAG,CAG/B,QAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,EAAE,IAAI;AAGpD,QAAO,EAAE,KAAK;EAAE,SAAS;EAAM,SAAS;EAAmB,CAAC;;;;;ACpI9D,MAAa,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAtB,MAAa,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAtB,MAAa,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACMxB,SAAgB,eAAuB;AACrC,QAAO;;;;;;WAME,OAAO;;;IAGd,SAAS;YACD,OAAO;;;;;;;ACJnB,MAAa,gBAAgB,IAAI,MAAM;AAGvC,cAAc,IAAI,gBAAgB,iBAAiB;AACnD,cAAc,IAAI,oBAAoB,eAAe;AACrD,cAAc,OAAO,gBAAgB,oBAAoB;AACzD,cAAc,IAAI,cAAc,eAAe;AAC/C,cAAc,IAAI,eAAe,aAAa;AAG9C,cAAc,IAAI,iBAAiB,kBAAkB;AACrD,cAAc,IAAI,qBAAqB,iBAAiB;AACxD,cAAc,OAAO,qBAAqB,oBAAoB;AAG9D,cAAc,IAAI,MAAM,MAAM;AAC5B,QAAO,EAAE,KAAK,cAAc,CAAC;EAC7B;;;;AC7BF,SAAgB,+BACd,cACkC;AAClC,KAAI,iBAAiB,KACnB,QAAO;AAQT,QANsB;EACpB,MAAM;EACN,QAAQ;EACR,YAAY;EACZ,gBAAgB;EACjB,CACoB;;;;;ACevB,MAAM,yBAAyB;AAW/B,SAAS,mBAAmB,UAA0C;CACpE,MAAMC,gBAAgC,EAAE;AAExC,MAAK,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;EACxC,MAAM,UAAU,SAAS;AACzB,gBAAc,KAAK,QAAQ;AAE3B,MACE,QAAQ,SAAS,eACd,QAAQ,cACR,QAAQ,WAAW,SAAS,GAC/B;GAEA,MAAM,qCAAqB,IAAI,KAAa;GAG5C,IAAI,IAAI,IAAI;AACZ,UAAO,IAAI,SAAS,UAAU,SAAS,GAAG,SAAS,QAAQ;IACzD,MAAM,cAAc,SAAS;AAC7B,QAAI,YAAY,aACd,oBAAmB,IAAI,YAAY,aAAa;AAElD;;AAIF,QAAK,MAAM,YAAY,QAAQ,WAC7B,KAAI,CAAC,mBAAmB,IAAI,SAAS,GAAG,EAAE;AACxC,YAAQ,MAAM,sCAAsC,SAAS,KAAK;AAClE,kBAAc,KAAK;KACjB,MAAM;KACN,cAAc,SAAS;KACvB,SAAS;KACV,CAAC;;;;AAMV,QAAO;;AAUT,SAAgB,kBACd,SACmB;CAEnB,MAAMC,kBAAmC;EACvC,qCAAqB,IAAI,KAAK;EAC9B,qCAAqB,IAAI,KAAK;EAC/B;CAED,MAAM,WAAW,mCACf,QAAQ,UACR,QAAQ,QACR,gBACD;AAED,QAAO;EACL,SAAS;GACP,OAAO,mBAAmB,QAAQ,MAAM;GAExC,UAAU,mBAAmB,SAAS;GACtC,YAAY,QAAQ;GACpB,MAAM,QAAQ;GACd,QAAQ,QAAQ;GAChB,aAAa,QAAQ;GACrB,OAAO,QAAQ;GACf,MAAM,QAAQ,UAAU;GACxB,OAAO,gCAAgC,QAAQ,OAAO,gBAAgB;GACtE,aAAa,qCACX,QAAQ,aACR,gBACD;GACF;EACD;EACD;;AAGH,SAAS,mBAAmB,OAAuB;CAGjD,MAAMC,eAAuC;EAC3C,MAAM;EACN,QAAQ;EACR,OAAO;EACR;AAED,KAAI,aAAa,OACf,QAAO,aAAa;AAOtB,KAAI,0BAA0B,KAAK,MAAM,CACvC,QAAO;AAGT,KAAI,wBAAwB,KAAK,MAAM,CACrC,QAAO;AAIT,KAAI,wBAAwB,KAAK,MAAM,CACrC,QAAO;AAGT,KAAI,sBAAsB,KAAK,MAAM,CACnC,QAAO;AAIT,KAAI,yBAAyB,KAAK,MAAM,CACtC,QAAO;AAGT,KAAI,yBAAyB,KAAK,MAAM,CACtC,QAAO;AAGT,QAAO;;AAGT,SAAS,mCACP,mBACA,QACA,iBACgB;CAChB,MAAM,iBAAiB,mBAAmB,OAAO;CAEjD,MAAM,gBAAgB,kBAAkB,SAAS,YAC/C,QAAQ,SAAS,SACf,kBAAkB,QAAQ,GAC1B,uBAAuB,SAAS,gBAAgB,CACnD;AAED,QAAO,CAAC,GAAG,gBAAgB,GAAG,cAAc;;AAG9C,SAAS,mBACP,QACgB;AAChB,KAAI,CAAC,OACH,QAAO,EAAE;AAGX,KAAI,OAAO,WAAW,SACpB,QAAO,CAAC;EAAE,MAAM;EAAU,SAAS;EAAQ,CAAC;KAG5C,QAAO,CAAC;EAAE,MAAM;EAAU,SADP,OAAO,KAAK,UAAU,MAAM,KAAK,CAAC,KAAK,OAAO;EAClB,CAAC;;AAIpD,SAAS,kBAAkB,SAA+C;CACxE,MAAMC,cAA8B,EAAE;AAEtC,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAAE;EAClC,MAAM,mBAAmB,QAAQ,QAAQ,QACtC,UACC,MAAM,SAAS,cAClB;EACD,MAAM,cAAc,QAAQ,QAAQ,QA
CjC,UAAU,MAAM,SAAS,cAC3B;AAGD,OAAK,MAAM,SAAS,iBAClB,aAAY,KAAK;GACf,MAAM;GACN,cAAc,MAAM;GACpB,SAAS,WAAW,MAAM,QAAQ;GACnC,CAAC;AAGJ,MAAI,YAAY,SAAS,EACvB,aAAY,KAAK;GACf,MAAM;GACN,SAAS,WAAW,YAAY;GACjC,CAAC;OAGJ,aAAY,KAAK;EACf,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CAAC;AAGJ,QAAO;;AAGT,SAAS,uBACP,SACA,iBACgB;AAChB,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,CACL;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;CAGH,MAAM,gBAAgB,QAAQ,QAAQ,QACnC,UAA0C,MAAM,SAAS,WAC3D;CAED,MAAM,aAAa,QAAQ,QAAQ,QAChC,UAAuC,MAAM,SAAS,OACxD;CAED,MAAM,iBAAiB,QAAQ,QAAQ,QACpC,UAA2C,MAAM,SAAS,WAC5D;CAGD,MAAM,iBAAiB,CACrB,GAAG,WAAW,KAAK,MAAM,EAAE,KAAK,EAChC,GAAG,eAAe,KAAK,MAAM,EAAE,SAAS,CACzC,CAAC,KAAK,OAAO;AAEd,QAAO,cAAc,SAAS,IAC1B,CACE;EACE,MAAM;EACN,SAAS,kBAAkB;EAC3B,YAAY,cAAc,KAAK,aAAa;GAC1C,IAAI,QAAQ;GACZ,MAAM;GACN,UAAU;IACR,MAAM,qBAAqB,QAAQ,MAAM,gBAAgB;IACzD,WAAW,KAAK,UAAU,QAAQ,MAAM;IACzC;GACF,EAAE;EACJ,CACF,GACD,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;;AAGP,SAAS,WACP,SAGoC;AACpC,KAAI,OAAO,YAAY,SACrB,QAAO;AAET,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;AAIT,KAAI,CADa,QAAQ,MAAM,UAAU,MAAM,SAAS,QAAQ,CAE9D,QAAO,QACJ,QACE,UACC,MAAM,SAAS,UAAU,MAAM,SAAS,WAC3C,CACA,KAAK,UAAW,MAAM,SAAS,SAAS,MAAM,OAAO,MAAM,SAAU,CACrE,KAAK,OAAO;CAGjB,MAAMC,eAAmC,EAAE;AAC3C,MAAK,MAAM,SAAS,QAClB,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM,CAAC;AAErD;EAEF,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAU,CAAC;AAEzD;EAEF,KAAK;AACH,gBAAa,KAAK;IAChB,MAAM;IACN,WAAW,EACT,KAAK,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO,QAC7D;IACF,CAAC;AAEF;;AAKN,QAAO;;AAKT,SAAS,qBACP,cACA,iBACQ;AAER,KAAI,aAAa,UAAU,uBACzB,QAAO;CAIT,MAAM,oBACJ,gBAAgB,oBAAoB,IAAI,aAAa;AACvD,KAAI,kBACF,QAAO;CAKT,IAAI,OAAO;AACX,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;EAC5C,MAAM,OAAO,aAAa,YAAY,EAAE,IAAI;AAC5C,UAAQ,QAAQ,KAAK,OAAO;AAC5B,SAAO,OAAO;;CAEhB,MAAM,aAAa,KAAK,IAAI,KAAK,CAAC,SAAS,GAAG,CAAC,MAAM,GAAG,EAAE;CAG1D,MAAM,gBACJ,aAAa,MAAM,GAAG,yBAAyB,EAAE,GAAG,MAAM;AAG5D,iBAAgB,oBAAoB,IAAI,eAAe,aAAa;AACpE,iBAAgB,oBAAoB,IAAI,cAAc,cAAc;AAEpE,SAAQ,MAAM,yBAAyB,aAAa,QAAQ,cAAc,GAAG;AAE7E,QAAO;;AAGT,SAAS,gCACP,gBACA,iBACyB;AACzB,KAAI,CAAC,eACH;AAEF,QAAO,eAAe,KAAK,UAAU;EACnC,MAAM;EACN,UAAU;GACR,MAAM,qBAAqB,KAAK,MAAM,gBAAgB;GACtD,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB;EACF,EAAE;;AAGL,SAAS,qCACP,qBACA,iBACuC;AACvC,KAAI,CAAC,oBACH;AAGF,SAAQ,oBAAoB,MAA5B;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK;AACH,OAAI,oBAAoB,KACtB,QAAO;IACL,MAAM;IACN,UAAU,EACR,MAAM,qBACJ,oBAAoB,MACpB,gBACD,EACF;IACF;AAEH;EAEF,KAAK,OACH,QAAO;EAET,QACE;;;;AAQN,SAAS,oBACP,UACmB;AACnB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,EAAE;EACX,aAAa;EACb,eAAe;EACf,OAAO;GACL,cAAc,SAAS,OAAO,iBAAiB;GAC/C,eAAe,SAAS,OAAO,qBAAqB;GACrD;EACF;;;AAIH,SAAS,iBAAiB,UAAkC;CAC1D,MAAM,eAAe,SAAS,OAAO,uBAAuB;AAC5D,QAAO;EACL,eAAe,SAAS,OAAO,iBAAiB,MAAM,gBAAgB;EACtE,eAAe,SAAS,OAAO,qBAAqB;EACpD,GAAI,iBAAiB,UAAa,EAChC,yBAAyB,cAC1B;EACF;;AAGH,SAAgB,qBACd,UACA,iBACmB;AAEnB,KAAI,SAAS,QAAQ,WAAW,EAC9B,QAAO,oBAAoB,SAAS;CAItC,MAAMC,gBAA2C,EAAE;CACnD,MAAMC,mBAAiD,EAAE;CACzD,IAAIC,aACF;AACF,cAAa,SAAS,QAAQ,IAAI,iBAAiB;AAGnD,MAAK,MAAM,UAAU,SAAS,SAAS;EACrC,MAAM,aAAa,uBAAuB,OAAO,QAAQ,QAAQ;EACjE,MAAM,gBAAgB,0BACpB,OAAO,QAAQ,YACf,gBACD;AAED,gBAAc,KAAK,GAAG,WAAW;AACjC,mBAAiB,KAAK,GAAG,cAAc;AAGvC,MAAI,OAAO,kBAAkB,gBAAgB,eAAe,OAC1D,cAAa,OAAO;;AAMxB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,CAAC,GAAG,eAAe,GAAG,iBAAiB;EAChD,aAAa,+BAA+B,WAAW;EACvD,eAAe;EACf,OAAO,iBAAiB,SAAS;EAClC;;AAGH,SAAS,uBACP,gBAC2B;AAC3B,KAAI,OAAO,mBAAmB,SAC5B,QAAO,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAgB,CAAC;AAGjD,KAAI,MAAM,QAAQ,eAAe,CAC/B,QAAO,eACJ,QAAQ,SAA2B,KAAK,SA
AS,OAAO,CACxD,KAAK,UAAU;EAAE,MAAM;EAAQ,MAAM,KAAK;EAAM,EAAE;AAGvD,QAAO,EAAE;;AAGX,SAAS,0BACP,WACA,iBAC8B;AAC9B,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,UAAU,KAAK,aAAa;EACjC,IAAIC,QAAiC,EAAE;AACvC,MAAI;AACF,WAAQ,KAAK,MAAM,SAAS,SAAS,UAAU;WACxC,OAAO;AACd,WAAQ,KACN,2CAA2C,SAAS,SAAS,KAAK,IAClE,MACD;;EAIH,MAAM,eACJ,iBAAiB,oBAAoB,IAAI,SAAS,SAAS,KAAK,IAC7D,SAAS,SAAS;AAEvB,SAAO;GACL,MAAM;GACN,IAAI,SAAS;GACb,MAAM;GACN;GACD;GACD;;;;;;;;ACziBJ,eAAsB,kBAAkB,GAAY;AAClD,KAAI;EACF,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;EAEpD,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;EAErE,MAAM,EAAE,SAAS,kBAAkB,kBAAkB,iBAAiB;EAEtE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,iBAAiB,MAC1C;AAED,MAAI,CAAC,eAAe;AAClB,WAAQ,KAAK,iDAAiD;AAC9D,UAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;EAGJ,MAAM,aAAa,MAAM,cAAc,eAAe,cAAc;AAEpE,MAAI,iBAAiB,SAAS,iBAAiB,MAAM,SAAS,GAAG;GAC/D,IAAI,eAAe;AACnB,OAAI,eAAe,WAAW,cAAc,CAC1C,gBAAe,iBAAiB,MAAM,MAAM,SAC1C,KAAK,KAAK,WAAW,QAAQ,CAC9B;AAEH,OAAI,CAAC,cACH;QAAI,iBAAiB,MAAM,WAAW,SAAS,CAG7C,YAAW,QAAQ,WAAW,QAAQ;aAC7B,iBAAiB,MAAM,WAAW,OAAO,CAElD,YAAW,QAAQ,WAAW,QAAQ;;;EAK5C,IAAI,kBAAkB,WAAW,QAAQ,WAAW;AACpD,MAAI,iBAAiB,MAAM,WAAW,SAAS,CAG7C,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;WAC3C,iBAAiB,MAAM,WAAW,OAAO,CAElD,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;AAGtD,UAAQ,MAAM,gBAAgB,gBAAgB;AAE9C,SAAO,EAAE,KAAK,EACZ,cAAc,iBACf,CAAC;UACK,OAAO;AACd,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,SAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;;;;;AC/DN,SAAS,gBAAgB,SAAsC;AAC7D,KAAI,CAACC,QAAM,iBACT,QAAO;AAGT,QAAO,OAAO,OAAOA,QAAM,UAAU,CAAC,MACnC,OAAO,GAAG,wBAAwBA,QAAM,kBAC1C;;AAIH,SAAgB,gCACd,OACA,SACA,iBACiC;CACjC,MAAMC,WAA0C,EAAE;AAGlD,KAAI,MAAM,QAAQ,WAAW,GAAG;AAE9B,MAAI,MAAM,SAAS,CAACD,QAAM,MACxB,SAAM,QAAQ,MAAM;AAEtB,SAAOE;;CAGT,MAAM,SAAS,MAAM,QAAQ;CAC7B,MAAM,EAAE,UAAU;AAElB,KAAI,CAACF,QAAM,kBAAkB;EAE3B,MAAM,QAAQ,MAAM,SAASA,QAAM,SAAS;AAC5C,WAAO,KAAK;GACV,MAAM;GACN,SAAS;IACP,IAAI,MAAM,MAAM,OAAO,KAAK,KAAK;IACjC,MAAM;IACN,MAAM;IACN,SAAS,EAAE;IACX;IACA,aAAa;IACb,eAAe;IACf,OAAO;KACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;KAC1D,eAAe;KACf,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;KACF;IACF;GACF,CAAC;AACF,UAAM,mBAAmB;;AAG3B,KAAI,MAAM,SAAS;AACjB,MAAI,gBAAgBA,QAAM,EAAE;AAE1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM;AACN,WAAM,mBAAmB;;AAG3B,MAAI,CAACA,QAAM,kBAAkB;AAC3B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,MAAM;KACP;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,MAAM,MAAM;IACb;GACF,CAAC;;AAGJ,KAAI,MAAM,WACR,MAAK,MAAM,YAAY,MAAM,YAAY;AACvC,MAAI,SAAS,MAAM,SAAS,UAAU,MAAM;AAE1C,OAAIA,QAAM,kBAAkB;AAE1B,aAAO,KAAK;KACV,MAAM;KACN,OAAOA,QAAM;KACd,CAAC;AACF,YAAM;AACN,YAAM,mBAAmB;;GAI3B,MAAM,eACJ,iBAAiB,oBAAoB,IAAI,SAAS,SAAS,KAAK,IAC7D,SAAS,SAAS;GAEvB,MAAM,sBAAsBA,QAAM;AAClC,WAAM,UAAU,SAAS,SAAS;IAChC,IAAI,SAAS;IACb,MAAM;IACN;IACD;AAED,YAAO,KAAK;IACV,MAAM;IACN,OAAO;IACP,eAAe;KACb,MAAM;KACN,IAAI,SAAS;KACb,MAAM;KACN,OAAO,EAAE;KACV;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,MAAI,SAAS,UAAU,WAAW;GAChC,MAAM,eAAeA,QAAM,UAAU,SAAS;AAG9C,OAAI,aACF,UAAO,KAAK;IACV,MAAM;IACN,OAAO,aAAa;IACpB,OAAO;KACL,MAAM;KACN,cAAc,SAAS,SAAS;KACjC;IACF,CAAC;;;AAMV,KAAI,OAAO,eAAe;AACxB,MAAIA,QAAM,kBAAkB;AAC1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KACL;GACE,MAAM;GACN,OAAO;IACL,aAAa,+BAA+B,OAAO,cAAc;IACjE,eAAe;IAChB;GACD,OAAO;IACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;IAC1D,eAAe,MAAM,OAAO,qBAAqB;IACjD,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;IACF;GACF,EACD,EACE,MAAM,gBACP,CACF;;AAGH,QAAOE;;AAGT,SAAgB,sCAAgE;AAC9E,QAAO;EACL,MAAM;EACN,OAAO;GACL,MAAM;GACN,SAAS;GACV
;EACF;;;;;ACxJH,eAAsB,iBAAiB,GAAY;CACjD,MAAM,YAAY,KAAK,KAAK;CAC5B,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;AACrE,SAAQ,MAAM,8BAA8B,KAAK,UAAU,iBAAiB,CAAC;CAG7E,MAAM,aAAa,EAAE,IAAI,aAAa;AACtC,oBAAmB,YAAY,iBAAiB,MAAM;CAgBtD,MAAMC,MAAuB;EAAE,WAbb,cAAc,aAAa;GAC3C,OAAO,iBAAiB;GACxB,UAAU,yBAAyB,iBAAiB,SAAS;GAC7D,QAAQ,iBAAiB,UAAU;GACnC,OAAO,iBAAiB,OAAO,KAAK,OAAO;IACzC,MAAM,EAAE;IACR,aAAa,EAAE;IAChB,EAAE;GACH,YAAY,iBAAiB;GAC7B,aAAa,iBAAiB;GAC9B,QAAQ,oBAAoB,iBAAiB,OAAO;GACrD,CAAC;EAEwC;EAAY;EAAW;CAEjE,MAAM,EAAE,SAAS,mBAAmB,oBAClC,kBAAkB,iBAAiB;AACrC,SAAQ,MACN,sCACA,KAAK,UAAU,kBAAkB,CAClC;CAGD,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,kBAAkB,MAC3C;CAED,MAAM,EAAE,cAAc,eAAe,kBACnC,MAAM,kBAAkB,mBAAmB,cAAc;AAC3D,KAAI,cACF,KAAI,gBAAgB;AAGtB,KAAI,MAAM,cACR,OAAM,eAAe;AAGvB,KAAI;EACF,MAAM,WAAW,MAAM,qBAAqB,aAC1C,sBAAsB,cAAc,CACrC;AAED,MAAI,eAAe,SAAS,CAC1B,QAAO,2BAA2B;GAAE;GAAG;GAAU;GAAiB;GAAK,CAAC;AAG1E,UAAQ,MAAM,kCAAkC;AAChD,sBAAoB,YAAY,YAAY;AAE5C,SAAO,UAAU,GAAG,OAAO,WAAW;AACpC,SAAM,wBAAwB;IAC5B;IACA;IACA;IACA;IACA;IACD,CAAC;IACF;UACK,OAAO;AACd,sBAAoB,KAAK,iBAAiB,OAAO,MAAM;AACvD,QAAM;;;AAKV,SAAS,mBAAmB,YAAgC,OAAe;AACzE,KAAI,CAAC,WAAY;CACjB,MAAM,UAAU,eAAe,WAAW,WAAW;AACrD,KAAI,QAAS,SAAQ,QAAQ;;AAI/B,eAAe,kBACb,SACA,OAIC;AACD,KAAI,CAAC,MAAM,eAAe,CAAC,OAAO;AAChC,MAAI,MAAM,eAAe,CAAC,MACxB,SAAQ,KACN,wBAAwB,QAAQ,MAAM,wCACvC;AAEH,SAAO;GAAE,cAAc;GAAS,eAAe;GAAM;;AAGvD,KAAI;EACF,MAAM,QAAQ,MAAM,qBAAqB,SAAS,MAAM;AACxD,UAAQ,MACN,uBAAuB,MAAM,cAAc,iBAAiB,MAAM,MAAM,YAAY,MAAM,SAC3F;AACD,MAAI,CAAC,MAAM,OACT,QAAO;GAAE,cAAc;GAAS,eAAe;GAAM;AAGvD,UAAQ,KACN,2BAA2B,MAAM,cAAc,YAAY,MAAM,MAAM,QACxE;EACD,MAAM,gBAAgB,MAAM,YAAY,SAAS,MAAM;AACvD,SAAO;GAAE,cAAc,cAAc;GAAS;GAAe;UACtD,OAAO;AACd,UAAQ,KACN,0DACA,MACD;AACD,SAAO;GAAE,cAAc;GAAS,eAAe;GAAM;;;AAKzD,SAAS,oBACP,YACA,QACA;AACA,KAAI,CAAC,WAAY;AACjB,gBAAe,cAAc,YAAY,EAAE,QAAQ,CAAC;;AAItD,SAAS,oBACP,KACA,OACA,OACA;AACA,gBACE,IAAI,WACJ;EACE,SAAS;EACT;EACA,OAAO;GAAE,cAAc;GAAG,eAAe;GAAG;EAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;EAChD,SAAS;EACV,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAYH,SAAS,2BAA2B,MAA2B;CAC7D,MAAM,EAAE,GAAG,UAAU,iBAAiB,QAAQ;AAC9C,SAAQ,MACN,wCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;CACD,IAAI,oBAAoB,qBAAqB,UAAU,gBAAgB;AACvE,SAAQ,MACN,kCACA,KAAK,UAAU,kBAAkB,CAClC;AAGD,KAAI,IAAI,eAAe,cAAc;EACnC,MAAM,SAAS,uBAAuB,IAAI,cAAc;AACxD,sBAAoB,gCAClB,mBACA,OACD;;AAGH,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,kBAAkB;EACzB,OAAO,kBAAkB;EACzB,aAAa,kBAAkB,eAAe;EAC9C,SAAS;GACP,MAAM;GACN,SAAS,kBAAkB,QAAQ,KAAK,UAAU;AAChD,QAAI,MAAM,SAAS,OACjB,QAAO;KAAE,MAAM;KAAQ,MAAM,MAAM;KAAM;AAE3C,QAAI,MAAM,SAAS,WACjB,QAAO;KACL,MAAM;KACN,IAAI,MAAM;KACV,MAAM,MAAM;KACZ,OAAO,KAAK,UAAU,MAAM,MAAM;KACnC;AAEH,WAAO,EAAE,MAAM,MAAM,MAAM;KAC3B;GACH;EACD,WAAW,4BAA4B,kBAAkB,QAAQ;EAClE,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;AAED,KAAI,IAAI,WACN,gBAAe,cAAc,IAAI,YAAY;EAC3C,aAAa,kBAAkB,MAAM;EACrC,cAAc,kBAAkB,MAAM;EACvC,CAAC;AAGJ,QAAO,EAAE,KAAK,kBAAkB;;AAIlC,SAAS,gCACP,UACA,QACyC;CAEzC,MAAM,UAAU,CAAC,GAAG,SAAS,QAAQ;CACrC,MAAM,gBAAgB,QAAQ,eAAe,UAAU,MAAM,SAAS,OAAO;AAE7E,KAAI,kBAAkB,IAAI;EACxB,MAAM,YAAY,QAAQ;AAC1B,MAAI,UAAU,SAAS,OACrB,SAAQ,iBAAiB;GACvB,GAAG;GACH,MAAM,UAAU,OAAO;GACxB;OAIH,SAAQ,KAAK;EAAE,MAAM;EAAQ,MAAM;EAAQ,CAAC;AAG9C,QAAO;EAAE,GAAG;EAAU;EAAS;;AAcjC,SAAS,mCAA+D;AACtE,QAAO;EACL,OAAO;EACP,aAAa;EACb,cAAc;EACd,YAAY;EACZ,SAAS;EACT,WAAW,EAAE;EACb,iBAAiB;EAClB;;AAaH,eAAe,wBAAwB,MAA4B;CACjE,MAAM,EAAE,QAAQ,UAAU,iBAAiB,kBAAkB,QAAQ;CACrE,MAAMC,cAAoC;EACxC,kBAAkB;EAClB,mBAAmB;EACnB,kBAAkB;EAClB,WAAW,EAAE;EACd;CACD,MAAM,MAAM,kCAAkC;AAE9C,KAAI;AACF,QAAM,oBAAoB;GACxB;GACA;GACA;GACA;GACA;GACD,CAAC;AAGF,MAAI,IAAI,eAAe,cAAc;GACnC,MAAM,SAAS,uBAAuB,IAAI,cAAc;AACxD,SAAM,0BAA0B
,QAAQ,aAAa,OAAO;AAC5D,OAAI,WAAW;;AAGjB,0BAAwB,KAAK,iBAAiB,OAAO,IAAI;AACzD,mBAAiB,IAAI,YAAY,IAAI,aAAa,IAAI,aAAa;UAC5D,OAAO;AACd,UAAQ,MAAM,iBAAiB,MAAM;AACrC,uBAAqB;GACnB;GACA,eAAe,iBAAiB;GAChC;GACA;GACD,CAAC;AACF,eAAa,IAAI,YAAY,MAAM;EAEnC,MAAM,aAAa,qCAAqC;AACxD,QAAM,OAAO,SAAS;GACpB,OAAO,WAAW;GAClB,MAAM,KAAK,UAAU,WAAW;GACjC,CAAC;;;AAKN,eAAe,0BACb,QACA,aACA,QACA;CAEA,MAAM,kBAAkB;EACtB,MAAM;EACN,OAAO,YAAY;EACnB,eAAe;GAAE,MAAM;GAAQ,MAAM;GAAI;EAC1C;AACD,OAAM,OAAO,SAAS;EACpB,OAAO;EACP,MAAM,KAAK,UAAU,gBAAgB;EACtC,CAAC;CAGF,MAAM,aAAa;EACjB,MAAM;EACN,OAAO,YAAY;EACnB,OAAO;GAAE,MAAM;GAAc,MAAM;GAAQ;EAC5C;AACD,OAAM,OAAO,SAAS;EACpB,OAAO;EACP,MAAM,KAAK,UAAU,WAAW;EACjC,CAAC;CAGF,MAAM,iBAAiB;EACrB,MAAM;EACN,OAAO,YAAY;EACpB;AACD,OAAM,OAAO,SAAS;EACpB,OAAO;EACP,MAAM,KAAK,UAAU,eAAe;EACrC,CAAC;AAEF,aAAY;;AAad,eAAe,oBAAoB,MAA4B;CAC7D,MAAM,EAAE,QAAQ,UAAU,iBAAiB,aAAa,QAAQ;AAChE,YAAW,MAAM,YAAY,UAAU;AACrC,UAAQ,MAAM,6BAA6B,KAAK,UAAU,SAAS,CAAC;AACpE,MAAI,SAAS,SAAS,SAAU;AAChC,MAAI,CAAC,SAAS,KAAM;EAEpB,IAAIC;AACJ,MAAI;AACF,WAAQ,KAAK,MAAM,SAAS,KAAK;WAC1B,YAAY;AACnB,WAAQ,MAAM,iCAAiC,YAAY,SAAS,KAAK;AACzE;;AAGF,MAAI,MAAM,SAAS,CAAC,IAAI,MAAO,KAAI,QAAQ,MAAM;EAEjD,MAAMC,WAAS,gCACb,OACA,aACA,gBACD;AAED,OAAK,MAAM,SAASA,UAAQ;AAC1B,WAAQ,MAAM,+BAA+B,KAAK,UAAU,MAAM,CAAC;AACnE,yBAAsB,OAAO,IAAI;AACjC,SAAM,OAAO,SAAS;IACpB,OAAO,MAAM;IACb,MAAM,KAAK,UAAU,MAAM;IAC5B,CAAC;;;;AAMR,SAAS,sBACP,OACA,KACA;AACA,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,2BAAwB,MAAM,OAAO,IAAI;AACzC;EAEF,KAAK;AACH,2BAAwB,MAAM,eAAe,IAAI;AACjD;EAEF,KAAK;AACH,0BAAuB,IAAI;AAC3B;EAEF,KAAK;AACH,sBAAmB,MAAM,OAAO,MAAM,OAAO,IAAI;AACjD;EAEF,QACE;;;AAYN,SAAS,wBACP,OACA,KACA;AACA,KAAI,MAAM,SAAS,aACjB,KAAI,WAAW,MAAM;UACZ,MAAM,SAAS,sBAAsB,IAAI,gBAClD,KAAI,gBAAgB,SAAS,MAAM;;AAgBvC,SAAS,wBACP,OACA,KACA;AACA,KAAI,MAAM,SAAS,WACjB,KAAI,kBAAkB;EACpB,IAAI,MAAM;EACV,MAAM,MAAM;EACZ,OAAO;EACR;;AAIL,SAAS,uBAAuB,KAAiC;AAC/D,KAAI,IAAI,iBAAiB;AACvB,MAAI,UAAU,KAAK,IAAI,gBAAgB;AACvC,MAAI,kBAAkB;;;AAiB1B,SAAS,mBACP,OACA,OACA,KACA;AACA,KAAI,MAAM,YAAa,KAAI,aAAa,MAAM;AAC9C,KAAI,OAAO;AACT,MAAI,cAAc,MAAM,gBAAgB;AACxC,MAAI,eAAe,MAAM;;;AAK7B,SAAS,wBACP,KACA,eACA,KACA;CACA,MAAMC,gBAAwD,EAAE;AAChE,KAAI,IAAI,QAAS,eAAc,KAAK;EAAE,MAAM;EAAQ,MAAM,IAAI;EAAS,CAAC;AACxE,MAAK,MAAM,MAAM,IAAI,UACnB,eAAc,KAAK;EAAE,MAAM;EAAY,GAAG;EAAI,CAAC;AAGjD,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,IAAI,SAAS;EACpB,OAAO;GAAE,cAAc,IAAI;GAAa,eAAe,IAAI;GAAc;EACzE,aAAa,IAAI,cAAc;EAC/B,SACE,cAAc,SAAS,IACrB;GAAE,MAAM;GAAa,SAAS;GAAe,GAC7C;EACJ,WAAW,IAAI,UAAU,SAAS,IAAI,IAAI,YAAY;EACvD,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAIH,SAAS,qBAAqB,MAK3B;CACD,MAAM,EAAE,KAAK,eAAe,KAAK,UAAU;AAC3C,gBACE,IAAI,WACJ;EACE,SAAS;EACT,OAAO,IAAI,SAAS;EACpB,OAAO;GAAE,cAAc;GAAG,eAAe;GAAG;EAC5C,OAAO,iBAAiB,QAAQ,MAAM,UAAU;EAChD,SAAS;EACV,EACD,KAAK,KAAK,GAAG,IAAI,UAClB;;AAIH,SAAS,iBACP,YACA,aACA,cACA;AACA,KAAI,CAAC,WAAY;AACjB,gBAAe,cAAc,YAAY;EAAE;EAAa;EAAc,CAAC;AACvE,gBAAe,gBAAgB,YAAY,KAAK;EAAE;EAAa;EAAc,CAAC;;AAIhF,SAAS,aAAa,YAAgC,OAAgB;AACpE,KAAI,CAAC,WAAY;AACjB,gBAAe,YACb,YACA,iBAAiB,QAAQ,MAAM,UAAU,eAC1C;;AAIH,SAAS,yBACP,UACuB;AACvB,QAAO,SAAS,KAAK,QAAQ;AAC3B,MAAI,OAAO,IAAI,YAAY,SACzB,QAAO;GAAE,MAAM,IAAI;GAAM,SAAS,IAAI;GAAS;EAIjD,MAAM,UAAU,IAAI,QAAQ,KAAK,UAAU;AACzC,OAAI,MAAM,SAAS,OACjB,QAAO;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM;AAE3C,OAAI,MAAM,SAAS,WACjB,QAAO;IACL,MAAM;IACN,IAAI,MAAM;IACV,MAAM,MAAM;IACZ,OAAO,KAAK,UAAU,MAAM,MAAM;IACnC;AAEH,OAAI,MAAM,SAAS,eAAe;IAChC,MAAM,gBACJ,OAAO,MAAM,YAAY,WACvB,MAAM,UACN,MAAM,QACH,KAAK,MAAO,EAAE,SAAS,SAAS,EAAE,OAAO,IAAI,EAAE,KAAK,GAAI,CACxD,KAAK,KAAK;AACjB,WAAO;KACL,MAAM;KACN,aAAa,MAAM;KACnB,SAAS;KACV;;AAEH,UAAO,EAAE,MAAM,MAAM,MAAM;IAC3B;AAEF,SAAO;GAAE,MAAM,IAAI
;GAAM;GAAS;GAClC;;AAIJ,SAAS,oBACP,QACoB;AACpB,KAAI,CAAC,OAAQ,QAAO;AACpB,KAAI,OAAO,WAAW,SAAU,QAAO;AACvC,QAAO,OAAO,KAAK,UAAU,MAAM,KAAK,CAAC,KAAK,KAAK;;AAIrD,SAAS,4BACP,SACgE;CAChE,MAAMC,QAA4D,EAAE;AACpE,MAAK,MAAM,SAAS,QAClB,KACE,OAAO,UAAU,YACd,UAAU,QACV,UAAU,SACV,MAAM,SAAS,cACf,QAAQ,SACR,UAAU,SACV,WAAW,MAEd,OAAM,KAAK;EACT,IAAI,OAAO,MAAM,GAAG;EACpB,MAAM,OAAO,MAAM,KAAK;EACxB,OAAO,KAAK,UAAU,MAAM,MAAM;EACnC,CAAC;AAGN,QAAO,MAAM,SAAS,IAAI,QAAQ;;AAGpC,MAAM,kBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;;;;AC7rB3E,MAAa,gBAAgB,IAAI,MAAM;AAEvC,cAAc,KAAK,KAAK,OAAO,MAAM;AACnC,KAAI;AACF,SAAO,MAAM,iBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;AAEF,cAAc,KAAK,iBAAiB,OAAO,MAAM;AAC/C,KAAI;AACF,SAAO,MAAM,kBAAkB,EAAE;UAC1B,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,cAAc,IAAI,MAAM;AAErC,YAAY,IAAI,KAAK,OAAO,MAAM;AAChC,KAAI;AACF,MAAI,CAAC,MAAM,OAET,OAAM,aAAa;EAGrB,MAAM,SAAS,MAAM,QAAQ,KAAK,KAAK,WAAW;GAChD,IAAI,MAAM;GACV,QAAQ;GACR,MAAM;GACN,SAAS;GACT,6BAAY,IAAI,KAAK,EAAE,EAAC,aAAa;GACrC,UAAU,MAAM;GAChB,cAAc,MAAM;GAEpB,cAAc;IACZ,QAAQ,MAAM,aAAa;IAC3B,MAAM,MAAM,aAAa;IACzB,WAAW,MAAM,aAAa;IAC9B,QAAQ;KACN,2BACE,MAAM,aAAa,OAAO;KAC5B,mBAAmB,MAAM,aAAa,OAAO;KAC7C,mBAAmB,MAAM,aAAa,OAAO;KAC9C;IACD,UAAU;KACR,YAAY,MAAM,aAAa,SAAS;KACxC,qBAAqB,MAAM,aAAa,SAAS;KAClD;IACF;GACF,EAAE;AAEH,SAAO,EAAE,KAAK;GACZ,QAAQ;GACR,MAAM;GACN,UAAU;GACX,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;AC5CF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;AACF,SAAO,EAAE,KAAK,EACZ,OAAO,MAAM,cACd,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;EACF,MAAM,QAAQ,MAAM,iBAAiB;AACrC,SAAO,EAAE,KAAK,MAAM;UACb,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACAF,MAAa,SAAS,IAAI,MAAM;AAEhC,OAAO,IAAI,WAAW,CAAC;AACvB,OAAO,IAAI,MAAM,CAAC;AAElB,OAAO,IAAI,MAAM,MAAM,EAAE,KAAK,iBAAiB,CAAC;AAGhD,OAAO,IAAI,YAAY,MAAM;CAC3B,MAAM,UAAU,QAAQ,MAAM,gBAAgB,MAAM,YAAY;AAChE,QAAO,EAAE,KACP;EACE,QAAQ,UAAU,YAAY;EAC9B,QAAQ;GACN,cAAc,QAAQ,MAAM,aAAa;GACzC,aAAa,QAAQ,MAAM,YAAY;GACvC,QAAQ,QAAQ,MAAM,OAAO;GAC9B;EACF,EACD,UAAU,MAAM,IACjB;EACD;AAEF,OAAO,MAAM,qBAAqB,iBAAiB;AACnD,OAAO,MAAM,WAAW,YAAY;AACpC,OAAO,MAAM,eAAe,gBAAgB;AAC5C,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,UAAU,WAAW;AAGlC,OAAO,MAAM,wBAAwB,iBAAiB;AACtD,OAAO,MAAM,cAAc,YAAY;AACvC,OAAO,MAAM,kBAAkB,gBAAgB;AAG/C,OAAO,MAAM,gBAAgB,cAAc;AAC3C,OAAO,MAAM,sBAAsB,mBAAmB;AAGtD,OAAO,MAAM,YAAY,cAAc;;;;ACjBvC,eAAsB,UAAU,SAA0C;AACxE,KAAI,QAAQ,SACV,mBAAkB;AAGpB,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,cAAc,QAAQ;AAC5B,KAAI,QAAQ,gBAAgB,aAC1B,SAAQ,KAAK,SAAS,QAAQ,YAAY,sBAAsB;AAGlE,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,mBAAmB,QAAQ;AACjC,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,YAAY,QAAQ;AAC1B,OAAM,cAAc,QAAQ;AAE5B,KAAI,QAAQ,YACV,SAAQ,KACN,0EACD;AAIH,aAAY,QAAQ,SAAS,QAAQ,aAAa;AAClD,KAAI,QAAQ,SAAS;EACnB,MAAM,YACJ,QAAQ,iBAAiB,IAAI,cAAc,OAAO,QAAQ;AAC5D,UAAQ,KAAK,8BAA8B,UAAU,WAAW;;AAIlE,SAAQ;EAAE,SAAS;EAAM,MAAM,QAAQ;EAAK,CAAC;AAE7C,OAAM,aAAa;AACnB,OAAM,oBAAoB;AAE1B,KAAI,QAAQ,aAAa;AACvB,QAAM,cAAc,QAAQ;AAC5B,UAAQ,KAAK,8BAA8B;OAE3C,OAAM,kBAAkB;AAG1B,OAAM,mBAAmB;AACzB,OAAM,aAAa;AAEnB,SAAQ,KACN,uBAAuB,MAAM,QAAQ,KAAK,KAAK,UAAU,KAAK,MAAM,KAAK,CAAC,KAAK,KAAK,GACrF;CAGD,MAAM,YAAY,UADE,QAAQ,QAAQ,YACI,GAAG,QAAQ;AAEnD,KAAI,QAAQ,YAAY;AACtB,YAAU,MAAM,QAAQ,iCAAiC;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,OAClC,0CACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;GACpD,CACF;EAED,MAAM,qBAAqB,MAAM,QAAQ,OACvC,gDACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;GACpD,CACF;EAED,MAAM,UAAU,kBACd;GACE,oBAAoB;GACpB,sBAAsB;GACtB,iBAAiB;GACjB,gCAAgC;GAChC,4BAA4B;GAC5B,+BAA+B;GAC/B,mCAAmC;GA
CnC,0CAA0C;GAC3C,EACD,SACD;AAED,MAAI;AACF,aAAU,UAAU,QAAQ;AAC5B,WAAQ,QAAQ,2CAA2C;UACrD;AACN,WAAQ,KACN,gEACD;AACD,WAAQ,IAAI,QAAQ;;;AAIxB,SAAQ,IACN,oEAAoE,UAAU,QAAQ,QAAQ,UAAU,oBAAoB,UAAU,YAAY,KACnJ;AAED,OAAM;EACJ,OAAO,OAAO;EACd,MAAM,QAAQ;EACd,UAAU,QAAQ;EACnB,CAAC;;AAGJ,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,MAAM;GACJ,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,OAAO;GACP,MAAM;GACN,aAAa;GACd;EACD,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,eAAe;GACb,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,aAAa;GACX,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,SAAS;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,iBAAiB;GACf,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,KAAK;GACH,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,gBAAgB;GACd,MAAM;GACN,SAAS;GACT,aACE;GACH;EACF;CACD,IAAI,EAAE,QAAQ;EACZ,MAAM,eAAe,KAAK;EAC1B,MAAM,YAEJ,iBAAiB,SAAY,SAAY,OAAO,SAAS,cAAc,GAAG;AAE5E,SAAO,UAAU;GACf,MAAM,OAAO,SAAS,KAAK,MAAM,GAAG;GACpC,MAAM,KAAK;GACX,SAAS,KAAK;GACd,aAAa,KAAK;GAClB,QAAQ,KAAK;GACb;GACA,eAAe,KAAK;GACpB,aAAa,KAAK;GAClB,YAAY,KAAK;GACjB,WAAW,KAAK;GAChB,UAAU,KAAK;GACf,SAAS,KAAK;GACd,cAAc,OAAO,SAAS,KAAK,kBAAkB,GAAG;GACxD,KAAK,KAAK;GACV,aAAa,KAAK;GACnB,CAAC;;CAEL,CAAC;;;;AC/PF,QAAQ,QAAQ,cAAc,OAAO;AAErC,MAAM,OAAO,cAAc;CACzB,MAAM;EACJ,MAAM;EACN,aACE;EACH;CACD,aAAa;EAAE;EAAM;EAAQ;EAAO,eAAe;EAAY;EAAO;CACvE,CAAC;AAEF,MAAM,QAAQ,KAAK"}