github-router 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"file":"main.js","names":["state: State","state","headers: Record<string, string>","errorJson: unknown","token","process","commandBlock: string","state","x","headers: Record<string, string>","handleCompletion","isNonStreaming","handleCompletion","newMessages: Array<Message>","contentParts: Array<ContentPart>","allTextBlocks: Array<AnthropicTextBlock>","allToolUseBlocks: Array<AnthropicToolUseBlock>","stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null","state","events: Array<AnthropicStreamEventData>","events","isNonStreaming","streamState: AnthropicStreamState","events","headers: Record<string, string>","searchTimestamps: Array<number>","references: Array<{ title: string; url: string }>"],"sources":["../src/lib/paths.ts","../src/lib/state.ts","../src/lib/api-config.ts","../src/lib/error.ts","../src/services/github/get-copilot-token.ts","../src/services/github/get-device-code.ts","../src/services/github/get-user.ts","../src/services/copilot/get-models.ts","../src/services/get-vscode-version.ts","../src/lib/utils.ts","../src/services/github/poll-access-token.ts","../src/lib/token.ts","../src/auth.ts","../src/services/github/get-copilot-usage.ts","../src/check-usage.ts","../src/debug.ts","../src/lib/proxy.ts","../src/lib/shell.ts","../src/lib/approval.ts","../src/lib/rate-limit.ts","../src/lib/tokenizer.ts","../src/services/copilot/create-chat-completions.ts","../src/routes/chat-completions/handler.ts","../src/routes/chat-completions/route.ts","../src/services/copilot/create-embeddings.ts","../src/routes/embeddings/route.ts","../src/routes/messages/utils.ts","../src/routes/messages/non-stream-translation.ts","../src/routes/messages/count-tokens-handler.ts","../src/routes/messages/stream-translation.ts","../src/routes/messages/handler.ts","../src/routes/messages/route.ts","../src/routes/models/route.ts","../src/services/copilot/create-responses.ts","../src/services/copilot/web-search.ts","../src/routes/responses/handler.ts","../src/routes/responses/route.ts","../src/routes/search/route.ts","../src/routes/token/route.ts","../src/routes/usage/route.ts","../src/server.ts","../src/start.ts","../src/main.ts"],"sourcesContent":["import fs from \"node:fs/promises\"\nimport os from \"node:os\"\nimport path from \"node:path\"\n\nconst APP_DIR = path.join(os.homedir(), \".local\", \"share\", \"openroute\")\n\nconst GITHUB_TOKEN_PATH = path.join(APP_DIR, \"github_token\")\n\nexport const PATHS = {\n APP_DIR,\n GITHUB_TOKEN_PATH,\n}\n\nexport async function ensurePaths(): Promise<void> {\n await fs.mkdir(PATHS.APP_DIR, { recursive: true })\n await ensureFile(PATHS.GITHUB_TOKEN_PATH)\n}\n\nasync function ensureFile(filePath: string): Promise<void> {\n try {\n await fs.access(filePath, fs.constants.W_OK)\n } catch {\n await fs.writeFile(filePath, \"\")\n await fs.chmod(filePath, 0o600)\n }\n}\n","import type { ModelsResponse } from \"~/services/copilot/get-models\"\n\nexport interface State {\n githubToken?: string\n copilotToken?: string\n\n accountType: string\n models?: ModelsResponse\n vsCodeVersion?: string\n\n manualApprove: boolean\n rateLimitWait: boolean\n showToken: boolean\n\n // Rate limiting configuration\n rateLimitSeconds?: number\n lastRequestTimestamp?: number\n}\n\nexport const state: State = {\n accountType: \"individual\",\n manualApprove: false,\n rateLimitWait: false,\n showToken: false,\n}\n","import { randomUUID } from \"node:crypto\"\n\nimport type { State } from \"./state\"\n\nexport const standardHeaders = () => ({\n \"content-type\": 
\"application/json\",\n accept: \"application/json\",\n})\n\nconst COPILOT_VERSION = \"0.26.7\"\nconst EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`\nconst USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`\n\nconst API_VERSION = \"2025-04-01\"\n\nexport const copilotBaseUrl = (state: State) =>\n state.accountType === \"individual\" ?\n \"https://api.githubcopilot.com\"\n : `https://api.${state.accountType}.githubcopilot.com`\nexport const copilotHeaders = (\n state: State,\n vision: boolean = false,\n integrationId: string = \"vscode-chat\",\n) => {\n const headers: Record<string, string> = {\n Authorization: `Bearer ${state.copilotToken}`,\n \"content-type\": standardHeaders()[\"content-type\"],\n \"copilot-integration-id\": integrationId,\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"openai-intent\": \"conversation-panel\",\n \"x-github-api-version\": API_VERSION,\n \"x-request-id\": randomUUID(),\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n }\n\n if (vision) headers[\"copilot-vision-request\"] = \"true\"\n\n return headers\n}\n\nexport const GITHUB_API_BASE_URL = \"https://api.github.com\"\nexport const githubHeaders = (state: State) => ({\n ...standardHeaders(),\n authorization: `token ${state.githubToken}`,\n \"editor-version\": `vscode/${state.vsCodeVersion}`,\n \"editor-plugin-version\": EDITOR_PLUGIN_VERSION,\n \"user-agent\": USER_AGENT,\n \"x-github-api-version\": API_VERSION,\n \"x-vscode-user-agent-library-version\": \"electron-fetch\",\n})\n\nexport const GITHUB_BASE_URL = \"https://github.com\"\nexport const GITHUB_CLIENT_ID = \"Iv1.b507a08c87ecfe98\"\nexport const GITHUB_APP_SCOPES = [\"read:user\"].join(\" \")\n","import type { Context } from \"hono\"\nimport type { ContentfulStatusCode } from \"hono/utils/http-status\"\n\nimport consola from \"consola\"\n\nexport class HTTPError extends Error {\n response: Response\n\n constructor(message: string, response: Response) {\n super(message)\n this.response = response\n }\n}\n\nexport async function forwardError(c: Context, error: unknown) {\n consola.error(\"Error occurred:\", error)\n\n if (error instanceof HTTPError) {\n const errorText = await error.response.text()\n let errorJson: unknown\n try {\n errorJson = JSON.parse(errorText)\n } catch {\n errorJson = errorText\n }\n consola.error(\"HTTP error:\", errorJson)\n return c.json(\n {\n error: {\n message: errorText,\n type: \"error\",\n },\n },\n error.response.status as ContentfulStatusCode,\n )\n }\n\n return c.json(\n {\n error: {\n message: (error as Error).message,\n type: \"error\",\n },\n },\n 500,\n )\n}\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotToken = async () => {\n const response = await fetch(\n `${GITHUB_API_BASE_URL}/copilot_internal/v2/token`,\n {\n headers: githubHeaders(state),\n },\n )\n\n if (!response.ok) throw new HTTPError(\"Failed to get Copilot token\", response)\n\n return (await response.json()) as GetCopilotTokenResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GetCopilotTokenResponse {\n expires_at: number\n refresh_in: number\n token: string\n}\n","import {\n GITHUB_APP_SCOPES,\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\n\nexport async function getDeviceCode(): 
Promise<DeviceCodeResponse> {\n const response = await fetch(`${GITHUB_BASE_URL}/login/device/code`, {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n scope: GITHUB_APP_SCOPES,\n }),\n })\n\n if (!response.ok) throw new HTTPError(\"Failed to get device code\", response)\n\n return (await response.json()) as DeviceCodeResponse\n}\n\nexport interface DeviceCodeResponse {\n device_code: string\n user_code: string\n verification_uri: string\n expires_in: number\n interval: number\n}\n","import { GITHUB_API_BASE_URL, standardHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport async function getGitHubUser() {\n const response = await fetch(`${GITHUB_API_BASE_URL}/user`, {\n headers: {\n authorization: `token ${state.githubToken}`,\n ...standardHeaders(),\n },\n })\n\n if (!response.ok) throw new HTTPError(\"Failed to get GitHub user\", response)\n\n return (await response.json()) as GithubUserResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GithubUserResponse {\n login: string\n}\n","import { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getModels = async () => {\n const response = await fetch(`${copilotBaseUrl(state)}/models`, {\n headers: copilotHeaders(state),\n })\n\n if (!response.ok) throw new HTTPError(\"Failed to get models\", response)\n\n return (await response.json()) as ModelsResponse\n}\n\nexport interface ModelsResponse {\n data: Array<Model>\n object: string\n}\n\ninterface ModelLimits {\n max_context_window_tokens?: number\n max_output_tokens?: number\n max_prompt_tokens?: number\n max_inputs?: number\n}\n\ninterface ModelSupports {\n tool_calls?: boolean\n parallel_tool_calls?: boolean\n dimensions?: boolean\n}\n\ninterface ModelCapabilities {\n family: string\n limits: ModelLimits\n object: string\n supports: ModelSupports\n tokenizer: string\n type: string\n}\n\nexport interface Model {\n capabilities: ModelCapabilities\n id: string\n model_picker_enabled: boolean\n name: string\n object: string\n preview: boolean\n vendor: string\n version: string\n supported_endpoints?: Array<string>\n policy?: {\n state: string\n terms: string\n }\n}\n","const FALLBACK = \"1.104.3\"\n\nexport async function getVSCodeVersion() {\n const controller = new AbortController()\n const timeout = setTimeout(() => {\n controller.abort()\n }, 5000)\n\n try {\n const response = await fetch(\n \"https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=visual-studio-code-bin\",\n {\n signal: controller.signal,\n },\n )\n\n const pkgbuild = await response.text()\n const pkgverRegex = /pkgver=([0-9.]+)/\n const match = pkgbuild.match(pkgverRegex)\n\n if (match) {\n return match[1]\n }\n\n return FALLBACK\n } catch {\n return FALLBACK\n } finally {\n clearTimeout(timeout)\n }\n}\n\nawait getVSCodeVersion()\n","import consola from \"consola\"\n\nimport { getModels } from \"~/services/copilot/get-models\"\nimport { getVSCodeVersion } from \"~/services/get-vscode-version\"\n\nimport { state } from \"./state\"\n\nexport const sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms)\n })\n\nexport const isNullish = (value: unknown): value is null | undefined =>\n value === null || value === undefined\n\nexport async function cacheModels(): Promise<void> {\n const models = await getModels()\n state.models = models\n}\n\nexport const 
cacheVSCodeVersion = async () => {\n const response = await getVSCodeVersion()\n state.vsCodeVersion = response\n\n consola.info(`Using VSCode version: ${response}`)\n}\n","import consola from \"consola\"\n\nimport {\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from \"~/lib/api-config\"\nimport { sleep } from \"~/lib/utils\"\n\nimport type { DeviceCodeResponse } from \"./get-device-code\"\n\nexport async function pollAccessToken(\n deviceCode: DeviceCodeResponse,\n): Promise<string> {\n // Interval is in seconds, we need to multiply by 1000 to get milliseconds\n // I'm also adding another second, just to be safe\n const sleepDuration = (deviceCode.interval + 1) * 1000\n consola.debug(`Polling access token with interval of ${sleepDuration}ms`)\n\n while (true) {\n const response = await fetch(\n `${GITHUB_BASE_URL}/login/oauth/access_token`,\n {\n method: \"POST\",\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n device_code: deviceCode.device_code,\n grant_type: \"urn:ietf:params:oauth:grant-type:device_code\",\n }),\n },\n )\n\n if (!response.ok) {\n await sleep(sleepDuration)\n consola.error(\"Failed to poll access token:\", await response.text())\n\n continue\n }\n\n const json = await response.json()\n consola.debug(\"Polling access token response:\", json)\n\n const { access_token } = json as AccessTokenResponse\n\n if (access_token) {\n return access_token\n } else {\n await sleep(sleepDuration)\n }\n }\n}\n\ninterface AccessTokenResponse {\n access_token: string\n token_type: string\n scope: string\n}\n","import consola from \"consola\"\nimport fs from \"node:fs/promises\"\n\nimport { PATHS } from \"~/lib/paths\"\nimport { getCopilotToken } from \"~/services/github/get-copilot-token\"\nimport { getDeviceCode } from \"~/services/github/get-device-code\"\nimport { getGitHubUser } from \"~/services/github/get-user\"\nimport { pollAccessToken } from \"~/services/github/poll-access-token\"\n\nimport { HTTPError } from \"./error\"\nimport { state } from \"./state\"\n\nconst readGithubToken = () => fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n\nconst writeGithubToken = (token: string) =>\n fs.writeFile(PATHS.GITHUB_TOKEN_PATH, token)\n\nexport const setupCopilotToken = async () => {\n const { token, refresh_in } = await getCopilotToken()\n state.copilotToken = token\n\n // Display the Copilot token to the screen\n consola.debug(\"GitHub Copilot Token fetched successfully!\")\n if (state.showToken) {\n consola.info(\"Copilot token:\", token)\n }\n\n const refreshInterval = (refresh_in - 60) * 1000\n setInterval(async () => {\n consola.debug(\"Refreshing Copilot token\")\n try {\n const { token } = await getCopilotToken()\n state.copilotToken = token\n consola.debug(\"Copilot token refreshed\")\n if (state.showToken) {\n consola.info(\"Refreshed Copilot token:\", token)\n }\n } catch (error) {\n consola.error(\"Failed to refresh Copilot token:\", error)\n }\n }, refreshInterval)\n}\n\ninterface SetupGitHubTokenOptions {\n force?: boolean\n}\n\nexport async function setupGitHubToken(\n options?: SetupGitHubTokenOptions,\n): Promise<void> {\n try {\n const githubToken = await readGithubToken()\n\n if (githubToken && !options?.force) {\n state.githubToken = githubToken\n if (state.showToken) {\n consola.info(\"GitHub token:\", githubToken)\n }\n await logUser()\n\n return\n }\n\n consola.info(\"Not logged in, getting new access token\")\n const response = await getDeviceCode()\n consola.debug(\"Device code response:\", response)\n\n 
consola.info(\n `Please enter the code \"${response.user_code}\" in ${response.verification_uri}`,\n )\n\n const token = await pollAccessToken(response)\n await writeGithubToken(token)\n state.githubToken = token\n\n if (state.showToken) {\n consola.info(\"GitHub token:\", token)\n }\n await logUser()\n } catch (error) {\n if (error instanceof HTTPError) {\n consola.error(\"Failed to get GitHub token:\", await error.response.json())\n throw error\n }\n\n consola.error(\"Failed to get GitHub token:\", error)\n throw error\n }\n}\n\nasync function logUser() {\n const user = await getGitHubUser()\n consola.info(`Logged in as ${user.login}`)\n}\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { PATHS, ensurePaths } from \"./lib/paths\"\nimport { state } from \"./lib/state\"\nimport { setupGitHubToken } from \"./lib/token\"\n\ninterface RunAuthOptions {\n verbose: boolean\n showToken: boolean\n}\n\nexport async function runAuth(options: RunAuthOptions): Promise<void> {\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.showToken = options.showToken\n\n await ensurePaths()\n await setupGitHubToken({ force: true })\n consola.success(\"GitHub token written to\", PATHS.GITHUB_TOKEN_PATH)\n}\n\nexport const auth = defineCommand({\n meta: {\n name: \"auth\",\n description: \"Run GitHub auth flow without running the server\",\n },\n args: {\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub token on auth\",\n },\n },\n run({ args }) {\n return runAuth({\n verbose: args.verbose,\n showToken: args[\"show-token\"],\n })\n },\n})\n","import { GITHUB_API_BASE_URL, githubHeaders } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const getCopilotUsage = async (): Promise<CopilotUsageResponse> => {\n const response = await fetch(`${GITHUB_API_BASE_URL}/copilot_internal/user`, {\n headers: githubHeaders(state),\n })\n\n if (!response.ok) {\n throw new HTTPError(\"Failed to get Copilot usage\", response)\n }\n\n return (await response.json()) as CopilotUsageResponse\n}\n\nexport interface QuotaDetail {\n entitlement: number\n overage_count: number\n overage_permitted: boolean\n percent_remaining: number\n quota_id: string\n quota_remaining: number\n remaining: number\n unlimited: boolean\n}\n\ninterface QuotaSnapshots {\n chat: QuotaDetail\n completions: QuotaDetail\n premium_interactions: QuotaDetail\n}\n\ninterface CopilotUsageResponse {\n access_type_sku: string\n analytics_tracking_id: string\n assigned_date: string\n can_signup_for_limited: boolean\n chat_enabled: boolean\n copilot_plan: string\n organization_login_list: Array<unknown>\n organization_list: Array<unknown>\n quota_reset_date: string\n quota_snapshots: QuotaSnapshots\n}\n","import { defineCommand } from \"citty\"\nimport consola from \"consola\"\n\nimport { ensurePaths } from \"./lib/paths\"\nimport { setupGitHubToken } from \"./lib/token\"\nimport {\n getCopilotUsage,\n type QuotaDetail,\n} from \"./services/github/get-copilot-usage\"\n\nexport const checkUsage = defineCommand({\n meta: {\n name: \"check-usage\",\n description: \"Show current GitHub Copilot usage/quota information\",\n },\n async run() {\n await ensurePaths()\n await setupGitHubToken()\n try {\n const usage = await getCopilotUsage()\n const 
premium = usage.quota_snapshots.premium_interactions\n const premiumTotal = premium.entitlement\n const premiumUsed = premiumTotal - premium.remaining\n const premiumPercentUsed =\n premiumTotal > 0 ? (premiumUsed / premiumTotal) * 100 : 0\n const premiumPercentRemaining = premium.percent_remaining\n\n // Helper to summarize a quota snapshot\n function summarizeQuota(name: string, snap: QuotaDetail | undefined) {\n if (!snap) return `${name}: N/A`\n const total = snap.entitlement\n const used = total - snap.remaining\n const percentUsed = total > 0 ? (used / total) * 100 : 0\n const percentRemaining = snap.percent_remaining\n return `${name}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`\n }\n\n const premiumLine = `Premium: ${premiumUsed}/${premiumTotal} used (${premiumPercentUsed.toFixed(1)}% used, ${premiumPercentRemaining.toFixed(1)}% remaining)`\n const chatLine = summarizeQuota(\"Chat\", usage.quota_snapshots.chat)\n const completionsLine = summarizeQuota(\n \"Completions\",\n usage.quota_snapshots.completions,\n )\n\n consola.box(\n `Copilot Usage (plan: ${usage.copilot_plan})\\n`\n + `Quota resets: ${usage.quota_reset_date}\\n`\n + `\\nQuotas:\\n`\n + ` ${premiumLine}\\n`\n + ` ${chatLine}\\n`\n + ` ${completionsLine}`,\n )\n } catch (err) {\n consola.error(\"Failed to fetch Copilot usage:\", err)\n process.exit(1)\n }\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport consola from \"consola\"\nimport fs from \"node:fs/promises\"\nimport os from \"node:os\"\n\nimport { PATHS } from \"./lib/paths\"\n\ninterface DebugInfo {\n version: string\n runtime: {\n name: string\n version: string\n platform: string\n arch: string\n }\n paths: {\n APP_DIR: string\n GITHUB_TOKEN_PATH: string\n }\n tokenExists: boolean\n}\n\ninterface RunDebugOptions {\n json: boolean\n}\n\nasync function getPackageVersion(): Promise<string> {\n try {\n const packageJsonPath = new URL(\"../package.json\", import.meta.url).pathname\n // @ts-expect-error https://github.com/sindresorhus/eslint-plugin-unicorn/blob/v59.0.1/docs/rules/prefer-json-parse-buffer.md\n // JSON.parse() can actually parse buffers\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath)) as {\n version: string\n }\n return packageJson.version\n } catch {\n return \"unknown\"\n }\n}\n\nfunction getRuntimeInfo() {\n const isBun = typeof Bun !== \"undefined\"\n\n return {\n name: isBun ? \"bun\" : \"node\",\n version: isBun ? 
Bun.version : process.version.slice(1),\n platform: os.platform(),\n arch: os.arch(),\n }\n}\n\nasync function checkTokenExists(): Promise<boolean> {\n try {\n const stats = await fs.stat(PATHS.GITHUB_TOKEN_PATH)\n if (!stats.isFile()) return false\n\n const content = await fs.readFile(PATHS.GITHUB_TOKEN_PATH, \"utf8\")\n return content.trim().length > 0\n } catch {\n return false\n }\n}\n\nasync function getDebugInfo(): Promise<DebugInfo> {\n const [version, tokenExists] = await Promise.all([\n getPackageVersion(),\n checkTokenExists(),\n ])\n\n return {\n version,\n runtime: getRuntimeInfo(),\n paths: {\n APP_DIR: PATHS.APP_DIR,\n GITHUB_TOKEN_PATH: PATHS.GITHUB_TOKEN_PATH,\n },\n tokenExists,\n }\n}\n\nfunction printDebugInfoPlain(info: DebugInfo): void {\n consola.info(`openroute debug\n\nVersion: ${info.version}\nRuntime: ${info.runtime.name} ${info.runtime.version} (${info.runtime.platform} ${info.runtime.arch})\n\nPaths:\n- APP_DIR: ${info.paths.APP_DIR}\n- GITHUB_TOKEN_PATH: ${info.paths.GITHUB_TOKEN_PATH}\n\nToken exists: ${info.tokenExists ? \"Yes\" : \"No\"}`)\n}\n\nfunction printDebugInfoJson(info: DebugInfo): void {\n console.log(JSON.stringify(info, null, 2))\n}\n\nexport async function runDebug(options: RunDebugOptions): Promise<void> {\n const debugInfo = await getDebugInfo()\n\n if (options.json) {\n printDebugInfoJson(debugInfo)\n } else {\n printDebugInfoPlain(debugInfo)\n }\n}\n\nexport const debug = defineCommand({\n meta: {\n name: \"debug\",\n description: \"Print debug information about the application\",\n },\n args: {\n json: {\n type: \"boolean\",\n default: false,\n description: \"Output debug information as JSON\",\n },\n },\n run({ args }) {\n return runDebug({\n json: args.json,\n })\n },\n})\n","import consola from \"consola\"\nimport { getProxyForUrl } from \"proxy-from-env\"\nimport { Agent, ProxyAgent, setGlobalDispatcher, type Dispatcher } from \"undici\"\n\nexport function initProxyFromEnv(): void {\n if (typeof Bun !== \"undefined\") return\n\n try {\n const direct = new Agent()\n const proxies = new Map<string, ProxyAgent>()\n\n // We only need a minimal dispatcher that implements `dispatch` at runtime.\n // Typing the object as `Dispatcher` forces TypeScript to require many\n // additional methods. Instead, keep a plain object and cast when passing\n // to `setGlobalDispatcher`.\n const dispatcher = {\n dispatch(\n options: Dispatcher.DispatchOptions,\n handler: Dispatcher.DispatchHandler,\n ) {\n try {\n const origin =\n typeof options.origin === \"string\" ?\n new URL(options.origin)\n : (options.origin as URL)\n const get = getProxyForUrl as unknown as (\n u: string,\n ) => string | undefined\n const raw = get(origin.toString())\n const proxyUrl = raw && raw.length > 0 ? 
raw : undefined\n if (!proxyUrl) {\n consola.debug(`HTTP proxy bypass: ${origin.hostname}`)\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n let agent = proxies.get(proxyUrl)\n if (!agent) {\n agent = new ProxyAgent(proxyUrl)\n proxies.set(proxyUrl, agent)\n }\n let label = proxyUrl\n try {\n const u = new URL(proxyUrl)\n label = `${u.protocol}//${u.host}`\n } catch {\n /* noop */\n }\n consola.debug(`HTTP proxy route: ${origin.hostname} via ${label}`)\n return (agent as unknown as Dispatcher).dispatch(options, handler)\n } catch {\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n },\n close() {\n return direct.close()\n },\n destroy() {\n return direct.destroy()\n },\n }\n\n setGlobalDispatcher(dispatcher as unknown as Dispatcher)\n consola.debug(\"HTTP proxy configured from environment (per-URL)\")\n } catch (err) {\n consola.debug(\"Proxy setup skipped:\", err)\n }\n}\n","import { execSync } from \"node:child_process\"\nimport process from \"node:process\"\n\ntype ShellName = \"bash\" | \"zsh\" | \"fish\" | \"powershell\" | \"cmd\" | \"sh\"\ntype EnvVars = Record<string, string | undefined>\n\nfunction getShell(): ShellName {\n const { platform, ppid, env } = process\n\n if (platform === \"win32\") {\n try {\n const command = `wmic process get ParentProcessId,Name | findstr \"${ppid}\"`\n const parentProcess = execSync(command, { stdio: \"pipe\" }).toString()\n\n if (parentProcess.toLowerCase().includes(\"powershell.exe\")) {\n return \"powershell\"\n }\n } catch {\n return \"cmd\"\n }\n\n return \"cmd\"\n } else {\n const shellPath = env.SHELL\n if (shellPath) {\n if (shellPath.endsWith(\"zsh\")) return \"zsh\"\n if (shellPath.endsWith(\"fish\")) return \"fish\"\n if (shellPath.endsWith(\"bash\")) return \"bash\"\n }\n\n return \"sh\"\n }\n}\n\n/**\n * Generates a copy-pasteable script to set multiple environment variables\n * and run a subsequent command.\n * @param {EnvVars} envVars - An object of environment variables to set.\n * @param {string} commandToRun - The command to run after setting the variables.\n * @returns {string} The formatted script string.\n */\nexport function generateEnvScript(\n envVars: EnvVars,\n commandToRun: string = \"\",\n): string {\n const shell = getShell()\n const filteredEnvVars = Object.entries(envVars).filter(\n ([, value]) => value !== undefined,\n ) as Array<[string, string]>\n\n let commandBlock: string\n\n switch (shell) {\n case \"powershell\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `$env:${key} = ${value}`)\n .join(\"; \")\n break\n }\n case \"cmd\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set ${key}=${value}`)\n .join(\" & \")\n break\n }\n case \"fish\": {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set -gx ${key} ${value}`)\n .join(\"; \")\n break\n }\n default: {\n // bash, zsh, sh\n const assignments = filteredEnvVars\n .map(([key, value]) => `${key}=${value}`)\n .join(\" \")\n commandBlock = filteredEnvVars.length > 0 ? `export ${assignments}` : \"\"\n break\n }\n }\n\n if (commandBlock && commandToRun) {\n const separator = shell === \"cmd\" ? 
\" & \" : \" && \"\n return `${commandBlock}${separator}${commandToRun}`\n }\n\n return commandBlock || commandToRun\n}\n","import consola from \"consola\"\n\nimport { HTTPError } from \"./error\"\n\nexport const awaitApproval = async () => {\n const response = await consola.prompt(`Accept incoming request?`, {\n type: \"confirm\",\n })\n\n if (!response)\n throw new HTTPError(\n \"Request rejected\",\n Response.json({ message: \"Request rejected\" }, { status: 403 }),\n )\n}\n","import consola from \"consola\"\n\nimport type { State } from \"./state\"\n\nimport { HTTPError } from \"./error\"\nimport { sleep } from \"./utils\"\n\nexport async function checkRateLimit(state: State) {\n if (state.rateLimitSeconds === undefined) return\n\n const now = Date.now()\n\n if (!state.lastRequestTimestamp) {\n state.lastRequestTimestamp = now\n return\n }\n\n const elapsedSeconds = (now - state.lastRequestTimestamp) / 1000\n\n if (elapsedSeconds > state.rateLimitSeconds) {\n state.lastRequestTimestamp = now\n return\n }\n\n const waitTimeSeconds = Math.ceil(state.rateLimitSeconds - elapsedSeconds)\n\n if (!state.rateLimitWait) {\n consola.warn(\n `Rate limit exceeded. Need to wait ${waitTimeSeconds} more seconds.`,\n )\n throw new HTTPError(\n \"Rate limit exceeded\",\n Response.json({ message: \"Rate limit exceeded\" }, { status: 429 }),\n )\n }\n\n const waitTimeMs = waitTimeSeconds * 1000\n consola.warn(\n `Rate limit reached. Waiting ${waitTimeSeconds} seconds before proceeding...`,\n )\n await sleep(waitTimeMs)\n \n state.lastRequestTimestamp = now\n consola.info(\"Rate limit wait completed, proceeding with request\")\n return\n}\n","import type {\n ChatCompletionsPayload,\n ContentPart,\n Message,\n Tool,\n ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\nimport type { Model } from \"~/services/copilot/get-models\"\n\n// Encoder type mapping\nconst ENCODING_MAP = {\n o200k_base: () => import(\"gpt-tokenizer/encoding/o200k_base\"),\n cl100k_base: () => import(\"gpt-tokenizer/encoding/cl100k_base\"),\n p50k_base: () => import(\"gpt-tokenizer/encoding/p50k_base\"),\n p50k_edit: () => import(\"gpt-tokenizer/encoding/p50k_edit\"),\n r50k_base: () => import(\"gpt-tokenizer/encoding/r50k_base\"),\n} as const\n\ntype SupportedEncoding = keyof typeof ENCODING_MAP\n\n// Define encoder interface\ninterface Encoder {\n encode: (text: string) => Array<number>\n}\n\n// Cache loaded encoders to avoid repeated imports\nconst encodingCache = new Map<string, Encoder>()\n\n/**\n * Calculate tokens for tool calls\n */\nconst calculateToolCallsTokens = (\n toolCalls: Array<ToolCall>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = 0\n for (const toolCall of toolCalls) {\n tokens += constants.funcInit\n tokens += encoder.encode(JSON.stringify(toolCall)).length\n }\n tokens += constants.funcEnd\n return tokens\n}\n\n/**\n * Calculate tokens for content parts\n */\nconst calculateContentPartsTokens = (\n contentParts: Array<ContentPart>,\n encoder: Encoder,\n): number => {\n let tokens = 0\n for (const part of contentParts) {\n if (part.type === \"image_url\") {\n tokens += encoder.encode(part.image_url.url).length + 85\n } else if (part.text) {\n tokens += encoder.encode(part.text).length\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for a single message\n */\nconst calculateMessageTokens = (\n message: Message,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n const tokensPerMessage = 3\n const 
tokensPerName = 1\n let tokens = tokensPerMessage\n for (const [key, value] of Object.entries(message)) {\n if (typeof value === \"string\") {\n tokens += encoder.encode(value).length\n }\n if (key === \"name\") {\n tokens += tokensPerName\n }\n if (key === \"tool_calls\") {\n tokens += calculateToolCallsTokens(\n value as Array<ToolCall>,\n encoder,\n constants,\n )\n }\n if (key === \"content\" && Array.isArray(value)) {\n tokens += calculateContentPartsTokens(\n value as Array<ContentPart>,\n encoder,\n )\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens using custom algorithm\n */\nconst calculateTokens = (\n messages: Array<Message>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (messages.length === 0) {\n return 0\n }\n let numTokens = 0\n for (const message of messages) {\n numTokens += calculateMessageTokens(message, encoder, constants)\n }\n // every reply is primed with <|start|>assistant<|message|>\n numTokens += 3\n return numTokens\n}\n\n/**\n * Get the corresponding encoder module based on encoding type\n */\nconst getEncodeChatFunction = async (encoding: string): Promise<Encoder> => {\n if (encodingCache.has(encoding)) {\n const cached = encodingCache.get(encoding)\n if (cached) {\n return cached\n }\n }\n\n const supportedEncoding = encoding as SupportedEncoding\n if (!(supportedEncoding in ENCODING_MAP)) {\n const fallbackModule = (await ENCODING_MAP.o200k_base()) as Encoder\n encodingCache.set(encoding, fallbackModule)\n return fallbackModule\n }\n\n const encodingModule = (await ENCODING_MAP[supportedEncoding]()) as Encoder\n encodingCache.set(encoding, encodingModule)\n return encodingModule\n}\n\n/**\n * Get tokenizer type from model information\n */\nexport const getTokenizerFromModel = (model: Model): string => {\n return model.capabilities.tokenizer || \"o200k_base\"\n}\n\n/**\n * Get model-specific constants for token calculation\n */\nconst getModelConstants = (model: Model) => {\n return model.id === \"gpt-3.5-turbo\" || model.id === \"gpt-4\" ?\n {\n funcInit: 10,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n : {\n funcInit: 7,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n}\n\n/**\n * Calculate tokens for a single parameter\n */\nconst calculateParameterTokens = (\n key: string,\n prop: unknown,\n context: {\n encoder: Encoder\n constants: ReturnType<typeof getModelConstants>\n },\n): number => {\n const { encoder, constants } = context\n let tokens = constants.propKey\n\n // Early return if prop is not an object\n if (typeof prop !== \"object\" || prop === null) {\n return tokens\n }\n\n // Type assertion for parameter properties\n const param = prop as {\n type?: string\n description?: string\n enum?: Array<unknown>\n [key: string]: unknown\n }\n\n const paramName = key\n const paramType = param.type || \"string\"\n let paramDesc = param.description || \"\"\n\n // Handle enum values\n if (param.enum && Array.isArray(param.enum)) {\n tokens += constants.enumInit\n for (const item of param.enum) {\n tokens += constants.enumItem\n tokens += encoder.encode(String(item)).length\n }\n }\n\n // Clean up description\n if (paramDesc.endsWith(\".\")) {\n paramDesc = paramDesc.slice(0, -1)\n }\n\n // Encode the main parameter line\n const line = `${paramName}:${paramType}:${paramDesc}`\n tokens += encoder.encode(line).length\n\n // Handle additional properties (excluding standard ones)\n const excludedKeys = new Set([\"type\", \"description\", 
\"enum\"])\n for (const propertyName of Object.keys(param)) {\n if (!excludedKeys.has(propertyName)) {\n const propertyValue = param[propertyName]\n const propertyText =\n typeof propertyValue === \"string\" ? propertyValue : (\n JSON.stringify(propertyValue)\n )\n tokens += encoder.encode(`${propertyName}:${propertyText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for function parameters\n */\nconst calculateParametersTokens = (\n parameters: unknown,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n if (!parameters || typeof parameters !== \"object\") {\n return 0\n }\n\n const params = parameters as Record<string, unknown>\n let tokens = 0\n\n for (const [key, value] of Object.entries(params)) {\n if (key === \"properties\") {\n const properties = value as Record<string, unknown>\n if (Object.keys(properties).length > 0) {\n tokens += constants.propInit\n for (const propKey of Object.keys(properties)) {\n tokens += calculateParameterTokens(propKey, properties[propKey], {\n encoder,\n constants,\n })\n }\n }\n } else {\n const paramText =\n typeof value === \"string\" ? value : JSON.stringify(value)\n tokens += encoder.encode(`${key}:${paramText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for a single tool\n */\nconst calculateToolTokens = (\n tool: Tool,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let tokens = constants.funcInit\n const func = tool.function\n const fName = func.name\n let fDesc = func.description || \"\"\n if (fDesc.endsWith(\".\")) {\n fDesc = fDesc.slice(0, -1)\n }\n const line = fName + \":\" + fDesc\n tokens += encoder.encode(line).length\n if (\n typeof func.parameters === \"object\" \n && func.parameters !== null\n ) {\n tokens += calculateParametersTokens(func.parameters, encoder, constants)\n }\n return tokens\n}\n\n/**\n * Calculate token count for tools based on model\n */\nexport const numTokensForTools = (\n tools: Array<Tool>,\n encoder: Encoder,\n constants: ReturnType<typeof getModelConstants>,\n): number => {\n let funcTokenCount = 0\n for (const tool of tools) {\n funcTokenCount += calculateToolTokens(tool, encoder, constants)\n }\n funcTokenCount += constants.funcEnd\n return funcTokenCount\n}\n\n/**\n * Calculate the token count of messages, supporting multiple GPT encoders\n */\nexport const getTokenCount = async (\n payload: ChatCompletionsPayload,\n model: Model,\n): Promise<{ input: number; output: number }> => {\n // Get tokenizer string\n const tokenizer = getTokenizerFromModel(model)\n\n // Get corresponding encoder module\n const encoder = await getEncodeChatFunction(tokenizer)\n\n const simplifiedMessages = payload.messages\n const inputMessages = simplifiedMessages.filter(\n (msg) => msg.role !== \"assistant\",\n )\n const outputMessages = simplifiedMessages.filter(\n (msg) => msg.role === \"assistant\",\n )\n\n const constants = getModelConstants(model)\n let inputTokens = calculateTokens(inputMessages, encoder, constants)\n if (payload.tools && payload.tools.length > 0) {\n inputTokens += numTokensForTools(payload.tools, encoder, constants)\n }\n const outputTokens = calculateTokens(outputMessages, encoder, constants)\n\n return {\n input: inputTokens,\n output: outputTokens,\n }\n}\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from 
\"~/lib/state\"\n\nexport const createChatCompletions = async (\n payload: ChatCompletionsPayload,\n) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = payload.messages.some(\n (x) =>\n typeof x.content !== \"string\"\n && x.content?.some((x) => x.type === \"image_url\"),\n )\n\n // Agent/user check for X-Initiator header\n // Determine if any message is from an agent (\"assistant\" or \"tool\")\n const isAgentCall = payload.messages.some((msg) =>\n [\"assistant\", \"tool\"].includes(msg.role),\n )\n\n // Build headers and add X-Initiator\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isAgentCall ? \"agent\" : \"user\",\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create chat completions\", response)\n throw new HTTPError(\"Failed to create chat completions\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ChatCompletionResponse\n}\n\n// Streaming types\n\nexport interface ChatCompletionChunk {\n id: string\n object: \"chat.completion.chunk\"\n created: number\n model: string\n choices: Array<Choice>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n completion_tokens_details?: {\n accepted_prediction_tokens: number\n rejected_prediction_tokens: number\n }\n }\n}\n\ninterface Delta {\n content?: string | null\n role?: \"user\" | \"assistant\" | \"system\" | \"tool\"\n tool_calls?: Array<{\n index: number\n id?: string\n type?: \"function\"\n function?: {\n name?: string\n arguments?: string\n }\n }>\n}\n\ninterface Choice {\n index: number\n delta: Delta\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null\n logprobs: object | null\n}\n\n// Non-streaming types\n\nexport interface ChatCompletionResponse {\n id: string\n object: \"chat.completion\"\n created: number\n model: string\n choices: Array<ChoiceNonStreaming>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n }\n}\n\ninterface ResponseMessage {\n role: \"assistant\"\n content: string | null\n tool_calls?: Array<ToolCall>\n}\n\ninterface ChoiceNonStreaming {\n index: number\n message: ResponseMessage\n logprobs: object | null\n finish_reason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\"\n}\n\n// Payload types\n\nexport interface ChatCompletionsPayload {\n messages: Array<Message>\n model: string\n temperature?: number | null\n top_p?: number | null\n max_tokens?: number | null\n stop?: string | Array<string> | null\n n?: number | null\n stream?: boolean | null\n\n frequency_penalty?: number | null\n presence_penalty?: number | null\n logit_bias?: Record<string, number> | null\n logprobs?: boolean | null\n response_format?: { type: \"json_object\" } | null\n seed?: number | null\n tools?: Array<Tool> | null\n tool_choice?:\n | \"none\"\n | \"auto\"\n | \"required\"\n | { type: \"function\"; function: { name: string } }\n | null\n user?: string | null\n}\n\nexport interface Tool {\n type: \"function\"\n function: {\n name: string\n description?: string\n parameters: Record<string, unknown>\n }\n}\n\nexport interface 
Message {\n role: \"user\" | \"assistant\" | \"system\" | \"tool\" | \"developer\"\n content: string | Array<ContentPart> | null\n\n name?: string\n tool_calls?: Array<ToolCall>\n tool_call_id?: string\n}\n\nexport interface ToolCall {\n id: string\n type: \"function\"\n function: {\n name: string\n arguments: string\n }\n}\n\nexport type ContentPart = TextPart | ImagePart\n\nexport interface TextPart {\n type: \"text\"\n text: string\n}\n\nexport interface ImagePart {\n type: \"image_url\"\n image_url: {\n url: string\n detail?: \"low\" | \"high\" | \"auto\"\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE, type SSEMessage } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\nimport { isNullish } from \"~/lib/utils\"\nimport {\n createChatCompletions,\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n} from \"~/services/copilot/create-chat-completions\"\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n let payload = await c.req.json<ChatCompletionsPayload>()\n consola.debug(\"Request payload:\", JSON.stringify(payload).slice(-400))\n\n // Find the selected model\n const selectedModel = state.models?.data.find(\n (model) => model.id === payload.model,\n )\n\n // Calculate and display token count\n try {\n if (selectedModel) {\n const tokenCount = await getTokenCount(payload, selectedModel)\n consola.info(\"Current token count:\", tokenCount)\n } else {\n consola.warn(\"No model selected, skipping token count calculation\")\n }\n } catch (error) {\n consola.warn(\"Failed to calculate token count:\", error)\n }\n\n if (state.manualApprove) await awaitApproval()\n\n if (isNullish(payload.max_tokens)) {\n payload = {\n ...payload,\n max_tokens: selectedModel?.capabilities.limits.max_output_tokens,\n }\n consola.debug(\"Set max_tokens to:\", JSON.stringify(payload.max_tokens))\n }\n\n const response = await createChatCompletions(payload)\n\n if (isNonStreaming(response)) {\n consola.debug(\"Non-streaming response:\", JSON.stringify(response))\n return c.json(response)\n }\n\n consola.debug(\"Streaming response\")\n return streamSSE(c, async (stream) => {\n for await (const chunk of response) {\n consola.debug(\"Streaming chunk:\", JSON.stringify(chunk))\n await stream.writeSSE(chunk as SSEMessage)\n }\n })\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCompletion } from \"./handler\"\n\nexport const completionRoutes = new Hono()\n\ncompletionRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createEmbeddings = async (payload: EmbeddingRequest) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const response = await fetch(`${copilotBaseUrl(state)}/embeddings`, {\n method: \"POST\",\n headers: copilotHeaders(state),\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) throw new HTTPError(\"Failed to create 
embeddings\", response)\n\n return (await response.json()) as EmbeddingResponse\n}\n\nexport interface EmbeddingRequest {\n input: string | Array<string>\n model: string\n}\n\nexport interface Embedding {\n object: string\n embedding: Array<number>\n index: number\n}\n\nexport interface EmbeddingResponse {\n object: string\n data: Array<Embedding>\n model: string\n usage: {\n prompt_tokens: number\n total_tokens: number\n }\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport {\n createEmbeddings,\n type EmbeddingRequest,\n} from \"~/services/copilot/create-embeddings\"\n\nexport const embeddingRoutes = new Hono()\n\nembeddingRoutes.post(\"/\", async (c) => {\n try {\n const payload = await c.req.json<EmbeddingRequest>()\n const response = await createEmbeddings(payload)\n\n return c.json(response)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { type AnthropicResponse } from \"./anthropic-types\"\n\nexport function mapOpenAIStopReasonToAnthropic(\n finishReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null,\n): AnthropicResponse[\"stop_reason\"] {\n if (finishReason === null) {\n return null\n }\n const stopReasonMap = {\n stop: \"end_turn\",\n length: \"max_tokens\",\n tool_calls: \"tool_use\",\n content_filter: \"end_turn\",\n } as const\n return stopReasonMap[finishReason]\n}\n","import {\n type ChatCompletionResponse,\n type ChatCompletionsPayload,\n type ContentPart,\n type Message,\n type TextPart,\n type Tool,\n type ToolCall,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicAssistantContentBlock,\n type AnthropicAssistantMessage,\n type AnthropicMessage,\n type AnthropicMessagesPayload,\n type AnthropicResponse,\n type AnthropicTextBlock,\n type AnthropicThinkingBlock,\n type AnthropicTool,\n type AnthropicToolResultBlock,\n type AnthropicToolUseBlock,\n type AnthropicUserContentBlock,\n type AnthropicUserMessage,\n} from \"./anthropic-types\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\n// Payload translation\n\nexport function translateToOpenAI(\n payload: AnthropicMessagesPayload,\n): ChatCompletionsPayload {\n return {\n model: translateModelName(payload.model),\n messages: translateAnthropicMessagesToOpenAI(\n payload.messages,\n payload.system,\n ),\n max_tokens: payload.max_tokens,\n stop: payload.stop_sequences,\n stream: payload.stream,\n temperature: payload.temperature,\n top_p: payload.top_p,\n user: payload.metadata?.user_id,\n tools: translateAnthropicToolsToOpenAI(payload.tools),\n tool_choice: translateAnthropicToolChoiceToOpenAI(payload.tool_choice),\n }\n}\n\nfunction translateModelName(model: string): string {\n // Subagent requests use a specific model number which Copilot doesn't support\n if (model.startsWith(\"claude-sonnet-4-\")) {\n return model.replace(/^claude-sonnet-4-.*/, \"claude-sonnet-4\")\n } else if (model.startsWith(\"claude-opus-\")) {\n return model.replace(/^claude-opus-4-.*/, \"claude-opus-4\")\n }\n return model\n}\n\nfunction translateAnthropicMessagesToOpenAI(\n anthropicMessages: Array<AnthropicMessage>,\n system: string | Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n const systemMessages = handleSystemPrompt(system)\n\n const otherMessages = anthropicMessages.flatMap((message) =>\n message.role === \"user\" ?\n handleUserMessage(message)\n : handleAssistantMessage(message),\n )\n\n return [...systemMessages, ...otherMessages]\n}\n\nfunction handleSystemPrompt(\n system: string | 
Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n if (!system) {\n return []\n }\n\n if (typeof system === \"string\") {\n return [{ role: \"system\", content: system }]\n } else {\n const systemText = system.map((block) => block.text).join(\"\\n\\n\")\n return [{ role: \"system\", content: systemText }]\n }\n}\n\nfunction handleUserMessage(message: AnthropicUserMessage): Array<Message> {\n const newMessages: Array<Message> = []\n\n if (Array.isArray(message.content)) {\n const toolResultBlocks = message.content.filter(\n (block): block is AnthropicToolResultBlock =>\n block.type === \"tool_result\",\n )\n const otherBlocks = message.content.filter(\n (block) => block.type !== \"tool_result\",\n )\n\n // Tool results must come first to maintain protocol: tool_use -> tool_result -> user\n for (const block of toolResultBlocks) {\n newMessages.push({\n role: \"tool\",\n tool_call_id: block.tool_use_id,\n content: mapContent(block.content),\n })\n }\n\n if (otherBlocks.length > 0) {\n newMessages.push({\n role: \"user\",\n content: mapContent(otherBlocks),\n })\n }\n } else {\n newMessages.push({\n role: \"user\",\n content: mapContent(message.content),\n })\n }\n\n return newMessages\n}\n\nfunction handleAssistantMessage(\n message: AnthropicAssistantMessage,\n): Array<Message> {\n if (!Array.isArray(message.content)) {\n return [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n }\n\n const toolUseBlocks = message.content.filter(\n (block): block is AnthropicToolUseBlock => block.type === \"tool_use\",\n )\n\n const textBlocks = message.content.filter(\n (block): block is AnthropicTextBlock => block.type === \"text\",\n )\n\n const thinkingBlocks = message.content.filter(\n (block): block is AnthropicThinkingBlock => block.type === \"thinking\",\n )\n\n // Combine text and thinking blocks, as OpenAI doesn't have separate thinking blocks\n const allTextContent = [\n ...textBlocks.map((b) => b.text),\n ...thinkingBlocks.map((b) => b.thinking),\n ].join(\"\\n\\n\")\n\n return toolUseBlocks.length > 0 ?\n [\n {\n role: \"assistant\",\n content: allTextContent || null,\n tool_calls: toolUseBlocks.map((toolUse) => ({\n id: toolUse.id,\n type: \"function\",\n function: {\n name: toolUse.name,\n arguments: JSON.stringify(toolUse.input),\n },\n })),\n },\n ]\n : [\n {\n role: \"assistant\",\n content: mapContent(message.content),\n },\n ]\n}\n\nfunction mapContent(\n content:\n | string\n | Array<AnthropicUserContentBlock | AnthropicAssistantContentBlock>,\n): string | Array<ContentPart> | null {\n if (typeof content === \"string\") {\n return content\n }\n if (!Array.isArray(content)) {\n return null\n }\n\n const hasImage = content.some((block) => block.type === \"image\")\n if (!hasImage) {\n return content\n .filter(\n (block): block is AnthropicTextBlock | AnthropicThinkingBlock =>\n block.type === \"text\" || block.type === \"thinking\",\n )\n .map((block) => (block.type === \"text\" ? 
block.text : block.thinking))\n .join(\"\\n\\n\")\n }\n\n const contentParts: Array<ContentPart> = []\n for (const block of content) {\n switch (block.type) {\n case \"text\": {\n contentParts.push({ type: \"text\", text: block.text })\n\n break\n }\n case \"thinking\": {\n contentParts.push({ type: \"text\", text: block.thinking })\n\n break\n }\n case \"image\": {\n contentParts.push({\n type: \"image_url\",\n image_url: {\n url: `data:${block.source.media_type};base64,${block.source.data}`,\n },\n })\n\n break\n }\n // No default\n }\n }\n return contentParts\n}\n\nfunction translateAnthropicToolsToOpenAI(\n anthropicTools: Array<AnthropicTool> | undefined,\n): Array<Tool> | undefined {\n if (!anthropicTools) {\n return undefined\n }\n return anthropicTools.map((tool) => ({\n type: \"function\",\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.input_schema,\n },\n }))\n}\n\nfunction translateAnthropicToolChoiceToOpenAI(\n anthropicToolChoice: AnthropicMessagesPayload[\"tool_choice\"],\n): ChatCompletionsPayload[\"tool_choice\"] {\n if (!anthropicToolChoice) {\n return undefined\n }\n\n switch (anthropicToolChoice.type) {\n case \"auto\": {\n return \"auto\"\n }\n case \"any\": {\n return \"required\"\n }\n case \"tool\": {\n if (anthropicToolChoice.name) {\n return {\n type: \"function\",\n function: { name: anthropicToolChoice.name },\n }\n }\n return undefined\n }\n case \"none\": {\n return \"none\"\n }\n default: {\n return undefined\n }\n }\n}\n\n// Response translation\n\nexport function translateToAnthropic(\n response: ChatCompletionResponse,\n): AnthropicResponse {\n // Merge content from all choices\n const allTextBlocks: Array<AnthropicTextBlock> = []\n const allToolUseBlocks: Array<AnthropicToolUseBlock> = []\n let stopReason: \"stop\" | \"length\" | \"tool_calls\" | \"content_filter\" | null =\n null // default\n stopReason = response.choices[0]?.finish_reason ?? stopReason\n\n // Process all choices to extract text and tool use blocks\n for (const choice of response.choices) {\n const textBlocks = getAnthropicTextBlocks(choice.message.content)\n const toolUseBlocks = getAnthropicToolUseBlocks(choice.message.tool_calls)\n\n allTextBlocks.push(...textBlocks)\n allToolUseBlocks.push(...toolUseBlocks)\n\n // Use the finish_reason from the first choice, or prioritize tool_calls\n if (choice.finish_reason === \"tool_calls\" || stopReason === \"stop\") {\n stopReason = choice.finish_reason\n }\n }\n\n // Note: GitHub Copilot doesn't generate thinking blocks, so we don't include them in responses\n\n return {\n id: response.id,\n type: \"message\",\n role: \"assistant\",\n model: response.model,\n content: [...allTextBlocks, ...allToolUseBlocks],\n stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),\n stop_sequence: null,\n usage: {\n input_tokens:\n (response.usage?.prompt_tokens ?? 0)\n - (response.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: response.usage?.completion_tokens ?? 
0,\n ...(response.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n response.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n }\n}\n\nfunction getAnthropicTextBlocks(\n messageContent: Message[\"content\"],\n): Array<AnthropicTextBlock> {\n if (typeof messageContent === \"string\") {\n return [{ type: \"text\", text: messageContent }]\n }\n\n if (Array.isArray(messageContent)) {\n return messageContent\n .filter((part): part is TextPart => part.type === \"text\")\n .map((part) => ({ type: \"text\", text: part.text }))\n }\n\n return []\n}\n\nfunction getAnthropicToolUseBlocks(\n toolCalls: Array<ToolCall> | undefined,\n): Array<AnthropicToolUseBlock> {\n if (!toolCalls) {\n return []\n }\n return toolCalls.map((toolCall) => ({\n type: \"tool_use\",\n id: toolCall.id,\n name: toolCall.function.name,\n input: JSON.parse(toolCall.function.arguments) as Record<string, unknown>,\n }))\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\n\nimport { state } from \"~/lib/state\"\nimport { getTokenCount } from \"~/lib/tokenizer\"\n\nimport { type AnthropicMessagesPayload } from \"./anthropic-types\"\nimport { translateToOpenAI } from \"./non-stream-translation\"\n\n/**\n * Handles token counting for Anthropic messages\n */\nexport async function handleCountTokens(c: Context) {\n try {\n const anthropicBeta = c.req.header(\"anthropic-beta\")\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n\n const openAIPayload = translateToOpenAI(anthropicPayload)\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === anthropicPayload.model,\n )\n\n if (!selectedModel) {\n consola.warn(\"Model not found, returning default token count\")\n return c.json({\n input_tokens: 1,\n })\n }\n\n const tokenCount = await getTokenCount(openAIPayload, selectedModel)\n\n if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {\n let mcpToolExist = false\n if (anthropicBeta?.startsWith(\"claude-code\")) {\n mcpToolExist = anthropicPayload.tools.some((tool) =>\n tool.name.startsWith(\"mcp__\"),\n )\n }\n if (!mcpToolExist) {\n if (anthropicPayload.model.startsWith(\"claude\")) {\n // https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview#pricing\n tokenCount.input = tokenCount.input + 346\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n tokenCount.input = tokenCount.input + 480\n }\n }\n }\n\n let finalTokenCount = tokenCount.input + tokenCount.output\n if (anthropicPayload.model.startsWith(\"claude\")) {\n finalTokenCount = Math.round(finalTokenCount * 1.15)\n } else if (anthropicPayload.model.startsWith(\"grok\")) {\n finalTokenCount = Math.round(finalTokenCount * 1.03)\n }\n\n consola.info(\"Token count:\", finalTokenCount)\n\n return c.json({\n input_tokens: finalTokenCount,\n })\n } catch (error) {\n consola.error(\"Error counting tokens:\", error)\n return c.json({\n input_tokens: 1,\n })\n }\n}\n","import { type ChatCompletionChunk } from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicStreamEventData,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport { mapOpenAIStopReasonToAnthropic } from \"./utils\"\n\nfunction isToolBlockOpen(state: AnthropicStreamState): boolean {\n if (!state.contentBlockOpen) {\n return false\n }\n // Check if the current block index corresponds to any known tool call\n return Object.values(state.toolCalls).some(\n (tc) => tc.anthropicBlockIndex === state.contentBlockIndex,\n )\n}\n\n \nexport 
function translateChunkToAnthropicEvents(\n chunk: ChatCompletionChunk,\n state: AnthropicStreamState,\n): Array<AnthropicStreamEventData> {\n const events: Array<AnthropicStreamEventData> = []\n\n if (chunk.choices.length === 0) {\n return events\n }\n\n const choice = chunk.choices[0]\n const { delta } = choice\n\n if (!state.messageStartSent) {\n events.push({\n type: \"message_start\",\n message: {\n id: chunk.id,\n type: \"message\",\n role: \"assistant\",\n content: [],\n model: chunk.model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: 0, // Will be updated in message_delta when finished\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n })\n state.messageStartSent = true\n }\n\n if (delta.content) {\n if (isToolBlockOpen(state)) {\n // A tool block was open, so close it before starting a text block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n if (!state.contentBlockOpen) {\n events.push({\n type: \"content_block_start\",\n index: state.contentBlockIndex,\n content_block: {\n type: \"text\",\n text: \"\",\n },\n })\n state.contentBlockOpen = true\n }\n\n events.push({\n type: \"content_block_delta\",\n index: state.contentBlockIndex,\n delta: {\n type: \"text_delta\",\n text: delta.content,\n },\n })\n }\n\n if (delta.tool_calls) {\n for (const toolCall of delta.tool_calls) {\n if (toolCall.id && toolCall.function?.name) {\n // New tool call starting.\n if (state.contentBlockOpen) {\n // Close any previously open block.\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n const anthropicBlockIndex = state.contentBlockIndex\n state.toolCalls[toolCall.index] = {\n id: toolCall.id,\n name: toolCall.function.name,\n anthropicBlockIndex,\n }\n\n events.push({\n type: \"content_block_start\",\n index: anthropicBlockIndex,\n content_block: {\n type: \"tool_use\",\n id: toolCall.id,\n name: toolCall.function.name,\n input: {},\n },\n })\n state.contentBlockOpen = true\n }\n\n if (toolCall.function?.arguments) {\n const toolCallInfo = state.toolCalls[toolCall.index]\n // Tool call can still be empty\n \n if (toolCallInfo) {\n events.push({\n type: \"content_block_delta\",\n index: toolCallInfo.anthropicBlockIndex,\n delta: {\n type: \"input_json_delta\",\n partial_json: toolCall.function.arguments,\n },\n })\n }\n }\n }\n }\n\n if (choice.finish_reason) {\n if (state.contentBlockOpen) {\n events.push({\n type: \"content_block_stop\",\n index: state.contentBlockIndex,\n })\n state.contentBlockOpen = false\n }\n\n events.push(\n {\n type: \"message_delta\",\n delta: {\n stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),\n stop_sequence: null,\n },\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: chunk.usage?.completion_tokens ?? 
0,\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n {\n type: \"message_stop\",\n },\n )\n }\n\n return events\n}\n\nexport function translateErrorToAnthropicErrorEvent(): AnthropicStreamEventData {\n return {\n type: \"error\",\n error: {\n type: \"api_error\",\n message: \"An unexpected error occurred during streaming.\",\n },\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from \"~/lib/state\"\nimport {\n createChatCompletions,\n type ChatCompletionChunk,\n type ChatCompletionResponse,\n} from \"~/services/copilot/create-chat-completions\"\n\nimport {\n type AnthropicMessagesPayload,\n type AnthropicStreamState,\n} from \"./anthropic-types\"\nimport {\n translateToAnthropic,\n translateToOpenAI,\n} from \"./non-stream-translation\"\nimport { translateChunkToAnthropicEvents } from \"./stream-translation\"\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n consola.debug(\"Anthropic request payload:\", JSON.stringify(anthropicPayload))\n\n const openAIPayload = translateToOpenAI(anthropicPayload)\n consola.debug(\n \"Translated OpenAI request payload:\",\n JSON.stringify(openAIPayload),\n )\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n const response = await createChatCompletions(openAIPayload)\n\n if (isNonStreaming(response)) {\n consola.debug(\n \"Non-streaming response from Copilot:\",\n JSON.stringify(response).slice(-400),\n )\n const anthropicResponse = translateToAnthropic(response)\n consola.debug(\n \"Translated Anthropic response:\",\n JSON.stringify(anthropicResponse),\n )\n return c.json(anthropicResponse)\n }\n\n consola.debug(\"Streaming response from Copilot\")\n return streamSSE(c, async (stream) => {\n const streamState: AnthropicStreamState = {\n messageStartSent: false,\n contentBlockIndex: 0,\n contentBlockOpen: false,\n toolCalls: {},\n }\n\n for await (const rawEvent of response) {\n consola.debug(\"Copilot raw stream event:\", JSON.stringify(rawEvent))\n if (rawEvent.data === \"[DONE]\") {\n break\n }\n\n if (!rawEvent.data) {\n continue\n }\n\n const chunk = JSON.parse(rawEvent.data) as ChatCompletionChunk\n const events = translateChunkToAnthropicEvents(chunk, streamState)\n\n for (const event of events) {\n consola.debug(\"Translated Anthropic event:\", JSON.stringify(event))\n await stream.writeSSE({\n event: event.type,\n data: JSON.stringify(event),\n })\n }\n }\n })\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createChatCompletions>>,\n): response is ChatCompletionResponse => Object.hasOwn(response, \"choices\")\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleCountTokens } from \"./count-tokens-handler\"\nimport { handleCompletion } from \"./handler\"\n\nexport const messageRoutes = new Hono()\n\nmessageRoutes.post(\"/\", async (c) => {\n try {\n return await handleCompletion(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n\nmessageRoutes.post(\"/count_tokens\", async (c) => {\n try {\n return await handleCountTokens(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from 
\"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\nimport { cacheModels } from \"~/lib/utils\"\n\nexport const modelRoutes = new Hono()\n\nmodelRoutes.get(\"/\", async (c) => {\n try {\n if (!state.models) {\n // This should be handled by startup logic, but as a fallback.\n await cacheModels()\n }\n\n const models = state.models?.data.map((model) => ({\n id: model.id,\n object: \"model\",\n type: \"model\",\n created: 0, // No date available from source\n created_at: new Date(0).toISOString(), // No date available from source\n owned_by: model.vendor,\n display_name: model.name,\n }))\n\n return c.json({\n object: \"list\",\n data: models,\n has_more: false,\n })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import consola from \"consola\"\nimport { events } from \"fetch-event-stream\"\n\nimport { copilotHeaders, copilotBaseUrl } from \"~/lib/api-config\"\nimport { HTTPError } from \"~/lib/error\"\nimport { state } from \"~/lib/state\"\n\nexport const createResponses = async (payload: ResponsesPayload) => {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n const enableVision = detectVision(payload.input)\n\n const isAgentCall = detectAgentCall(payload.input)\n\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n \"X-Initiator\": isAgentCall ? \"agent\" : \"user\",\n }\n\n const filteredPayload = filterUnsupportedTools(payload)\n\n const response = await fetch(`${copilotBaseUrl(state)}/responses`, {\n method: \"POST\",\n headers,\n body: JSON.stringify(filteredPayload),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create responses\", response)\n throw new HTTPError(\"Failed to create responses\", response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ResponsesApiResponse\n}\n\nfunction detectVision(input: ResponsesPayload[\"input\"]): boolean {\n if (typeof input === \"string\") return false\n if (!Array.isArray(input)) return false\n\n return input.some((item) => {\n if (\"content\" in item && Array.isArray(item.content)) {\n return item.content.some(\n (part: Record<string, unknown>) => part.type === \"input_image\",\n )\n }\n return false\n })\n}\n\nfunction detectAgentCall(input: ResponsesPayload[\"input\"]): boolean {\n if (typeof input === \"string\") return false\n if (!Array.isArray(input)) return false\n\n return input.some((item) => {\n if (\"role\" in item && item.role === \"assistant\") return true\n if (\"type\" in item && item.type === \"function_call_output\") return true\n return false\n })\n}\n\nfunction filterUnsupportedTools(payload: ResponsesPayload): ResponsesPayload {\n if (!payload.tools || !Array.isArray(payload.tools)) return payload\n\n const supported = payload.tools.filter((tool) => {\n const isSupported = tool.type === \"function\"\n if (!isSupported) {\n consola.debug(`Stripping unsupported tool type: ${tool.type}`)\n }\n return isSupported\n })\n\n return {\n ...payload,\n tools: supported.length > 0 ? 
supported : undefined,\n }\n}\n\n// Types\n\nexport interface ResponsesInputItem {\n role?: \"user\" | \"assistant\" | \"system\"\n type?: \"message\" | \"function_call\" | \"function_call_output\"\n content?: string | Array<Record<string, unknown>>\n name?: string\n call_id?: string\n arguments?: string\n output?: string\n [key: string]: unknown\n}\n\nexport interface ResponsesTool {\n type: string\n name?: string\n description?: string\n parameters?: Record<string, unknown>\n [key: string]: unknown\n}\n\nexport interface ResponsesPayload {\n model: string\n input: string | Array<ResponsesInputItem>\n instructions?: string\n tools?: Array<ResponsesTool>\n tool_choice?: string | { type: string; name?: string }\n max_output_tokens?: number\n temperature?: number\n top_p?: number\n stream?: boolean\n store?: boolean\n metadata?: Record<string, string>\n previous_response_id?: string\n reasoning?: { effort?: string; summary?: string }\n [key: string]: unknown\n}\n\nexport interface ResponsesApiResponse {\n id: string\n object: \"response\"\n status: string\n output: Array<unknown>\n [key: string]: unknown\n}\n","import consola from \"consola\"\n\nimport { copilotBaseUrl, copilotHeaders } from \"~/lib/api-config\"\nimport { state } from \"~/lib/state\"\nimport { sleep } from \"~/lib/utils\"\n\nexport interface WebSearchResult {\n content: string\n references: Array<{ title: string; url: string }>\n}\n\ninterface ThreadsResponse {\n thread_id: string\n}\n\ninterface ThreadsMessageResponse {\n message: {\n content: string\n references: Array<{\n query?: string\n results?: Array<{\n title: string\n url: string\n reference_type: string\n }>\n }>\n }\n}\n\nconst MAX_SEARCHES_PER_SECOND = 3\nlet searchTimestamps: Array<number> = []\n\nasync function throttleSearch(): Promise<void> {\n const now = Date.now()\n searchTimestamps = searchTimestamps.filter((t) => now - t < 1000)\n if (searchTimestamps.length >= MAX_SEARCHES_PER_SECOND) {\n const waitMs = 1000 - (now - searchTimestamps[0])\n if (waitMs > 0) {\n consola.debug(`Web search rate limited, waiting ${waitMs}ms`)\n await sleep(waitMs)\n }\n }\n searchTimestamps.push(Date.now())\n}\n\nfunction threadsHeaders(): Record<string, string> {\n return copilotHeaders(state, false, \"copilot-chat\")\n}\n\nasync function createThread(): Promise<string> {\n const response = await fetch(`${copilotBaseUrl(state)}/github/chat/threads`, {\n method: \"POST\",\n headers: threadsHeaders(),\n body: JSON.stringify({}),\n })\n\n if (!response.ok) {\n consola.error(\"Failed to create chat thread\", response.status)\n throw new Error(`Failed to create chat thread: ${response.status}`)\n }\n\n const data = (await response.json()) as ThreadsResponse\n return data.thread_id\n}\n\nasync function sendThreadMessage(\n threadId: string,\n query: string,\n): Promise<ThreadsMessageResponse> {\n const response = await fetch(\n `${copilotBaseUrl(state)}/github/chat/threads/${threadId}/messages`,\n {\n method: \"POST\",\n headers: threadsHeaders(),\n body: JSON.stringify({\n content: query,\n intent: \"conversation\",\n skills: [\"web-search\"],\n references: [],\n }),\n },\n )\n\n if (!response.ok) {\n consola.error(\"Failed to send thread message\", response.status)\n throw new Error(`Failed to send thread message: ${response.status}`)\n }\n\n return (await response.json()) as ThreadsMessageResponse\n}\n\nexport async function searchWeb(query: string): Promise<WebSearchResult> {\n if (!state.copilotToken) throw new Error(\"Copilot token not found\")\n\n await throttleSearch()\n\n 
consola.info(`Web search: \"${query.slice(0, 80)}\"`)\n\n const threadId = await createThread()\n const response = await sendThreadMessage(threadId, query)\n\n const references: Array<{ title: string; url: string }> = []\n for (const ref of response.message.references) {\n if (ref.results) {\n for (const result of ref.results) {\n if (result.url && result.reference_type !== \"bing_search\") {\n references.push({ title: result.title, url: result.url })\n }\n }\n }\n }\n\n consola.debug(`Web search returned ${references.length} references`)\n\n return {\n content: response.message.content,\n references,\n }\n}\n","import type { Context } from \"hono\"\n\nimport consola from \"consola\"\nimport { streamSSE } from \"hono/streaming\"\n\nimport { awaitApproval } from \"~/lib/approval\"\nimport { checkRateLimit } from \"~/lib/rate-limit\"\nimport { state } from \"~/lib/state\"\nimport { isNullish } from \"~/lib/utils\"\nimport {\n createResponses,\n type ResponsesApiResponse,\n type ResponsesInputItem,\n type ResponsesPayload,\n} from \"~/services/copilot/create-responses\"\nimport { searchWeb } from \"~/services/copilot/web-search\"\n\nexport async function handleResponses(c: Context) {\n await checkRateLimit(state)\n\n const payload = await c.req.json<ResponsesPayload>()\n consola.debug(\n \"Responses request payload:\",\n JSON.stringify(payload).slice(-400),\n )\n\n const selectedModel = state.models?.data.find(\n (model) => model.id === payload.model,\n )\n\n consola.info(\"Token counting not yet supported for /responses endpoint\")\n\n if (state.manualApprove) await awaitApproval()\n\n await injectWebSearchIfNeeded(payload)\n\n if (isNullish(payload.max_output_tokens)) {\n payload.max_output_tokens =\n selectedModel?.capabilities.limits.max_output_tokens\n consola.debug(\n \"Set max_output_tokens to:\",\n JSON.stringify(payload.max_output_tokens),\n )\n }\n\n const response = await createResponses(payload)\n\n if (isNonStreaming(response)) {\n consola.debug(\"Non-streaming response:\", JSON.stringify(response))\n return c.json(response)\n }\n\n consola.debug(\"Streaming response\")\n return streamSSE(c, async (stream) => {\n for await (const chunk of response) {\n consola.debug(\"Streaming chunk:\", JSON.stringify(chunk))\n\n if (chunk.data === \"[DONE]\") {\n break\n }\n\n if (!chunk.data) {\n continue\n }\n\n await stream.writeSSE({\n data: chunk.data,\n event: chunk.event,\n id: chunk.id?.toString(),\n })\n }\n })\n}\n\nconst isNonStreaming = (\n response: Awaited<ReturnType<typeof createResponses>>,\n): response is ResponsesApiResponse => Object.hasOwn(response, \"output\")\n\nasync function injectWebSearchIfNeeded(\n payload: ResponsesPayload,\n): Promise<void> {\n const hasWebSearch = payload.tools?.some((t) => t.type === \"web_search\")\n if (!hasWebSearch) return\n\n // Skip search on follow-up messages (function call results)\n if (Array.isArray(payload.input)) {\n const hasFollowUp = payload.input.some(\n (item: ResponsesInputItem) => item.type === \"function_call_output\",\n )\n if (hasFollowUp) return\n }\n\n const query = extractUserQuery(payload.input)\n if (!query) return\n\n try {\n const results = await searchWeb(query)\n const searchContext = [\n \"[Web Search Results]\",\n results.content,\n \"\",\n results.references.map((r) => `- [${r.title}](${r.url})`).join(\"\\n\"),\n \"[End Web Search Results]\",\n ].join(\"\\n\")\n\n payload.instructions =\n payload.instructions ?\n `${searchContext}\\n\\n${payload.instructions}`\n : searchContext\n } catch (error) {\n 
consola.warn(\"Web search failed, continuing without results:\", error)\n }\n}\n\nfunction extractUserQuery(\n input: ResponsesPayload[\"input\"],\n): string | undefined {\n if (typeof input === \"string\") return input\n if (!Array.isArray(input)) return undefined\n\n // Find the last user message\n for (let i = input.length - 1; i >= 0; i--) {\n const item = input[i]\n if (\"role\" in item && item.role === \"user\") {\n if (typeof item.content === \"string\") return item.content\n if (Array.isArray(item.content)) {\n const text = item.content.find(\n (p: Record<string, unknown>) => p.type === \"input_text\",\n )\n if (text && \"text\" in text) return text.text as string\n }\n }\n }\n return undefined\n}\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\n\nimport { handleResponses } from \"./handler\"\n\nexport const responsesRoutes = new Hono()\n\nresponsesRoutes.post(\"/\", async (c) => {\n try {\n return await handleResponses(c)\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { searchWeb } from \"~/services/copilot/web-search\"\n\nexport const searchRoutes = new Hono()\n\nsearchRoutes.post(\"/\", async (c) => {\n try {\n const { query } = await c.req.json<{ query: string }>()\n\n if (!query || typeof query !== \"string\") {\n return c.json(\n { error: { message: \"Missing required field: query\" } },\n 400,\n )\n }\n\n const results = await searchWeb(query)\n return c.json({ results })\n } catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\n\nimport { state } from \"~/lib/state\"\n\nexport const tokenRoute = new Hono()\n\ntokenRoute.get(\"/\", (c) => {\n return c.json({\n token: state.copilotToken,\n })\n})\n","import { Hono } from \"hono\"\nimport consola from \"consola\"\n\nimport { forwardError } from \"~/lib/error\"\nimport { getCopilotUsage } from \"~/services/github/get-copilot-usage\"\n\nexport const usageRoute = new Hono()\n\nusageRoute.get(\"/\", async (c) => {\n try {\n const usage = await getCopilotUsage()\n return c.json(usage)\n } catch (error) {\n consola.error(\"Error fetching Copilot usage:\", error)\n return await forwardError(c, error)\n }\n})\n","import { Hono } from \"hono\"\nimport { cors } from \"hono/cors\"\nimport { logger } from \"hono/logger\"\n\nimport { completionRoutes } from \"./routes/chat-completions/route\"\nimport { embeddingRoutes } from \"./routes/embeddings/route\"\nimport { messageRoutes } from \"./routes/messages/route\"\nimport { modelRoutes } from \"./routes/models/route\"\nimport { responsesRoutes } from \"./routes/responses/route\"\nimport { searchRoutes } from \"./routes/search/route\"\nimport { tokenRoute } from \"./routes/token/route\"\nimport { usageRoute } from \"./routes/usage/route\"\n\nexport const server = new Hono()\n\nserver.use(logger())\nserver.use(cors())\n\nserver.get(\"/\", (c) => c.text(\"Server running\"))\n\nserver.route(\"/chat/completions\", completionRoutes)\nserver.route(\"/responses\", responsesRoutes)\nserver.route(\"/models\", modelRoutes)\nserver.route(\"/embeddings\", embeddingRoutes)\nserver.route(\"/search\", searchRoutes)\nserver.route(\"/usage\", usageRoute)\nserver.route(\"/token\", tokenRoute)\n\n// Compatibility with tools that expect v1/ prefix\nserver.route(\"/v1/chat/completions\", completionRoutes)\nserver.route(\"/v1/responses\", responsesRoutes)\nserver.route(\"/v1/models\", modelRoutes)\nserver.route(\"/v1/embeddings\", 
embeddingRoutes)\nserver.route(\"/v1/search\", searchRoutes)\n\n// Anthropic compatible endpoints\nserver.route(\"/v1/messages\", messageRoutes)\n","#!/usr/bin/env node\n\nimport { defineCommand } from \"citty\"\nimport clipboard from \"clipboardy\"\nimport consola from \"consola\"\nimport { serve, type ServerHandler } from \"srvx\"\nimport invariant from \"tiny-invariant\"\n\nimport { ensurePaths } from \"./lib/paths\"\nimport { initProxyFromEnv } from \"./lib/proxy\"\nimport { generateEnvScript } from \"./lib/shell\"\nimport { state } from \"./lib/state\"\nimport { setupCopilotToken, setupGitHubToken } from \"./lib/token\"\nimport { cacheModels, cacheVSCodeVersion } from \"./lib/utils\"\nimport { server } from \"./server\"\n\ninterface RunServerOptions {\n port: number\n verbose: boolean\n accountType: string\n manual: boolean\n rateLimit?: number\n rateLimitWait: boolean\n githubToken?: string\n claudeCode: boolean\n codex: boolean\n showToken: boolean\n proxyEnv: boolean\n}\n\nasync function generateClaudeCodeCommand(serverUrl: string) {\n invariant(state.models, \"Models should be loaded by now\")\n\n const selectedModel = await consola.prompt(\n \"Select a model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const selectedSmallModel = await consola.prompt(\n \"Select a small model to use with Claude Code\",\n {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n },\n )\n\n const command = generateEnvScript(\n {\n ANTHROPIC_BASE_URL: serverUrl,\n ANTHROPIC_AUTH_TOKEN: \"dummy\",\n ANTHROPIC_MODEL: selectedModel,\n ANTHROPIC_DEFAULT_SONNET_MODEL: selectedModel,\n ANTHROPIC_SMALL_FAST_MODEL: selectedSmallModel,\n ANTHROPIC_DEFAULT_HAIKU_MODEL: selectedSmallModel,\n DISABLE_NON_ESSENTIAL_MODEL_CALLS: \"1\",\n CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC: \"1\",\n },\n \"claude\",\n )\n\n try {\n clipboard.writeSync(command)\n consola.success(\"Copied Claude Code command to clipboard!\")\n } catch {\n consola.warn(\n \"Failed to copy to clipboard. Here is the Claude Code command:\",\n )\n consola.log(command)\n }\n}\n\nasync function generateCodexCommand(serverUrl: string) {\n invariant(state.models, \"Models should be loaded by now\")\n\n const defaultCodexModel = state.models.data.find(\n (model) => model.id === \"gpt5.2-codex\",\n )\n\n const selectedModel =\n defaultCodexModel ?\n defaultCodexModel.id\n : await consola.prompt(\"Select a model to use with Codex CLI\", {\n type: \"select\",\n options: state.models.data.map((model) => model.id),\n })\n\n const command = generateEnvScript(\n {\n OPENAI_BASE_URL: `${serverUrl}/v1`,\n OPENAI_API_KEY: \"dummy\",\n },\n `codex -m ${selectedModel}`,\n )\n\n try {\n clipboard.writeSync(command)\n consola.success(\"Copied Codex CLI command to clipboard!\")\n } catch {\n consola.warn(\"Failed to copy to clipboard. 
Here is the Codex CLI command:\")\n consola.log(command)\n }\n}\n\nexport async function runServer(options: RunServerOptions): Promise<void> {\n if (options.proxyEnv) {\n initProxyFromEnv()\n }\n\n if (options.verbose) {\n consola.level = 5\n consola.info(\"Verbose logging enabled\")\n }\n\n state.accountType = options.accountType\n if (options.accountType !== \"individual\") {\n consola.info(`Using ${options.accountType} plan GitHub account`)\n }\n\n state.manualApprove = options.manual\n state.rateLimitSeconds = options.rateLimit\n state.rateLimitWait = options.rateLimitWait\n state.showToken = options.showToken\n\n await ensurePaths()\n await cacheVSCodeVersion()\n\n if (options.githubToken) {\n state.githubToken = options.githubToken\n consola.info(\"Using provided GitHub token\")\n } else {\n await setupGitHubToken()\n }\n\n await setupCopilotToken()\n await cacheModels()\n\n consola.info(\n `Available models: \\n${state.models?.data.map((model) => `- ${model.id}`).join(\"\\n\")}`,\n )\n\n const serverUrl = `http://localhost:${options.port}`\n\n if (options.claudeCode) await generateClaudeCodeCommand(serverUrl)\n if (options.codex) await generateCodexCommand(serverUrl)\n\n consola.box(\n `🌐 Usage Viewer: https://animeshkundu.github.io/openroute/dashboard.html?endpoint=${serverUrl}/usage`,\n )\n\n serve({\n fetch: server.fetch as ServerHandler,\n port: options.port,\n })\n}\n\nexport const start = defineCommand({\n meta: {\n name: \"start\",\n description: \"Start the openroute server\",\n },\n args: {\n port: {\n alias: \"p\",\n type: \"string\",\n default: \"8787\",\n description: \"Port to listen on\",\n },\n verbose: {\n alias: \"v\",\n type: \"boolean\",\n default: false,\n description: \"Enable verbose logging\",\n },\n \"account-type\": {\n alias: \"a\",\n type: \"string\",\n default: \"individual\",\n description: \"Account type to use (individual, business, enterprise)\",\n },\n manual: {\n type: \"boolean\",\n default: false,\n description: \"Enable manual request approval\",\n },\n \"rate-limit\": {\n alias: \"r\",\n type: \"string\",\n description: \"Rate limit in seconds between requests\",\n },\n wait: {\n alias: \"w\",\n type: \"boolean\",\n default: false,\n description:\n \"Wait instead of error when rate limit is hit. Has no effect if rate limit is not set\",\n },\n \"github-token\": {\n alias: \"g\",\n type: \"string\",\n description:\n \"Provide GitHub token directly (must be generated using the `auth` subcommand)\",\n },\n \"claude-code\": {\n alias: \"c\",\n type: \"boolean\",\n default: false,\n description:\n \"Generate a command to launch Claude Code with Copilot API config\",\n },\n codex: {\n type: \"boolean\",\n default: false,\n description:\n \"Generate a command to launch Codex CLI with Copilot API config\",\n },\n \"show-token\": {\n type: \"boolean\",\n default: false,\n description: \"Show GitHub and Copilot tokens on fetch and refresh\",\n },\n \"proxy-env\": {\n type: \"boolean\",\n default: false,\n description: \"Initialize proxy from environment variables\",\n },\n },\n run({ args }) {\n const rateLimitRaw = args[\"rate-limit\"]\n const rateLimit =\n \n rateLimitRaw === undefined ? 
undefined : Number.parseInt(rateLimitRaw, 10)\n\n return runServer({\n port: Number.parseInt(args.port, 10),\n verbose: args.verbose,\n accountType: args[\"account-type\"],\n manual: args.manual,\n rateLimit,\n rateLimitWait: args.wait,\n githubToken: args[\"github-token\"],\n claudeCode: args[\"claude-code\"],\n codex: args.codex,\n showToken: args[\"show-token\"],\n proxyEnv: args[\"proxy-env\"],\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand, runMain } from \"citty\"\n\nimport { auth } from \"./auth\"\nimport { checkUsage } from \"./check-usage\"\nimport { debug } from \"./debug\"\nimport { start } from \"./start\"\n\nconst main = defineCommand({\n meta: {\n name: \"oproute\",\n description:\n \"A reverse proxy that exposes GitHub Copilot as OpenAI and Anthropic compatible API endpoints.\",\n },\n subCommands: { auth, start, \"check-usage\": checkUsage, debug },\n})\n\nawait runMain(main)\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAIA,MAAM,UAAU,KAAK,KAAK,GAAG,SAAS,EAAE,UAAU,SAAS,YAAY;AAEvE,MAAM,oBAAoB,KAAK,KAAK,SAAS,eAAe;AAE5D,MAAa,QAAQ;CACnB;CACA;CACD;AAED,eAAsB,cAA6B;AACjD,OAAM,GAAG,MAAM,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAClD,OAAM,WAAW,MAAM,kBAAkB;;AAG3C,eAAe,WAAW,UAAiC;AACzD,KAAI;AACF,QAAM,GAAG,OAAO,UAAU,GAAG,UAAU,KAAK;SACtC;AACN,QAAM,GAAG,UAAU,UAAU,GAAG;AAChC,QAAM,GAAG,MAAM,UAAU,IAAM;;;;;;ACJnC,MAAaA,QAAe;CAC1B,aAAa;CACb,eAAe;CACf,eAAe;CACf,WAAW;CACZ;;;;ACpBD,MAAa,yBAAyB;CACpC,gBAAgB;CAChB,QAAQ;CACT;AAED,MAAM,kBAAkB;AACxB,MAAM,wBAAwB,gBAAgB;AAC9C,MAAM,aAAa,qBAAqB;AAExC,MAAM,cAAc;AAEpB,MAAa,kBAAkB,YAC7BC,QAAM,gBAAgB,eACpB,kCACA,eAAeA,QAAM,YAAY;AACrC,MAAa,kBACX,SACA,SAAkB,OAClB,gBAAwB,kBACrB;CACH,MAAMC,UAAkC;EACtC,eAAe,UAAUD,QAAM;EAC/B,gBAAgB,iBAAiB,CAAC;EAClC,0BAA0B;EAC1B,kBAAkB,UAAUA,QAAM;EAClC,yBAAyB;EACzB,cAAc;EACd,iBAAiB;EACjB,wBAAwB;EACxB,gBAAgB,YAAY;EAC5B,uCAAuC;EACxC;AAED,KAAI,OAAQ,SAAQ,4BAA4B;AAEhD,QAAO;;AAGT,MAAa,sBAAsB;AACnC,MAAa,iBAAiB,aAAkB;CAC9C,GAAG,iBAAiB;CACpB,eAAe,SAASA,QAAM;CAC9B,kBAAkB,UAAUA,QAAM;CAClC,yBAAyB;CACzB,cAAc;CACd,wBAAwB;CACxB,uCAAuC;CACxC;AAED,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,oBAAoB,CAAC,YAAY,CAAC,KAAK,IAAI;;;;AClDxD,IAAa,YAAb,cAA+B,MAAM;CACnC;CAEA,YAAY,SAAiB,UAAoB;AAC/C,QAAM,QAAQ;AACd,OAAK,WAAW;;;AAIpB,eAAsB,aAAa,GAAY,OAAgB;AAC7D,SAAQ,MAAM,mBAAmB,MAAM;AAEvC,KAAI,iBAAiB,WAAW;EAC9B,MAAM,YAAY,MAAM,MAAM,SAAS,MAAM;EAC7C,IAAIE;AACJ,MAAI;AACF,eAAY,KAAK,MAAM,UAAU;UAC3B;AACN,eAAY;;AAEd,UAAQ,MAAM,eAAe,UAAU;AACvC,SAAO,EAAE,KACP,EACE,OAAO;GACL,SAAS;GACT,MAAM;GACP,EACF,EACD,MAAM,SAAS,OAChB;;AAGH,QAAO,EAAE,KACP,EACE,OAAO;EACL,SAAU,MAAgB;EAC1B,MAAM;EACP,EACF,EACD,IACD;;;;;ACzCH,MAAa,kBAAkB,YAAY;CACzC,MAAM,WAAW,MAAM,MACrB,GAAG,oBAAoB,6BACvB,EACE,SAAS,cAAc,MAAM,EAC9B,CACF;AAED,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAE9E,QAAQ,MAAM,SAAS,MAAM;;;;;ACN/B,eAAsB,gBAA6C;CACjE,MAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,qBAAqB;EACnE,QAAQ;EACR,SAAS,iBAAiB;EAC1B,MAAM,KAAK,UAAU;GACnB,WAAW;GACX,OAAO;GACR,CAAC;EACH,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,6BAA6B,SAAS;AAE5E,QAAQ,MAAM,SAAS,MAAM;;;;;AChB/B,eAAsB,gBAAgB;CACpC,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,QAAQ,EAC1D,SAAS;EACP,eAAe,SAAS,MAAM;EAC9B,GAAG,iBAAiB;EACrB,EACF,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,6BAA6B,SAAS;AAE5E,QAAQ,MAAM,SAAS,MAAM;;;;;ACV/B,MAAa,YAAY,YAAY;CACnC,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,UAAU,EAC9D,SAAS,eAAe,MAAM,EAC/B,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,wBAAwB,SAAS;AAEvE,QAAQ,MAAM,SAAS,MAAM;;;;;ACX/B,MAAM,WAAW;AAEjB,eAAsB,mBAAmB;CACvC,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB;AAC/B,aAAW,OAAO;IACjB,IAAK;AAER,KAAI;EAUF,MAAM,SAFW,OAPA,MAAM,MACrB,kFACA,EACE,QAAQ,WAAW,QACpB,CAC
F,EAE+B,MAAM,EAEf,MADH,mBACqB;AAEzC,MAAI,MACF,QAAO,MAAM;AAGf,SAAO;SACD;AACN,SAAO;WACC;AACR,eAAa,QAAQ;;;AAIzB,MAAM,kBAAkB;;;;ACzBxB,MAAa,SAAS,OACpB,IAAI,SAAS,YAAY;AACvB,YAAW,SAAS,GAAG;EACvB;AAEJ,MAAa,aAAa,UACxB,UAAU,QAAQ,UAAU;AAE9B,eAAsB,cAA6B;AAEjD,OAAM,SADS,MAAM,WAAW;;AAIlC,MAAa,qBAAqB,YAAY;CAC5C,MAAM,WAAW,MAAM,kBAAkB;AACzC,OAAM,gBAAgB;AAEtB,SAAQ,KAAK,yBAAyB,WAAW;;;;;ACbnD,eAAsB,gBACpB,YACiB;CAGjB,MAAM,iBAAiB,WAAW,WAAW,KAAK;AAClD,SAAQ,MAAM,yCAAyC,cAAc,IAAI;AAEzE,QAAO,MAAM;EACX,MAAM,WAAW,MAAM,MACrB,GAAG,gBAAgB,4BACnB;GACE,QAAQ;GACR,SAAS,iBAAiB;GAC1B,MAAM,KAAK,UAAU;IACnB,WAAW;IACX,aAAa,WAAW;IACxB,YAAY;IACb,CAAC;GACH,CACF;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,SAAM,MAAM,cAAc;AAC1B,WAAQ,MAAM,gCAAgC,MAAM,SAAS,MAAM,CAAC;AAEpE;;EAGF,MAAM,OAAO,MAAM,SAAS,MAAM;AAClC,UAAQ,MAAM,kCAAkC,KAAK;EAErD,MAAM,EAAE,iBAAiB;AAEzB,MAAI,aACF,QAAO;MAEP,OAAM,MAAM,cAAc;;;;;;ACpChC,MAAM,wBAAwB,GAAG,SAAS,MAAM,mBAAmB,OAAO;AAE1E,MAAM,oBAAoB,UACxB,GAAG,UAAU,MAAM,mBAAmB,MAAM;AAE9C,MAAa,oBAAoB,YAAY;CAC3C,MAAM,EAAE,OAAO,eAAe,MAAM,iBAAiB;AACrD,OAAM,eAAe;AAGrB,SAAQ,MAAM,6CAA6C;AAC3D,KAAI,MAAM,UACR,SAAQ,KAAK,kBAAkB,MAAM;CAGvC,MAAM,mBAAmB,aAAa,MAAM;AAC5C,aAAY,YAAY;AACtB,UAAQ,MAAM,2BAA2B;AACzC,MAAI;GACF,MAAM,EAAE,mBAAU,MAAM,iBAAiB;AACzC,SAAM,eAAeC;AACrB,WAAQ,MAAM,0BAA0B;AACxC,OAAI,MAAM,UACR,SAAQ,KAAK,4BAA4BA,QAAM;WAE1C,OAAO;AACd,WAAQ,MAAM,oCAAoC,MAAM;;IAEzD,gBAAgB;;AAOrB,eAAsB,iBACpB,SACe;AACf,KAAI;EACF,MAAM,cAAc,MAAM,iBAAiB;AAE3C,MAAI,eAAe,CAAC,SAAS,OAAO;AAClC,SAAM,cAAc;AACpB,OAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,YAAY;AAE5C,SAAM,SAAS;AAEf;;AAGF,UAAQ,KAAK,0CAA0C;EACvD,MAAM,WAAW,MAAM,eAAe;AACtC,UAAQ,MAAM,yBAAyB,SAAS;AAEhD,UAAQ,KACN,0BAA0B,SAAS,UAAU,OAAO,SAAS,mBAC9D;EAED,MAAM,QAAQ,MAAM,gBAAgB,SAAS;AAC7C,QAAM,iBAAiB,MAAM;AAC7B,QAAM,cAAc;AAEpB,MAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,MAAM;AAEtC,QAAM,SAAS;UACR,OAAO;AACd,MAAI,iBAAiB,WAAW;AAC9B,WAAQ,MAAM,+BAA+B,MAAM,MAAM,SAAS,MAAM,CAAC;AACzE,SAAM;;AAGR,UAAQ,MAAM,+BAA+B,MAAM;AACnD,QAAM;;;AAIV,eAAe,UAAU;CACvB,MAAM,OAAO,MAAM,eAAe;AAClC,SAAQ,KAAK,gBAAgB,KAAK,QAAQ;;;;;AC9E5C,eAAsB,QAAQ,SAAwC;AACpE,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,iBAAiB,EAAE,OAAO,MAAM,CAAC;AACvC,SAAQ,QAAQ,2BAA2B,MAAM,kBAAkB;;AAGrE,MAAa,OAAO,cAAc;CAChC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,QAAQ;GACb,SAAS,KAAK;GACd,WAAW,KAAK;GACjB,CAAC;;CAEL,CAAC;;;;AC/CF,MAAa,kBAAkB,YAA2C;CACxE,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,yBAAyB,EAC3E,SAAS,cAAc,MAAM,EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAG9D,QAAQ,MAAM,SAAS,MAAM;;;;;ACH/B,MAAa,aAAa,cAAc;CACtC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,MAAM;AACV,QAAM,aAAa;AACnB,QAAM,kBAAkB;AACxB,MAAI;GACF,MAAM,QAAQ,MAAM,iBAAiB;GACrC,MAAM,UAAU,MAAM,gBAAgB;GACtC,MAAM,eAAe,QAAQ;GAC7B,MAAM,cAAc,eAAe,QAAQ;GAC3C,MAAM,qBACJ,eAAe,IAAK,cAAc,eAAgB,MAAM;GAC1D,MAAM,0BAA0B,QAAQ;GAGxC,SAAS,eAAe,MAAc,MAA+B;AACnE,QAAI,CAAC,KAAM,QAAO,GAAG,KAAK;IAC1B,MAAM,QAAQ,KAAK;IACnB,MAAM,OAAO,QAAQ,KAAK;IAC1B,MAAM,cAAc,QAAQ,IAAK,OAAO,QAAS,MAAM;IACvD,MAAM,mBAAmB,KAAK;AAC9B,WAAO,GAAG,KAAK,IAAI,KAAK,GAAG,MAAM,SAAS,YAAY,QAAQ,EAAE,CAAC,UAAU,iBAAiB,QAAQ,EAAE,CAAC;;GAGzG,MAAM,cAAc,YAAY,YAAY,GAAG,aAAa,SAAS,mBAAmB,QAAQ,EAAE,CAAC,UAAU,wBAAwB,QAAQ,EAAE,CAAC;GAChJ,MAAM,WAAW,eAAe,QAAQ,MAAM,gBAAgB,KAAK;GACnE,MAAM,kBAAkB,eACtB,eACA,MAAM,gBAAgB,YACvB;AAED,WAAQ,IACN,wBAAwB,MAAM,aAAa,mBACtB,MAAM,iBAAiB,iBAEnC,YAAY,MACZ,SAAS,MACT,kBACV;WACM,KAAK;AACZ,WAAQ,MAAM,kCAAkC,IAAI;AACpD,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AC7BF,eAAe,oBAAqC;AAClD,KAAI;EACF,MAAM,kBAAkB,
IAAI,IAAI,mBAAmB,OAAO,KAAK,IAAI,CAAC;AAMpE,SAHoB,KAAK,MAAM,MAAM,GAAG,SAAS,gBAAgB,CAAC,CAG/C;SACb;AACN,SAAO;;;AAIX,SAAS,iBAAiB;CACxB,MAAM,QAAQ,OAAO,QAAQ;AAE7B,QAAO;EACL,MAAM,QAAQ,QAAQ;EACtB,SAAS,QAAQ,IAAI,UAAU,QAAQ,QAAQ,MAAM,EAAE;EACvD,UAAU,GAAG,UAAU;EACvB,MAAM,GAAG,MAAM;EAChB;;AAGH,eAAe,mBAAqC;AAClD,KAAI;AAEF,MAAI,EADU,MAAM,GAAG,KAAK,MAAM,kBAAkB,EACzC,QAAQ,CAAE,QAAO;AAG5B,UADgB,MAAM,GAAG,SAAS,MAAM,mBAAmB,OAAO,EACnD,MAAM,CAAC,SAAS;SACzB;AACN,SAAO;;;AAIX,eAAe,eAAmC;CAChD,MAAM,CAAC,SAAS,eAAe,MAAM,QAAQ,IAAI,CAC/C,mBAAmB,EACnB,kBAAkB,CACnB,CAAC;AAEF,QAAO;EACL;EACA,SAAS,gBAAgB;EACzB,OAAO;GACL,SAAS,MAAM;GACf,mBAAmB,MAAM;GAC1B;EACD;EACD;;AAGH,SAAS,oBAAoB,MAAuB;AAClD,SAAQ,KAAK;;WAEJ,KAAK,QAAQ;WACb,KAAK,QAAQ,KAAK,GAAG,KAAK,QAAQ,QAAQ,IAAI,KAAK,QAAQ,SAAS,GAAG,KAAK,QAAQ,KAAK;;;aAGvF,KAAK,MAAM,QAAQ;uBACT,KAAK,MAAM,kBAAkB;;gBAEpC,KAAK,cAAc,QAAQ,OAAO;;AAGlD,SAAS,mBAAmB,MAAuB;AACjD,SAAQ,IAAI,KAAK,UAAU,MAAM,MAAM,EAAE,CAAC;;AAG5C,eAAsB,SAAS,SAAyC;CACtE,MAAM,YAAY,MAAM,cAAc;AAEtC,KAAI,QAAQ,KACV,oBAAmB,UAAU;KAE7B,qBAAoB,UAAU;;AAIlC,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,EACJ,MAAM;EACJ,MAAM;EACN,SAAS;EACT,aAAa;EACd,EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,SAAS,EACd,MAAM,KAAK,MACZ,CAAC;;CAEL,CAAC;;;;AC1HF,SAAgB,mBAAyB;AACvC,KAAI,OAAO,QAAQ,YAAa;AAEhC,KAAI;EACF,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,0BAAU,IAAI,KAAyB;AAmD7C,sBA7CmB;GACjB,SACE,SACA,SACA;AACA,QAAI;KACF,MAAM,SACJ,OAAO,QAAQ,WAAW,WACxB,IAAI,IAAI,QAAQ,OAAO,GACtB,QAAQ;KAIb,MAAM,MAHM,eAGI,OAAO,UAAU,CAAC;KAClC,MAAM,WAAW,OAAO,IAAI,SAAS,IAAI,MAAM;AAC/C,SAAI,CAAC,UAAU;AACb,cAAQ,MAAM,sBAAsB,OAAO,WAAW;AACtD,aAAQ,OAAiC,SAAS,SAAS,QAAQ;;KAErE,IAAI,QAAQ,QAAQ,IAAI,SAAS;AACjC,SAAI,CAAC,OAAO;AACV,cAAQ,IAAI,WAAW,SAAS;AAChC,cAAQ,IAAI,UAAU,MAAM;;KAE9B,IAAI,QAAQ;AACZ,SAAI;MACF,MAAM,IAAI,IAAI,IAAI,SAAS;AAC3B,cAAQ,GAAG,EAAE,SAAS,IAAI,EAAE;aACtB;AAGR,aAAQ,MAAM,qBAAqB,OAAO,SAAS,OAAO,QAAQ;AAClE,YAAQ,MAAgC,SAAS,SAAS,QAAQ;YAC5D;AACN,YAAQ,OAAiC,SAAS,SAAS,QAAQ;;;GAGvE,QAAQ;AACN,WAAO,OAAO,OAAO;;GAEvB,UAAU;AACR,WAAO,OAAO,SAAS;;GAE1B,CAEuD;AACxD,UAAQ,MAAM,mDAAmD;UAC1D,KAAK;AACZ,UAAQ,MAAM,wBAAwB,IAAI;;;;;;ACzD9C,SAAS,WAAsB;CAC7B,MAAM,EAAE,UAAU,MAAM,QAAQC;AAEhC,KAAI,aAAa,SAAS;AACxB,MAAI;AAIF,OAFsB,SADN,oDAAoD,KAAK,IACjC,EAAE,OAAO,QAAQ,CAAC,CAAC,UAAU,CAEnD,aAAa,CAAC,SAAS,iBAAiB,CACxD,QAAO;UAEH;AACN,UAAO;;AAGT,SAAO;QACF;EACL,MAAM,YAAY,IAAI;AACtB,MAAI,WAAW;AACb,OAAI,UAAU,SAAS,MAAM,CAAE,QAAO;AACtC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;AACvC,OAAI,UAAU,SAAS,OAAO,CAAE,QAAO;;AAGzC,SAAO;;;;;;;;;;AAWX,SAAgB,kBACd,SACA,eAAuB,IACf;CACR,MAAM,QAAQ,UAAU;CACxB,MAAM,kBAAkB,OAAO,QAAQ,QAAQ,CAAC,QAC7C,GAAG,WAAW,UAAU,OAC1B;CAED,IAAIC;AAEJ,SAAQ,OAAR;EACE,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,QAAQ,IAAI,KAAK,QAAQ,CAC/C,KAAK,KAAK;AACb;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,OAAO,IAAI,GAAG,QAAQ,CAC5C,KAAK,MAAM;AACd;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,WAAW,IAAI,GAAG,QAAQ,CAChD,KAAK,KAAK;AACb;EAEF,SAAS;GAEP,MAAM,cAAc,gBACjB,KAAK,CAAC,KAAK,WAAW,GAAG,IAAI,GAAG,QAAQ,CACxC,KAAK,IAAI;AACZ,kBAAe,gBAAgB,SAAS,IAAI,UAAU,gBAAgB;AACtE;;;AAIJ,KAAI,gBAAgB,aAElB,QAAO,GAAG,eADQ,UAAU,QAAQ,QAAQ,SACP;AAGvC,QAAO,gBAAgB;;;;;AClFzB,MAAa,gBAAgB,YAAY;AAKvC,KAAI,CAJa,MAAM,QAAQ,OAAO,4BAA4B,EAChE,MAAM,WACP,CAAC,CAGA,OAAM,IAAI,UACR,oBACA,SAAS,KAAK,EAAE,SAAS,oBAAoB,EAAE,EAAE,QAAQ,KAAK,CAAC,CAChE;;;;;ACNL,eAAsB,eAAe,SAAc;AACjD,KAAIC,QAAM,qBAAqB,OAAW;CAE1C,MAAM,MAAM,KAAK,KAAK;AAEtB,KAAI,CAACA,QAAM,sBAAsB;AAC/B,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,MAAMA,QAAM,wBAAwB;AAE5D,KAAI,iBAAiBA,QAAM,kBAAkB;AAC3C,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,KAAK,KAAKA,QAAM,mBAAmB,eAAe;AAE1E,KAAI,CAACA,QAAM,eAAe;AACxB,UAAQ,KACN,qCAAqC,gBAAgB,gBACt
D;AACD,QAAM,IAAI,UACR,uBACA,SAAS,KAAK,EAAE,SAAS,uBAAuB,EAAE,EAAE,QAAQ,KAAK,CAAC,CACnE;;CAGH,MAAM,aAAa,kBAAkB;AACrC,SAAQ,KACN,+BAA+B,gBAAgB,+BAChD;AACD,OAAM,MAAM,WAAW;AAEvB,SAAM,uBAAuB;AAC7B,SAAQ,KAAK,qDAAqD;;;;;ACjCpE,MAAM,eAAe;CACnB,kBAAkB,OAAO;CACzB,mBAAmB,OAAO;CAC1B,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACzB;AAUD,MAAM,gCAAgB,IAAI,KAAsB;;;;AAKhD,MAAM,4BACJ,WACA,SACA,cACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,YAAY,WAAW;AAChC,YAAU,UAAU;AACpB,YAAU,QAAQ,OAAO,KAAK,UAAU,SAAS,CAAC,CAAC;;AAErD,WAAU,UAAU;AACpB,QAAO;;;;;AAMT,MAAM,+BACJ,cACA,YACW;CACX,IAAI,SAAS;AACb,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,SAAS,YAChB,WAAU,QAAQ,OAAO,KAAK,UAAU,IAAI,CAAC,SAAS;UAC7C,KAAK,KACd,WAAU,QAAQ,OAAO,KAAK,KAAK,CAAC;AAGxC,QAAO;;;;;AAMT,MAAM,0BACJ,SACA,SACA,cACW;CACX,MAAM,mBAAmB;CACzB,MAAM,gBAAgB;CACtB,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,EAAE;AAClD,MAAI,OAAO,UAAU,SACnB,WAAU,QAAQ,OAAO,MAAM,CAAC;AAElC,MAAI,QAAQ,OACV,WAAU;AAEZ,MAAI,QAAQ,aACV,WAAU,yBACR,OACA,SACA,UACD;AAEH,MAAI,QAAQ,aAAa,MAAM,QAAQ,MAAM,CAC3C,WAAU,4BACR,OACA,QACD;;AAGL,QAAO;;;;;AAMT,MAAM,mBACJ,UACA,SACA,cACW;AACX,KAAI,SAAS,WAAW,EACtB,QAAO;CAET,IAAI,YAAY;AAChB,MAAK,MAAM,WAAW,SACpB,cAAa,uBAAuB,SAAS,SAAS,UAAU;AAGlE,cAAa;AACb,QAAO;;;;;AAMT,MAAM,wBAAwB,OAAO,aAAuC;AAC1E,KAAI,cAAc,IAAI,SAAS,EAAE;EAC/B,MAAM,SAAS,cAAc,IAAI,SAAS;AAC1C,MAAI,OACF,QAAO;;CAIX,MAAM,oBAAoB;AAC1B,KAAI,EAAE,qBAAqB,eAAe;EACxC,MAAM,iBAAkB,MAAM,aAAa,YAAY;AACvD,gBAAc,IAAI,UAAU,eAAe;AAC3C,SAAO;;CAGT,MAAM,iBAAkB,MAAM,aAAa,oBAAoB;AAC/D,eAAc,IAAI,UAAU,eAAe;AAC3C,QAAO;;;;;AAMT,MAAa,yBAAyB,UAAyB;AAC7D,QAAO,MAAM,aAAa,aAAa;;;;;AAMzC,MAAM,qBAAqB,UAAiB;AAC1C,QAAO,MAAM,OAAO,mBAAmB,MAAM,OAAO,UAChD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV,GACD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV;;;;;AAMP,MAAM,4BACJ,KACA,MACA,YAIW;CACX,MAAM,EAAE,SAAS,cAAc;CAC/B,IAAI,SAAS,UAAU;AAGvB,KAAI,OAAO,SAAS,YAAY,SAAS,KACvC,QAAO;CAIT,MAAM,QAAQ;CAOd,MAAM,YAAY;CAClB,MAAM,YAAY,MAAM,QAAQ;CAChC,IAAI,YAAY,MAAM,eAAe;AAGrC,KAAI,MAAM,QAAQ,MAAM,QAAQ,MAAM,KAAK,EAAE;AAC3C,YAAU,UAAU;AACpB,OAAK,MAAM,QAAQ,MAAM,MAAM;AAC7B,aAAU,UAAU;AACpB,aAAU,QAAQ,OAAO,OAAO,KAAK,CAAC,CAAC;;;AAK3C,KAAI,UAAU,SAAS,IAAI,CACzB,aAAY,UAAU,MAAM,GAAG,GAAG;CAIpC,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG;AAC1C,WAAU,QAAQ,OAAO,KAAK,CAAC;CAG/B,MAAM,eAAe,IAAI,IAAI;EAAC;EAAQ;EAAe;EAAO,CAAC;AAC7D,MAAK,MAAM,gBAAgB,OAAO,KAAK,MAAM,CAC3C,KAAI,CAAC,aAAa,IAAI,aAAa,EAAE;EACnC,MAAM,gBAAgB,MAAM;EAC5B,MAAM,eACJ,OAAO,kBAAkB,WAAW,gBAClC,KAAK,UAAU,cAAc;AAEjC,YAAU,QAAQ,OAAO,GAAG,aAAa,GAAG,eAAe,CAAC;;AAIhE,QAAO;;;;;AAMT,MAAM,6BACJ,YACA,SACA,cACW;AACX,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;CAGT,MAAM,SAAS;CACf,IAAI,SAAS;AAEb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,QAAQ,cAAc;EACxB,MAAM,aAAa;AACnB,MAAI,OAAO,KAAK,WAAW,CAAC,SAAS,GAAG;AACtC,aAAU,UAAU;AACpB,QAAK,MAAM,WAAW,OAAO,KAAK,WAAW,CAC3C,WAAU,yBAAyB,SAAS,WAAW,UAAU;IAC/D;IACA;IACD,CAAC;;QAGD;EACL,MAAM,YACJ,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,MAAM;AAC3D,YAAU,QAAQ,OAAO,GAAG,IAAI,GAAG,YAAY,CAAC;;AAIpD,QAAO;;;;;AAMT,MAAM,uBACJ,MACA,SACA,cACW;CACX,IAAI,SAAS,UAAU;CACvB,MAAM,OAAO,KAAK;CAClB,MAAM,QAAQ,KAAK;CACnB,IAAI,QAAQ,KAAK,eAAe;AAChC,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,MAAM,GAAG,GAAG;CAE5B,MAAM,OAAO,QAAQ,MAAM;AAC3B,WAAU,QAAQ,OAAO,KAAK,CAAC;AAC/B,KACE,OAAO,KAAK,eAAe,YACxB,KAAK,eAAe,KAEvB,WAAU,0BAA0B,KAAK,YAAY,SAAS,UAAU;AAE1E,QAAO;;;;;AAMT,MAAa,qBACX,OACA,SACA,cACW;CACX,IAAI,iBAAiB;AACrB,MAAK,MAAM,QAAQ,MACjB,mBAAkB,oBAAoB,MAAM,SAAS,UAAU;AAEjE,mBAAkB,UAAU;AAC5B,QAAO;;;;;AAMT,MAAa,gBAAgB,OAC3B,SACA,UAC+C;CAK/C,MAAM,UAAU,MAAM,sBAHJ,sBAAsB,MAAM,CAGQ;CAEtD,MAAM,qBAAqB,QAAQ;CACnC,MAAM,gBAAgB,mBAAmB,QACtC
,QAAQ,IAAI,SAAS,YACvB;CACD,MAAM,iBAAiB,mBAAmB,QACvC,QAAQ,IAAI,SAAS,YACvB;CAED,MAAM,YAAY,kBAAkB,MAAM;CAC1C,IAAI,cAAc,gBAAgB,eAAe,SAAS,UAAU;AACpE,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,EAC1C,gBAAe,kBAAkB,QAAQ,OAAO,SAAS,UAAU;CAErE,MAAM,eAAe,gBAAgB,gBAAgB,SAAS,UAAU;AAExE,QAAO;EACL,OAAO;EACP,QAAQ;EACT;;;;;ACnVH,MAAa,wBAAwB,OACnC,YACG;AACH,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,QAAQ,SAAS,MACnC,MACC,OAAO,EAAE,YAAY,YAClB,EAAE,SAAS,MAAM,QAAMC,IAAE,SAAS,YAAY,CACpD;CAID,MAAM,cAAc,QAAQ,SAAS,MAAM,QACzC,CAAC,aAAa,OAAO,CAAC,SAAS,IAAI,KAAK,CACzC;CAGD,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,oBAAoB;EACxE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,qCAAqC,SAAS;AAC5D,QAAM,IAAI,UAAU,qCAAqC,SAAS;;AAGpE,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;AC7B/B,eAAsBC,mBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,IAAI,UAAU,MAAM,EAAE,IAAI,MAA8B;AACxD,SAAQ,MAAM,oBAAoB,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CAAC;CAGtE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,QAAQ,MACjC;AAGD,KAAI;AACF,MAAI,eAAe;GACjB,MAAM,aAAa,MAAM,cAAc,SAAS,cAAc;AAC9D,WAAQ,KAAK,wBAAwB,WAAW;QAEhD,SAAQ,KAAK,sDAAsD;UAE9D,OAAO;AACd,UAAQ,KAAK,oCAAoC,MAAM;;AAGzD,KAAI,MAAM,cAAe,OAAM,eAAe;AAE9C,KAAI,UAAU,QAAQ,WAAW,EAAE;AACjC,YAAU;GACR,GAAG;GACH,YAAY,eAAe,aAAa,OAAO;GAChD;AACD,UAAQ,MAAM,sBAAsB,KAAK,UAAU,QAAQ,WAAW,CAAC;;CAGzE,MAAM,WAAW,MAAM,sBAAsB,QAAQ;AAErD,KAAIC,iBAAe,SAAS,EAAE;AAC5B,UAAQ,MAAM,2BAA2B,KAAK,UAAU,SAAS,CAAC;AAClE,SAAO,EAAE,KAAK,SAAS;;AAGzB,SAAQ,MAAM,qBAAqB;AACnC,QAAO,UAAU,GAAG,OAAO,WAAW;AACpC,aAAW,MAAM,SAAS,UAAU;AAClC,WAAQ,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AACxD,SAAM,OAAO,SAAS,MAAoB;;GAE5C;;AAGJ,MAAMA,oBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;;;;AC7D3E,MAAa,mBAAmB,IAAI,MAAM;AAE1C,iBAAiB,KAAK,KAAK,OAAO,MAAM;AACtC,KAAI;AACF,SAAO,MAAMC,mBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACVF,MAAa,mBAAmB,OAAO,YAA8B;AACnE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,cAAc;EAClE,QAAQ;EACR,SAAS,eAAe,MAAM;EAC9B,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GAAI,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAE9E,QAAQ,MAAM,SAAS,MAAM;;;;;ACP/B,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;EAEF,MAAM,WAAW,MAAM,iBADP,MAAM,EAAE,IAAI,MAAwB,CACJ;AAEhD,SAAO,EAAE,KAAK,SAAS;UAChB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,SAAgB,+BACd,cACkC;AAClC,KAAI,iBAAiB,KACnB,QAAO;AAQT,QANsB;EACpB,MAAM;EACN,QAAQ;EACR,YAAY;EACZ,gBAAgB;EACjB,CACoB;;;;;ACcvB,SAAgB,kBACd,SACwB;AACxB,QAAO;EACL,OAAO,mBAAmB,QAAQ,MAAM;EACxC,UAAU,mCACR,QAAQ,UACR,QAAQ,OACT;EACD,YAAY,QAAQ;EACpB,MAAM,QAAQ;EACd,QAAQ,QAAQ;EAChB,aAAa,QAAQ;EACrB,OAAO,QAAQ;EACf,MAAM,QAAQ,UAAU;EACxB,OAAO,gCAAgC,QAAQ,MAAM;EACrD,aAAa,qCAAqC,QAAQ,YAAY;EACvE;;AAGH,SAAS,mBAAmB,OAAuB;AAEjD,KAAI,MAAM,WAAW,mBAAmB,CACtC,QAAO,MAAM,QAAQ,uBAAuB,kBAAkB;UACrD,MAAM,WAAW,eAAe,CACzC,QAAO,MAAM,QAAQ,qBAAqB,gBAAgB;AAE5D,QAAO;;AAGT,SAAS,mCACP,mBACA,QACgB;CAChB,MAAM,iBAAiB,mBAAmB,OAAO;CAEjD,MAAM,gBAAgB,kBAAkB,SAAS,YAC/C,QAAQ,SAAS,SACf,kBAAkB,QAAQ,GAC1B,uBAAuB,QAAQ,CAClC;AAED,QAAO,CAAC,GAAG,gBAAgB,GAAG,cAAc;;AAG9C,SAAS,mBACP,QACgB;AAChB,KAAI,CAAC,OACH,QAAO,EAAE;AAGX,KAAI,OAAO,WAAW,SACpB,QAAO,CAAC;EAAE,MAAM;EAAU,SAAS;EAAQ,CAAC;KAG5C,QAAO,CAAC;EAAE,MAAM;EAAU,SADP,OAAO,KAAK,UAAU,MAAM,KAAK,CAAC,KAAK,OAAO;EAClB,CAAC;;AAIpD,SAAS,kBAAkB,SAA+C;CACxE,MAAMC,cAA8B,EAAE;AAEtC,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAAE;EAClC,MAAM,mBAAmB,QAAQ,QAAQ,QACtC,UACC,MAAM,SAAS,cAClB;EACD,MAAM,cAAc,QAAQ,QAAQ,QACjC,UAAU,MAAM,SAAS,cAC3B;AAGD,OAAK,MAAM,SAAS,iBAClB,aAAY,KAAK;GACf,MAAM;GACN,cAAc,MAAM;GACpB,SAAS,WAAW,M
AAM,QAAQ;GACnC,CAAC;AAGJ,MAAI,YAAY,SAAS,EACvB,aAAY,KAAK;GACf,MAAM;GACN,SAAS,WAAW,YAAY;GACjC,CAAC;OAGJ,aAAY,KAAK;EACf,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CAAC;AAGJ,QAAO;;AAGT,SAAS,uBACP,SACgB;AAChB,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,CACL;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;CAGH,MAAM,gBAAgB,QAAQ,QAAQ,QACnC,UAA0C,MAAM,SAAS,WAC3D;CAED,MAAM,aAAa,QAAQ,QAAQ,QAChC,UAAuC,MAAM,SAAS,OACxD;CAED,MAAM,iBAAiB,QAAQ,QAAQ,QACpC,UAA2C,MAAM,SAAS,WAC5D;CAGD,MAAM,iBAAiB,CACrB,GAAG,WAAW,KAAK,MAAM,EAAE,KAAK,EAChC,GAAG,eAAe,KAAK,MAAM,EAAE,SAAS,CACzC,CAAC,KAAK,OAAO;AAEd,QAAO,cAAc,SAAS,IAC1B,CACE;EACE,MAAM;EACN,SAAS,kBAAkB;EAC3B,YAAY,cAAc,KAAK,aAAa;GAC1C,IAAI,QAAQ;GACZ,MAAM;GACN,UAAU;IACR,MAAM,QAAQ;IACd,WAAW,KAAK,UAAU,QAAQ,MAAM;IACzC;GACF,EAAE;EACJ,CACF,GACD,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;;AAGP,SAAS,WACP,SAGoC;AACpC,KAAI,OAAO,YAAY,SACrB,QAAO;AAET,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;AAIT,KAAI,CADa,QAAQ,MAAM,UAAU,MAAM,SAAS,QAAQ,CAE9D,QAAO,QACJ,QACE,UACC,MAAM,SAAS,UAAU,MAAM,SAAS,WAC3C,CACA,KAAK,UAAW,MAAM,SAAS,SAAS,MAAM,OAAO,MAAM,SAAU,CACrE,KAAK,OAAO;CAGjB,MAAMC,eAAmC,EAAE;AAC3C,MAAK,MAAM,SAAS,QAClB,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM,CAAC;AAErD;EAEF,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAU,CAAC;AAEzD;EAEF,KAAK;AACH,gBAAa,KAAK;IAChB,MAAM;IACN,WAAW,EACT,KAAK,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO,QAC7D;IACF,CAAC;AAEF;;AAKN,QAAO;;AAGT,SAAS,gCACP,gBACyB;AACzB,KAAI,CAAC,eACH;AAEF,QAAO,eAAe,KAAK,UAAU;EACnC,MAAM;EACN,UAAU;GACR,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB;EACF,EAAE;;AAGL,SAAS,qCACP,qBACuC;AACvC,KAAI,CAAC,oBACH;AAGF,SAAQ,oBAAoB,MAA5B;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK;AACH,OAAI,oBAAoB,KACtB,QAAO;IACL,MAAM;IACN,UAAU,EAAE,MAAM,oBAAoB,MAAM;IAC7C;AAEH;EAEF,KAAK,OACH,QAAO;EAET,QACE;;;AAON,SAAgB,qBACd,UACmB;CAEnB,MAAMC,gBAA2C,EAAE;CACnD,MAAMC,mBAAiD,EAAE;CACzD,IAAIC,aACF;AACF,cAAa,SAAS,QAAQ,IAAI,iBAAiB;AAGnD,MAAK,MAAM,UAAU,SAAS,SAAS;EACrC,MAAM,aAAa,uBAAuB,OAAO,QAAQ,QAAQ;EACjE,MAAM,gBAAgB,0BAA0B,OAAO,QAAQ,WAAW;AAE1E,gBAAc,KAAK,GAAG,WAAW;AACjC,mBAAiB,KAAK,GAAG,cAAc;AAGvC,MAAI,OAAO,kBAAkB,gBAAgB,eAAe,OAC1D,cAAa,OAAO;;AAMxB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,CAAC,GAAG,eAAe,GAAG,iBAAiB;EAChD,aAAa,+BAA+B,WAAW;EACvD,eAAe;EACf,OAAO;GACL,eACG,SAAS,OAAO,iBAAiB,MAC/B,SAAS,OAAO,uBAAuB,iBAAiB;GAC7D,eAAe,SAAS,OAAO,qBAAqB;GACpD,GAAI,SAAS,OAAO,uBAAuB,kBACrC,UAAa,EACjB,yBACE,SAAS,MAAM,sBAAsB,eACxC;GACF;EACF;;AAGH,SAAS,uBACP,gBAC2B;AAC3B,KAAI,OAAO,mBAAmB,SAC5B,QAAO,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAgB,CAAC;AAGjD,KAAI,MAAM,QAAQ,eAAe,CAC/B,QAAO,eACJ,QAAQ,SAA2B,KAAK,SAAS,OAAO,CACxD,KAAK,UAAU;EAAE,MAAM;EAAQ,MAAM,KAAK;EAAM,EAAE;AAGvD,QAAO,EAAE;;AAGX,SAAS,0BACP,WAC8B;AAC9B,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,UAAU,KAAK,cAAc;EAClC,MAAM;EACN,IAAI,SAAS;EACb,MAAM,SAAS,SAAS;EACxB,OAAO,KAAK,MAAM,SAAS,SAAS,UAAU;EAC/C,EAAE;;;;;;;;ACtVL,eAAsB,kBAAkB,GAAY;AAClD,KAAI;EACF,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;EAEpD,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;EAErE,MAAM,gBAAgB,kBAAkB,iBAAiB;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,iBAAiB,MAC1C;AAED,MAAI,CAAC,eAAe;AAClB,WAAQ,KAAK,iDAAiD;AAC9D,UAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;EAGJ,MAAM,aAAa,MAAM,cAAc,eAAe,cAAc;AAEpE,MAAI,iBAAiB,SAAS,iBAAiB,MAAM,SAAS,GAAG;GAC/D,IAAI,eAAe;AACnB,OAAI,eAAe,WAAW,cAAc,CAC1C,gBAAe,iBAAiB,MAAM,MAAM,SAC1C,KAAK,KAAK,WAAW,QAAQ,CAC9B;AAEH,OAAI,CAAC,cACH;QAAI,iBAAiB,MAAM,WAAW,SAAS,CAE7C,YAAW,QAAQ,WAAW,QAAQ;aAC7B,iBAAiB,MAAM,WAAW,OAAO,CAClD,YAAW,QAAQ,WAAW,QAAQ;;;EAK5C,IAAI,kBAAkB,WAAW,QAAQ,WAAW;AACpD,MAAI,iBAAiB,MAAM,WAAW,SAAS,CAC7C,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;WAC3C,iB
AAiB,MAAM,WAAW,OAAO,CAClD,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;AAGtD,UAAQ,KAAK,gBAAgB,gBAAgB;AAE7C,SAAO,EAAE,KAAK,EACZ,cAAc,iBACf,CAAC;UACK,OAAO;AACd,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,SAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;;;;;AC3DN,SAAS,gBAAgB,SAAsC;AAC7D,KAAI,CAACC,QAAM,iBACT,QAAO;AAGT,QAAO,OAAO,OAAOA,QAAM,UAAU,CAAC,MACnC,OAAO,GAAG,wBAAwBA,QAAM,kBAC1C;;AAIH,SAAgB,gCACd,OACA,SACiC;CACjC,MAAMC,WAA0C,EAAE;AAElD,KAAI,MAAM,QAAQ,WAAW,EAC3B,QAAOC;CAGT,MAAM,SAAS,MAAM,QAAQ;CAC7B,MAAM,EAAE,UAAU;AAElB,KAAI,CAACF,QAAM,kBAAkB;AAC3B,WAAO,KAAK;GACV,MAAM;GACN,SAAS;IACP,IAAI,MAAM;IACV,MAAM;IACN,MAAM;IACN,SAAS,EAAE;IACX,OAAO,MAAM;IACb,aAAa;IACb,eAAe;IACf,OAAO;KACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;KAC1D,eAAe;KACf,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;KACF;IACF;GACF,CAAC;AACF,UAAM,mBAAmB;;AAG3B,KAAI,MAAM,SAAS;AACjB,MAAI,gBAAgBA,QAAM,EAAE;AAE1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM;AACN,WAAM,mBAAmB;;AAG3B,MAAI,CAACA,QAAM,kBAAkB;AAC3B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,MAAM;KACP;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,MAAM,MAAM;IACb;GACF,CAAC;;AAGJ,KAAI,MAAM,WACR,MAAK,MAAM,YAAY,MAAM,YAAY;AACvC,MAAI,SAAS,MAAM,SAAS,UAAU,MAAM;AAE1C,OAAIA,QAAM,kBAAkB;AAE1B,aAAO,KAAK;KACV,MAAM;KACN,OAAOA,QAAM;KACd,CAAC;AACF,YAAM;AACN,YAAM,mBAAmB;;GAG3B,MAAM,sBAAsBA,QAAM;AAClC,WAAM,UAAU,SAAS,SAAS;IAChC,IAAI,SAAS;IACb,MAAM,SAAS,SAAS;IACxB;IACD;AAED,YAAO,KAAK;IACV,MAAM;IACN,OAAO;IACP,eAAe;KACb,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS,SAAS;KACxB,OAAO,EAAE;KACV;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,MAAI,SAAS,UAAU,WAAW;GAChC,MAAM,eAAeA,QAAM,UAAU,SAAS;AAG9C,OAAI,aACF,UAAO,KAAK;IACV,MAAM;IACN,OAAO,aAAa;IACpB,OAAO;KACL,MAAM;KACN,cAAc,SAAS,SAAS;KACjC;IACF,CAAC;;;AAMV,KAAI,OAAO,eAAe;AACxB,MAAIA,QAAM,kBAAkB;AAC1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KACL;GACE,MAAM;GACN,OAAO;IACL,aAAa,+BAA+B,OAAO,cAAc;IACjE,eAAe;IAChB;GACD,OAAO;IACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;IAC1D,eAAe,MAAM,OAAO,qBAAqB;IACjD,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;IACF;GACF,EACD,EACE,MAAM,gBACP,CACF;;AAGH,QAAOE;;;;;AC1JT,eAAsB,iBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;AACrE,SAAQ,MAAM,8BAA8B,KAAK,UAAU,iBAAiB,CAAC;CAE7E,MAAM,gBAAgB,kBAAkB,iBAAiB;AACzD,SAAQ,MACN,sCACA,KAAK,UAAU,cAAc,CAC9B;AAED,KAAI,MAAM,cACR,OAAM,eAAe;CAGvB,MAAM,WAAW,MAAM,sBAAsB,cAAc;AAE3D,KAAIC,iBAAe,SAAS,EAAE;AAC5B,UAAQ,MACN,wCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;EACD,MAAM,oBAAoB,qBAAqB,SAAS;AACxD,UAAQ,MACN,kCACA,KAAK,UAAU,kBAAkB,CAClC;AACD,SAAO,EAAE,KAAK,kBAAkB;;AAGlC,SAAQ,MAAM,kCAAkC;AAChD,QAAO,UAAU,GAAG,OAAO,WAAW;EACpC,MAAMC,cAAoC;GACxC,kBAAkB;GAClB,mBAAmB;GACnB,kBAAkB;GAClB,WAAW,EAAE;GACd;AAED,aAAW,MAAM,YAAY,UAAU;AACrC,WAAQ,MAAM,6BAA6B,KAAK,UAAU,SAAS,CAAC;AACpE,OAAI,SAAS,SAAS,SACpB;AAGF,OAAI,CAAC,SAAS,KACZ;GAIF,MAAMC,WAAS,gCADD,KAAK,MAAM,SAAS,KAAK,EACe,YAAY;AAElE,QAAK,MAAM,SAASA,UAAQ;AAC1B,YAAQ,MAAM,+BAA+B,KAAK,UAAU,MAAM,CAAC;AACnE,UAAM,OAAO,SAAS;KACpB,OAAO,MAAM;KACb,MAAM,KAAK,UAAU,MAAM;KAC5B,CAAC;;;GAGN;;AAGJ,MAAMF,oBACJ,aACuC,OAAO,OAAO,UAAU,UAAU;;;;ACnF3E,MAAa,gBAAgB,IAAI,MAAM;AAEvC,cAAc,KAAK,KAAK,OAAO,MAAM;AACnC,KAAI;AACF,SAAO,MAAM,iBAAiB,EAAE;UACzB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;AAEF,cAAc,KAAK,iBAAiB,OAAO,MAAM;AAC/C,KAAI;AACF,SAAO,MAAM,kBAAkB,EAAE;UAC1B,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACjBF,MAAa,cAAc,IAAI,MAAM;AAErC,YAAY,IAAI,KAAK,OAAO,MAAM;AAChC,KAAI;AACF,MAAI,CAAC,MAAM,OAET,OAAM,aAAa;EAGrB,MAAM,SAAS,MAAM,QAAQ,KAAK,KAAK,WAAW;GAChD,IAAI,
MAAM;GACV,QAAQ;GACR,MAAM;GACN,SAAS;GACT,6BAAY,IAAI,KAAK,EAAE,EAAC,aAAa;GACrC,UAAU,MAAM;GAChB,cAAc,MAAM;GACrB,EAAE;AAEH,SAAO,EAAE,KAAK;GACZ,QAAQ;GACR,MAAM;GACN,UAAU;GACX,CAAC;UACK,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;AC1BF,MAAa,kBAAkB,OAAO,YAA8B;AAClE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;CAEnE,MAAM,eAAe,aAAa,QAAQ,MAAM;CAEhD,MAAM,cAAc,gBAAgB,QAAQ,MAAM;CAElD,MAAMG,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,kBAAkB,uBAAuB,QAAQ;CAEvD,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,aAAa;EACjE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,gBAAgB;EACtC,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,8BAA8B,SAAS;AACrD,QAAM,IAAI,UAAU,8BAA8B,SAAS;;AAG7D,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;AAG/B,SAAS,aAAa,OAA2C;AAC/D,KAAI,OAAO,UAAU,SAAU,QAAO;AACtC,KAAI,CAAC,MAAM,QAAQ,MAAM,CAAE,QAAO;AAElC,QAAO,MAAM,MAAM,SAAS;AAC1B,MAAI,aAAa,QAAQ,MAAM,QAAQ,KAAK,QAAQ,CAClD,QAAO,KAAK,QAAQ,MACjB,SAAkC,KAAK,SAAS,cAClD;AAEH,SAAO;GACP;;AAGJ,SAAS,gBAAgB,OAA2C;AAClE,KAAI,OAAO,UAAU,SAAU,QAAO;AACtC,KAAI,CAAC,MAAM,QAAQ,MAAM,CAAE,QAAO;AAElC,QAAO,MAAM,MAAM,SAAS;AAC1B,MAAI,UAAU,QAAQ,KAAK,SAAS,YAAa,QAAO;AACxD,MAAI,UAAU,QAAQ,KAAK,SAAS,uBAAwB,QAAO;AACnE,SAAO;GACP;;AAGJ,SAAS,uBAAuB,SAA6C;AAC3E,KAAI,CAAC,QAAQ,SAAS,CAAC,MAAM,QAAQ,QAAQ,MAAM,CAAE,QAAO;CAE5D,MAAM,YAAY,QAAQ,MAAM,QAAQ,SAAS;EAC/C,MAAM,cAAc,KAAK,SAAS;AAClC,MAAI,CAAC,YACH,SAAQ,MAAM,oCAAoC,KAAK,OAAO;AAEhE,SAAO;GACP;AAEF,QAAO;EACL,GAAG;EACH,OAAO,UAAU,SAAS,IAAI,YAAY;EAC3C;;;;;ACjDH,MAAM,0BAA0B;AAChC,IAAIC,mBAAkC,EAAE;AAExC,eAAe,iBAAgC;CAC7C,MAAM,MAAM,KAAK,KAAK;AACtB,oBAAmB,iBAAiB,QAAQ,MAAM,MAAM,IAAI,IAAK;AACjE,KAAI,iBAAiB,UAAU,yBAAyB;EACtD,MAAM,SAAS,OAAQ,MAAM,iBAAiB;AAC9C,MAAI,SAAS,GAAG;AACd,WAAQ,MAAM,oCAAoC,OAAO,IAAI;AAC7D,SAAM,MAAM,OAAO;;;AAGvB,kBAAiB,KAAK,KAAK,KAAK,CAAC;;AAGnC,SAAS,iBAAyC;AAChD,QAAO,eAAe,OAAO,OAAO,eAAe;;AAGrD,eAAe,eAAgC;CAC7C,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,uBAAuB;EAC3E,QAAQ;EACR,SAAS,gBAAgB;EACzB,MAAM,KAAK,UAAU,EAAE,CAAC;EACzB,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,gCAAgC,SAAS,OAAO;AAC9D,QAAM,IAAI,MAAM,iCAAiC,SAAS,SAAS;;AAIrE,SADc,MAAM,SAAS,MAAM,EACvB;;AAGd,eAAe,kBACb,UACA,OACiC;CACjC,MAAM,WAAW,MAAM,MACrB,GAAG,eAAe,MAAM,CAAC,uBAAuB,SAAS,YACzD;EACE,QAAQ;EACR,SAAS,gBAAgB;EACzB,MAAM,KAAK,UAAU;GACnB,SAAS;GACT,QAAQ;GACR,QAAQ,CAAC,aAAa;GACtB,YAAY,EAAE;GACf,CAAC;EACH,CACF;AAED,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,iCAAiC,SAAS,OAAO;AAC/D,QAAM,IAAI,MAAM,kCAAkC,SAAS,SAAS;;AAGtE,QAAQ,MAAM,SAAS,MAAM;;AAG/B,eAAsB,UAAU,OAAyC;AACvE,KAAI,CAAC,MAAM,aAAc,OAAM,IAAI,MAAM,0BAA0B;AAEnE,OAAM,gBAAgB;AAEtB,SAAQ,KAAK,gBAAgB,MAAM,MAAM,GAAG,GAAG,CAAC,GAAG;CAGnD,MAAM,WAAW,MAAM,kBADN,MAAM,cAAc,EACc,MAAM;CAEzD,MAAMC,aAAoD,EAAE;AAC5D,MAAK,MAAM,OAAO,SAAS,QAAQ,WACjC,KAAI,IAAI,SACN;OAAK,MAAM,UAAU,IAAI,QACvB,KAAI,OAAO,OAAO,OAAO,mBAAmB,cAC1C,YAAW,KAAK;GAAE,OAAO,OAAO;GAAO,KAAK,OAAO;GAAK,CAAC;;AAMjE,SAAQ,MAAM,uBAAuB,WAAW,OAAO,aAAa;AAEpE,QAAO;EACL,SAAS,SAAS,QAAQ;EAC1B;EACD;;;;;ACpGH,eAAsB,gBAAgB,GAAY;AAChD,OAAM,eAAe,MAAM;CAE3B,MAAM,UAAU,MAAM,EAAE,IAAI,MAAwB;AACpD,SAAQ,MACN,8BACA,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CACpC;CAED,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACtC,UAAU,MAAM,OAAO,QAAQ,MACjC;AAED,SAAQ,KAAK,2DAA2D;AAExE,KAAI,MAAM,cAAe,OAAM,eAAe;AAE9C,OAAM,wBAAwB,QAAQ;AAEtC,KAAI,UAAU,QAAQ,kBAAkB,EAAE;AACxC,UAAQ,oBACN,eAAe,aAAa,OAAO;AACrC,UAAQ,MACN,6BACA,KAAK,UAAU,QAAQ,kBAAkB,CAC1C;;CAGH,MAAM,WAAW,MAAM,gBAAgB,QAAQ;AAE/C,KAAI,eAAe,SAAS,EAAE;AAC5B,UAAQ,MAAM,2BAA2B,KAAK,UAAU,SAAS,CAAC;AAClE,SAAO,EAAE,KAAK,SAAS;;AAGzB,SAAQ,MAAM,qBAAqB;AACnC,QAAO,UAAU,GAAG,OAAO,WAAW;AACpC,aAAW,MAAM,SAAS,UAAU;AAClC,WAAQ,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AAExD,OAAI,MAAM,SAAS,SACjB;AAGF,OAAI,CAAC,MAAM,
KACT;AAGF,SAAM,OAAO,SAAS;IACpB,MAAM,MAAM;IACZ,OAAO,MAAM;IACb,IAAI,MAAM,IAAI,UAAU;IACzB,CAAC;;GAEJ;;AAGJ,MAAM,kBACJ,aACqC,OAAO,OAAO,UAAU,SAAS;AAExE,eAAe,wBACb,SACe;AAEf,KAAI,CADiB,QAAQ,OAAO,MAAM,MAAM,EAAE,SAAS,aAAa,CACrD;AAGnB,KAAI,MAAM,QAAQ,QAAQ,MAAM,EAI9B;MAHoB,QAAQ,MAAM,MAC/B,SAA6B,KAAK,SAAS,uBAC7C,CACgB;;CAGnB,MAAM,QAAQ,iBAAiB,QAAQ,MAAM;AAC7C,KAAI,CAAC,MAAO;AAEZ,KAAI;EACF,MAAM,UAAU,MAAM,UAAU,MAAM;EACtC,MAAM,gBAAgB;GACpB;GACA,QAAQ;GACR;GACA,QAAQ,WAAW,KAAK,MAAM,MAAM,EAAE,MAAM,IAAI,EAAE,IAAI,GAAG,CAAC,KAAK,KAAK;GACpE;GACD,CAAC,KAAK,KAAK;AAEZ,UAAQ,eACN,QAAQ,eACN,GAAG,cAAc,MAAM,QAAQ,iBAC/B;UACG,OAAO;AACd,UAAQ,KAAK,kDAAkD,MAAM;;;AAIzE,SAAS,iBACP,OACoB;AACpB,KAAI,OAAO,UAAU,SAAU,QAAO;AACtC,KAAI,CAAC,MAAM,QAAQ,MAAM,CAAE,QAAO;AAGlC,MAAK,IAAI,IAAI,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;EAC1C,MAAM,OAAO,MAAM;AACnB,MAAI,UAAU,QAAQ,KAAK,SAAS,QAAQ;AAC1C,OAAI,OAAO,KAAK,YAAY,SAAU,QAAO,KAAK;AAClD,OAAI,MAAM,QAAQ,KAAK,QAAQ,EAAE;IAC/B,MAAM,OAAO,KAAK,QAAQ,MACvB,MAA+B,EAAE,SAAS,aAC5C;AACD,QAAI,QAAQ,UAAU,KAAM,QAAO,KAAK;;;;;;;;AC3HhD,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;AACF,SAAO,MAAM,gBAAgB,EAAE;UACxB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACTF,MAAa,eAAe,IAAI,MAAM;AAEtC,aAAa,KAAK,KAAK,OAAO,MAAM;AAClC,KAAI;EACF,MAAM,EAAE,UAAU,MAAM,EAAE,IAAI,MAAyB;AAEvD,MAAI,CAAC,SAAS,OAAO,UAAU,SAC7B,QAAO,EAAE,KACP,EAAE,OAAO,EAAE,SAAS,iCAAiC,EAAE,EACvD,IACD;EAGH,MAAM,UAAU,MAAM,UAAU,MAAM;AACtC,SAAO,EAAE,KAAK,EAAE,SAAS,CAAC;UACnB,OAAO;AACd,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACnBF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,MAAM,MAAM;AACzB,QAAO,EAAE,KAAK,EACZ,OAAO,MAAM,cACd,CAAC;EACF;;;;ACJF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;EACF,MAAM,QAAQ,MAAM,iBAAiB;AACrC,SAAO,EAAE,KAAK,MAAM;UACb,OAAO;AACd,UAAQ,MAAM,iCAAiC,MAAM;AACrD,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACHF,MAAa,SAAS,IAAI,MAAM;AAEhC,OAAO,IAAI,QAAQ,CAAC;AACpB,OAAO,IAAI,MAAM,CAAC;AAElB,OAAO,IAAI,MAAM,MAAM,EAAE,KAAK,iBAAiB,CAAC;AAEhD,OAAO,MAAM,qBAAqB,iBAAiB;AACnD,OAAO,MAAM,cAAc,gBAAgB;AAC3C,OAAO,MAAM,WAAW,YAAY;AACpC,OAAO,MAAM,eAAe,gBAAgB;AAC5C,OAAO,MAAM,WAAW,aAAa;AACrC,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,UAAU,WAAW;AAGlC,OAAO,MAAM,wBAAwB,iBAAiB;AACtD,OAAO,MAAM,iBAAiB,gBAAgB;AAC9C,OAAO,MAAM,cAAc,YAAY;AACvC,OAAO,MAAM,kBAAkB,gBAAgB;AAC/C,OAAO,MAAM,cAAc,aAAa;AAGxC,OAAO,MAAM,gBAAgB,cAAc;;;;ACN3C,eAAe,0BAA0B,WAAmB;AAC1D,WAAU,MAAM,QAAQ,iCAAiC;CAEzD,MAAM,gBAAgB,MAAM,QAAQ,OAClC,0CACA;EACE,MAAM;EACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;EACpD,CACF;CAED,MAAM,qBAAqB,MAAM,QAAQ,OACvC,gDACA;EACE,MAAM;EACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;EACpD,CACF;CAED,MAAM,UAAU,kBACd;EACE,oBAAoB;EACpB,sBAAsB;EACtB,iBAAiB;EACjB,gCAAgC;EAChC,4BAA4B;EAC5B,+BAA+B;EAC/B,mCAAmC;EACnC,0CAA0C;EAC3C,EACD,SACD;AAED,KAAI;AACF,YAAU,UAAU,QAAQ;AAC5B,UAAQ,QAAQ,2CAA2C;SACrD;AACN,UAAQ,KACN,gEACD;AACD,UAAQ,IAAI,QAAQ;;;AAIxB,eAAe,qBAAqB,WAAmB;AACrD,WAAU,MAAM,QAAQ,iCAAiC;CAEzD,MAAM,oBAAoB,MAAM,OAAO,KAAK,MACzC,UAAU,MAAM,OAAO,eACzB;CAED,MAAM,gBACJ,oBACE,kBAAkB,KAClB,MAAM,QAAQ,OAAO,wCAAwC;EAC3D,MAAM;EACN,SAAS,MAAM,OAAO,KAAK,KAAK,UAAU,MAAM,GAAG;EACpD,CAAC;CAEN,MAAM,UAAU,kBACd;EACE,iBAAiB,GAAG,UAAU;EAC9B,gBAAgB;EACjB,EACD,YAAY,gBACb;AAED,KAAI;AACF,YAAU,UAAU,QAAQ;AAC5B,UAAQ,QAAQ,yCAAyC;SACnD;AACN,UAAQ,KAAK,8DAA8D;AAC3E,UAAQ,IAAI,QAAQ;;;AAIxB,eAAsB,UAAU,SAA0C;AACxE,KAAI,QAAQ,SACV,mBAAkB;AAGpB,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,cAAc,QAAQ;AAC5B,KAAI,QAAQ,gBAAgB,aAC1B,SAAQ,KAAK,SAAS,QAAQ,YAAY,sBAAsB;AAGlE,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,mBAAmB,QAAQ;AACjC,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,oBAAoB;AAE1B,KAAI,QAAQ,aAAa;AACvB,QAAM,cAAc,QAAQ
;AAC5B,UAAQ,KAAK,8BAA8B;OAE3C,OAAM,kBAAkB;AAG1B,OAAM,mBAAmB;AACzB,OAAM,aAAa;AAEnB,SAAQ,KACN,uBAAuB,MAAM,QAAQ,KAAK,KAAK,UAAU,KAAK,MAAM,KAAK,CAAC,KAAK,KAAK,GACrF;CAED,MAAM,YAAY,oBAAoB,QAAQ;AAE9C,KAAI,QAAQ,WAAY,OAAM,0BAA0B,UAAU;AAClE,KAAI,QAAQ,MAAO,OAAM,qBAAqB,UAAU;AAExD,SAAQ,IACN,qFAAqF,UAAU,QAChG;AAED,OAAM;EACJ,OAAO,OAAO;EACd,MAAM,QAAQ;EACf,CAAC;;AAGJ,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,SAAS;GACP,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,QAAQ;GACN,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,OAAO;GACP,MAAM;GACN,aAAa;GACd;EACD,MAAM;GACJ,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,eAAe;GACb,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,OAAO;GACL,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,aAAa;GACX,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;EACZ,MAAM,eAAe,KAAK;EAC1B,MAAM,YAEJ,iBAAiB,SAAY,SAAY,OAAO,SAAS,cAAc,GAAG;AAE5E,SAAO,UAAU;GACf,MAAM,OAAO,SAAS,KAAK,MAAM,GAAG;GACpC,SAAS,KAAK;GACd,aAAa,KAAK;GAClB,QAAQ,KAAK;GACb;GACA,eAAe,KAAK;GACpB,aAAa,KAAK;GAClB,YAAY,KAAK;GACjB,OAAO,KAAK;GACZ,WAAW,KAAK;GAChB,UAAU,KAAK;GAChB,CAAC;;CAEL,CAAC;;;;ACvOF,MAAM,QATO,cAAc;CACzB,MAAM;EACJ,MAAM;EACN,aACE;EACH;CACD,aAAa;EAAE;EAAM;EAAO,eAAe;EAAY;EAAO;CAC/D,CAAC,CAEiB"}
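The bundled server above mounts OpenAI-compatible routes (/chat/completions, /models, /embeddings, also under /v1/), the Anthropic-compatible /v1/messages route, plus /search, /usage, and /token, and the start command listens on port 8787 by default. The sketch below shows one way a client could call the proxy; it assumes the server is already running locally with those defaults, the model id "gpt-4o" is a placeholder for an id returned by GET /models, and the dummy authorization value simply mirrors the dummy keys used in the generated Claude Code / Codex launch commands.

// Minimal client sketch (TypeScript). Assumptions: the proxy is already running on
// the default port 8787, and "gpt-4o" is a placeholder model id (see GET /models).
const BASE_URL = "http://localhost:8787"

interface ChatMessage {
  role: "system" | "user" | "assistant"
  content: string
}

async function chat(messages: Array<ChatMessage>): Promise<string> {
  const response = await fetch(`${BASE_URL}/v1/chat/completions`, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      // The generated Claude Code / Codex commands pass dummy keys; mirrored here.
      authorization: "Bearer dummy",
    },
    body: JSON.stringify({
      model: "gpt-4o", // placeholder; list available ids via GET /models
      messages,
      stream: false,
    }),
  })

  if (!response.ok) throw new Error(`Chat completion failed: ${response.status}`)

  const data = (await response.json()) as {
    choices: Array<{ message: { content: string } }>
  }
  return data.choices[0].message.content
}

// Example: console.log(await chat([{ role: "user", content: "Hello!" }]))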
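For streaming requests to /v1/messages, the translateChunkToAnthropicEvents function above converts each OpenAI-style chunk into Anthropic SSE events while tracking messageStartSent, contentBlockIndex, contentBlockOpen, and any open tool-call blocks. The sketch below illustrates the event order a client would see for a simple text-only completion; it is written by hand from that translation logic, not captured proxy output, and the ids, text fragments, token counts, and the "end_turn" stop reason are assumed placeholder values.

// Illustrative Anthropic event sequence for a text-only streamed completion,
// derived by hand from the translation logic above (all values are placeholders).
const exampleEventSequence = [
  // First chunk: message_start is emitted once, then a text content block opens.
  { type: "message_start", message: { id: "chatcmpl-123", role: "assistant", content: [] } },
  { type: "content_block_start", index: 0, content_block: { type: "text", text: "" } },
  // Every delta.content from the upstream stream becomes a text_delta on that block.
  { type: "content_block_delta", index: 0, delta: { type: "text_delta", text: "Hello" } },
  { type: "content_block_delta", index: 0, delta: { type: "text_delta", text: " world" } },
  // When a chunk carries finish_reason, the open block is closed and the message is finalized.
  { type: "content_block_stop", index: 0 },
  {
    type: "message_delta",
    delta: { stop_reason: "end_turn", stop_sequence: null }, // assumed mapping for finish_reason "stop"
    usage: { input_tokens: 0, output_tokens: 2 },
  },
  { type: "message_stop" },
]

// The handler writes each entry as an SSE frame: event = entry.type, data = JSON string.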
package/package.json ADDED
@@ -0,0 +1,71 @@
1
+ {
2
+ "name": "github-router",
3
+ "version": "0.1.0",
4
+ "description": "A reverse proxy that exposes GitHub Copilot as OpenAI and Anthropic compatible API endpoints.",
5
+ "keywords": [
6
+ "proxy",
7
+ "github-copilot",
8
+ "openai-compatible",
9
+ "anthropic-compatible"
10
+ ],
11
+ "homepage": "https://github.com/animeshkundu/openroute",
12
+ "bugs": "https://github.com/animeshkundu/openroute/issues",
13
+ "repository": {
14
+ "type": "git",
15
+ "url": "git+https://github.com/animeshkundu/openroute.git"
16
+ },
17
+ "author": "animeshkundu",
18
+ "type": "module",
19
+ "bin": {
20
+ "github-router": "./dist/main.js"
21
+ },
22
+ "files": [
23
+ "dist"
24
+ ],
25
+ "scripts": {
26
+ "build": "tsdown",
27
+ "dev": "bun run --watch ./src/main.ts",
28
+ "knip": "knip-bun",
29
+ "lint": "eslint --cache",
30
+ "lint:all": "eslint --cache .",
31
+ "prepack": "bun run build",
32
+ "prepare": "simple-git-hooks",
33
+ "release": "bun publish --access public",
34
+ "start": "NODE_ENV=production bun run ./src/main.ts",
35
+ "test": "bun test",
36
+ "typecheck": "tsc"
37
+ },
38
+ "simple-git-hooks": {
39
+ "pre-commit": "bunx lint-staged"
40
+ },
41
+ "lint-staged": {
42
+ "*": "bun run lint --fix"
43
+ },
44
+ "dependencies": {
45
+ "citty": "^0.1.6",
46
+ "clipboardy": "^5.0.0",
47
+ "consola": "^3.4.2",
48
+ "fetch-event-stream": "^0.1.5",
49
+ "gpt-tokenizer": "^3.0.1",
50
+ "hono": "^4.9.9",
51
+ "proxy-from-env": "^1.1.0",
52
+ "srvx": "^0.8.9",
53
+ "tiny-invariant": "^1.3.3",
54
+ "undici": "^7.16.0",
55
+ "zod": "^4.1.11"
56
+ },
57
+ "devDependencies": {
58
+ "@eslint/js": "^9.37.0",
59
+ "eslint-config-prettier": "^10.1.0",
60
+ "@types/bun": "^1.2.23",
61
+ "@types/proxy-from-env": "^1.0.4",
62
+ "eslint": "^9.37.0",
63
+ "knip": "^5.64.1",
64
+ "lint-staged": "^16.2.3",
65
+ "prettier-plugin-packagejson": "^2.5.19",
66
+ "simple-git-hooks": "^2.13.1",
67
+ "tsdown": "^0.15.6",
68
+ "typescript": "^5.9.3",
69
+ "typescript-eslint": "^8.33.0"
70
+ }
71
+ }
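The bin field maps the github-router command to the bundled dist/main.js, whose start subcommand accepts the flags defined in the source above (--port, --verbose, --account-type, --manual, --rate-limit, --wait, --github-token, --claude-code, --codex, --show-token, --proxy-env). A small sketch of launching it programmatically follows, assuming the package has been installed so the bin is resolvable on PATH; the chosen flags are only examples.

import { spawn } from "node:child_process"

// Launch the proxy with a couple of the flags listed above. Assumes "github-router"
// is installed (globally or as a project dependency) and available on PATH.
const proxy = spawn(
  "github-router",
  ["start", "--port", "8787", "--verbose"],
  { stdio: "inherit" },
)

proxy.on("exit", (code) => {
  console.log(`github-router exited with code ${String(code)}`)
})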