@jer-y/copilot-proxy 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +359 -0
- package/dist/main.js +1712 -0
- package/dist/main.js.map +1 -0
- package/package.json +68 -0
package/dist/main.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"main.js","names":["state: State","state","headers: Record<string, string>","errorJson: unknown","token","commandBlock: string","state","x","headers: Record<string, string>","handleCompletion","isNonStreaming","handleCompletion","MODEL_CONFIGS: Record<string, ModelConfig>","reasoning_effort: 'low' | 'medium' | 'high' | undefined","newMessages: Array<Message>","contentParts: Array<ContentPart>","allTextBlocks: Array<AnthropicTextBlock>","allToolUseBlocks: Array<AnthropicToolUseBlock>","stopReason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | null","state","events: Array<AnthropicStreamEventData>","events","isNonStreaming","streamState: AnthropicStreamState","events","headers: Record<string, string>"],"sources":["../src/lib/paths.ts","../src/lib/state.ts","../src/lib/api-config.ts","../src/lib/error.ts","../src/services/github/get-copilot-token.ts","../src/services/github/get-device-code.ts","../src/services/github/get-user.ts","../src/services/copilot/get-models.ts","../src/services/get-vscode-version.ts","../src/lib/utils.ts","../src/services/github/poll-access-token.ts","../src/lib/token.ts","../src/auth.ts","../src/services/github/get-copilot-usage.ts","../src/check-usage.ts","../src/debug.ts","../src/lib/proxy.ts","../src/lib/shell.ts","../src/lib/approval.ts","../src/lib/rate-limit.ts","../src/lib/tokenizer.ts","../src/services/copilot/create-chat-completions.ts","../src/routes/chat-completions/handler.ts","../src/routes/chat-completions/route.ts","../src/services/copilot/create-embeddings.ts","../src/routes/embeddings/route.ts","../src/lib/model-config.ts","../src/routes/messages/utils.ts","../src/routes/messages/non-stream-translation.ts","../src/routes/messages/count-tokens-handler.ts","../src/routes/messages/stream-translation.ts","../src/routes/messages/handler.ts","../src/routes/messages/route.ts","../src/routes/models/route.ts","../src/services/copilot/create-responses.ts","../src/routes/responses/handler.ts","../src/routes/responses/route.ts","../src/routes/token/route.ts","../src/routes/usage/route.ts","../src/server.ts","../src/start.ts","../src/main.ts"],"sourcesContent":["import fs from 'node:fs/promises'\nimport os from 'node:os'\nimport path from 'node:path'\n\nconst APP_DIR = path.join(os.homedir(), '.local', 'share', 'copilot-proxy')\n\nconst GITHUB_TOKEN_PATH = path.join(APP_DIR, 'github_token')\n\nexport const PATHS = {\n APP_DIR,\n GITHUB_TOKEN_PATH,\n}\n\nexport async function ensurePaths(): Promise<void> {\n await fs.mkdir(PATHS.APP_DIR, { recursive: true })\n await ensureFile(PATHS.GITHUB_TOKEN_PATH)\n}\n\nasync function ensureFile(filePath: string): Promise<void> {\n try {\n await fs.access(filePath, fs.constants.W_OK)\n }\n catch {\n await fs.writeFile(filePath, '')\n await fs.chmod(filePath, 0o600)\n }\n}\n","import type { ModelsResponse } from '~/services/copilot/get-models'\n\nexport interface State {\n githubToken?: string\n copilotToken?: string\n\n accountType: string\n models?: ModelsResponse\n vsCodeVersion?: string\n\n manualApprove: boolean\n rateLimitWait: boolean\n showToken: boolean\n\n // Rate limiting configuration\n rateLimitSeconds?: number\n lastRequestTimestamp?: number\n}\n\nexport const state: State = {\n accountType: 'individual',\n manualApprove: false,\n rateLimitWait: false,\n showToken: false,\n}\n","import type { State } from './state'\n\nimport { randomUUID } from 'node:crypto'\n\nexport function standardHeaders() {\n return {\n 'content-type': 'application/json',\n 'accept': 'application/json',\n 
}\n}\n\nconst COPILOT_VERSION = '0.26.7'\nconst EDITOR_PLUGIN_VERSION = `copilot-chat/${COPILOT_VERSION}`\nconst USER_AGENT = `GitHubCopilotChat/${COPILOT_VERSION}`\n\nconst API_VERSION = '2025-05-01'\n\nexport function copilotBaseUrl(state: State) {\n return state.accountType === 'individual'\n ? 'https://api.githubcopilot.com'\n : `https://api.${state.accountType}.githubcopilot.com`\n}\nexport function copilotHeaders(state: State, vision: boolean = false) {\n const headers: Record<string, string> = {\n 'Authorization': `Bearer ${state.copilotToken}`,\n 'content-type': standardHeaders()['content-type'],\n 'copilot-integration-id': 'vscode-chat',\n 'editor-version': `vscode/${state.vsCodeVersion}`,\n 'editor-plugin-version': EDITOR_PLUGIN_VERSION,\n 'user-agent': USER_AGENT,\n 'openai-intent': 'conversation-agent',\n 'x-interaction-type': 'conversation-agent',\n 'x-github-api-version': API_VERSION,\n 'x-request-id': randomUUID(),\n 'x-vscode-user-agent-library-version': 'electron-fetch',\n }\n\n if (vision)\n headers['copilot-vision-request'] = 'true'\n\n return headers\n}\n\nexport const GITHUB_API_BASE_URL = 'https://api.github.com'\nexport function githubHeaders(state: State) {\n return {\n ...standardHeaders(),\n 'authorization': `token ${state.githubToken}`,\n 'editor-version': `vscode/${state.vsCodeVersion}`,\n 'editor-plugin-version': EDITOR_PLUGIN_VERSION,\n 'user-agent': USER_AGENT,\n 'x-github-api-version': API_VERSION,\n 'x-vscode-user-agent-library-version': 'electron-fetch',\n }\n}\n\nexport const GITHUB_BASE_URL = 'https://github.com'\nexport const GITHUB_CLIENT_ID = 'Iv1.b507a08c87ecfe98'\nexport const GITHUB_APP_SCOPES = ['read:user'].join(' ')\n","import type { Context } from 'hono'\nimport type { ContentfulStatusCode } from 'hono/utils/http-status'\n\nimport consola from 'consola'\n\nexport class HTTPError extends Error {\n response: Response\n\n constructor(message: string, response: Response) {\n super(message)\n this.response = response\n }\n}\n\nexport async function forwardError(c: Context, error: unknown) {\n consola.error('Error occurred:', error)\n\n if (error instanceof HTTPError) {\n const errorText = await error.response.text()\n let errorJson: unknown\n try {\n errorJson = JSON.parse(errorText)\n }\n catch {\n errorJson = errorText\n }\n consola.error('HTTP error:', errorJson)\n return c.json(\n {\n error: {\n message: errorText,\n type: 'error',\n },\n },\n error.response.status as ContentfulStatusCode,\n )\n }\n\n return c.json(\n {\n error: {\n message: (error as Error).message,\n type: 'error',\n },\n },\n 500,\n )\n}\n","import { GITHUB_API_BASE_URL, githubHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function getCopilotToken() {\n const response = await fetch(\n `${GITHUB_API_BASE_URL}/copilot_internal/v2/token`,\n {\n headers: githubHeaders(state),\n },\n )\n\n if (!response.ok)\n throw new HTTPError('Failed to get Copilot token', response)\n\n return (await response.json()) as GetCopilotTokenResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GetCopilotTokenResponse {\n expires_at: number\n refresh_in: number\n token: string\n}\n","import {\n GITHUB_APP_SCOPES,\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\n\nexport async function getDeviceCode(): Promise<DeviceCodeResponse> {\n const response = await fetch(`${GITHUB_BASE_URL}/login/device/code`, {\n method: 'POST',\n headers: 
standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n scope: GITHUB_APP_SCOPES,\n }),\n })\n\n if (!response.ok)\n throw new HTTPError('Failed to get device code', response)\n\n return (await response.json()) as DeviceCodeResponse\n}\n\nexport interface DeviceCodeResponse {\n device_code: string\n user_code: string\n verification_uri: string\n expires_in: number\n interval: number\n}\n","import { GITHUB_API_BASE_URL, standardHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function getGitHubUser() {\n const response = await fetch(`${GITHUB_API_BASE_URL}/user`, {\n headers: {\n authorization: `token ${state.githubToken}`,\n ...standardHeaders(),\n },\n })\n\n if (!response.ok)\n throw new HTTPError('Failed to get GitHub user', response)\n\n return (await response.json()) as GithubUserResponse\n}\n\n// Trimmed for the sake of simplicity\ninterface GithubUserResponse {\n login: string\n}\n","import { copilotBaseUrl, copilotHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function getModels() {\n const response = await fetch(`${copilotBaseUrl(state)}/models`, {\n headers: copilotHeaders(state),\n })\n\n if (!response.ok)\n throw new HTTPError('Failed to get models', response)\n\n return (await response.json()) as ModelsResponse\n}\n\nexport interface ModelsResponse {\n data: Array<Model>\n object: string\n}\n\ninterface ModelLimits {\n max_context_window_tokens?: number\n max_output_tokens?: number\n max_prompt_tokens?: number\n max_inputs?: number\n}\n\ninterface ModelSupports {\n tool_calls?: boolean\n parallel_tool_calls?: boolean\n dimensions?: boolean\n}\n\ninterface ModelCapabilities {\n family: string\n limits: ModelLimits\n object: string\n supports: ModelSupports\n tokenizer: string\n type: string\n}\n\nexport interface Model {\n capabilities: ModelCapabilities\n id: string\n model_picker_enabled: boolean\n name: string\n object: string\n preview: boolean\n vendor: string\n version: string\n policy?: {\n state: string\n terms: string\n }\n}\n","const FALLBACK = '1.104.3'\n\nexport async function getVSCodeVersion() {\n const controller = new AbortController()\n const timeout = setTimeout(() => {\n controller.abort()\n }, 5000)\n\n try {\n const response = await fetch(\n 'https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=visual-studio-code-bin',\n {\n signal: controller.signal,\n },\n )\n\n const pkgbuild = await response.text()\n const pkgverRegex = /pkgver=([0-9.]+)/\n const match = pkgbuild.match(pkgverRegex)\n\n if (match) {\n return match[1]\n }\n\n return FALLBACK\n }\n catch {\n return FALLBACK\n }\n finally {\n clearTimeout(timeout)\n }\n}\n\n// eslint-disable-next-line antfu/no-top-level-await\nawait getVSCodeVersion()\n","import consola from 'consola'\n\nimport { getModels } from '~/services/copilot/get-models'\nimport { getVSCodeVersion } from '~/services/get-vscode-version'\n\nimport { state } from './state'\n\nexport function sleep(ms: number) {\n return new Promise((resolve) => {\n setTimeout(resolve, ms)\n })\n}\n\nexport function isNullish(value: unknown): value is null | undefined {\n return value === null || value === undefined\n}\n\nexport async function cacheModels(): Promise<void> {\n const models = await getModels()\n state.models = models\n}\n\nexport async function cacheVSCodeVersion() {\n const response = await getVSCodeVersion()\n state.vsCodeVersion = response\n\n 
consola.info(`Using VSCode version: ${response}`)\n}\n","import type { DeviceCodeResponse } from './get-device-code'\n\nimport consola from 'consola'\nimport {\n GITHUB_BASE_URL,\n GITHUB_CLIENT_ID,\n standardHeaders,\n} from '~/lib/api-config'\n\nimport { sleep } from '~/lib/utils'\n\nexport async function pollAccessToken(\n deviceCode: DeviceCodeResponse,\n): Promise<string> {\n // Interval is in seconds, we need to multiply by 1000 to get milliseconds\n // I'm also adding another second, just to be safe\n const sleepDuration = (deviceCode.interval + 1) * 1000\n consola.debug(`Polling access token with interval of ${sleepDuration}ms`)\n\n while (true) {\n const response = await fetch(\n `${GITHUB_BASE_URL}/login/oauth/access_token`,\n {\n method: 'POST',\n headers: standardHeaders(),\n body: JSON.stringify({\n client_id: GITHUB_CLIENT_ID,\n device_code: deviceCode.device_code,\n grant_type: 'urn:ietf:params:oauth:grant-type:device_code',\n }),\n },\n )\n\n if (!response.ok) {\n await sleep(sleepDuration)\n consola.error('Failed to poll access token:', await response.text())\n\n continue\n }\n\n const json = await response.json()\n consola.debug('Polling access token response:', json)\n\n const { access_token } = json as AccessTokenResponse\n\n if (access_token) {\n return access_token\n }\n else {\n await sleep(sleepDuration)\n }\n }\n}\n\ninterface AccessTokenResponse {\n access_token: string\n token_type: string\n scope: string\n}\n","import fs from 'node:fs/promises'\nimport consola from 'consola'\n\nimport { PATHS } from '~/lib/paths'\nimport { getCopilotToken } from '~/services/github/get-copilot-token'\nimport { getDeviceCode } from '~/services/github/get-device-code'\nimport { getGitHubUser } from '~/services/github/get-user'\nimport { pollAccessToken } from '~/services/github/poll-access-token'\n\nimport { HTTPError } from './error'\nimport { state } from './state'\n\nconst readGithubToken = () => fs.readFile(PATHS.GITHUB_TOKEN_PATH, 'utf8')\n\nfunction writeGithubToken(token: string) {\n return fs.writeFile(PATHS.GITHUB_TOKEN_PATH, token)\n}\n\nexport async function setupCopilotToken() {\n const { token, refresh_in } = await getCopilotToken()\n state.copilotToken = token\n\n // Display the Copilot token to the screen\n consola.debug('GitHub Copilot Token fetched successfully!')\n if (state.showToken) {\n consola.info('Copilot token:', token)\n }\n\n const refreshInterval = (refresh_in - 60) * 1000\n setInterval(async () => {\n consola.debug('Refreshing Copilot token')\n try {\n const { token } = await getCopilotToken()\n state.copilotToken = token\n consola.debug('Copilot token refreshed')\n if (state.showToken) {\n consola.info('Refreshed Copilot token:', token)\n }\n }\n catch (error) {\n consola.error('Failed to refresh Copilot token:', error)\n throw error\n }\n }, refreshInterval)\n}\n\ninterface SetupGitHubTokenOptions {\n force?: boolean\n}\n\nexport async function setupGitHubToken(\n options?: SetupGitHubTokenOptions,\n): Promise<void> {\n try {\n const githubToken = await readGithubToken()\n\n if (githubToken && !options?.force) {\n state.githubToken = githubToken\n if (state.showToken) {\n consola.info('GitHub token:', githubToken)\n }\n await logUser()\n\n return\n }\n\n consola.info('Not logged in, getting new access token')\n const response = await getDeviceCode()\n consola.debug('Device code response:', response)\n\n consola.info(\n `Please enter the code \"${response.user_code}\" in ${response.verification_uri}`,\n )\n\n const token = await 
pollAccessToken(response)\n await writeGithubToken(token)\n state.githubToken = token\n\n if (state.showToken) {\n consola.info('GitHub token:', token)\n }\n await logUser()\n }\n catch (error) {\n if (error instanceof HTTPError) {\n consola.error('Failed to get GitHub token:', await error.response.json())\n throw error\n }\n\n consola.error('Failed to get GitHub token:', error)\n throw error\n }\n}\n\nasync function logUser() {\n const user = await getGitHubUser()\n consola.info(`Logged in as ${user.login}`)\n}\n","#!/usr/bin/env node\n\nimport { defineCommand } from 'citty'\nimport consola from 'consola'\n\nimport { ensurePaths, PATHS } from './lib/paths'\nimport { state } from './lib/state'\nimport { setupGitHubToken } from './lib/token'\n\ninterface RunAuthOptions {\n verbose: boolean\n showToken: boolean\n}\n\nexport async function runAuth(options: RunAuthOptions): Promise<void> {\n if (options.verbose) {\n consola.level = 5\n consola.info('Verbose logging enabled')\n }\n\n state.showToken = options.showToken\n\n await ensurePaths()\n await setupGitHubToken({ force: true })\n consola.success('GitHub token written to', PATHS.GITHUB_TOKEN_PATH)\n}\n\nexport const auth = defineCommand({\n meta: {\n name: 'auth',\n description: 'Run GitHub auth flow without running the server',\n },\n args: {\n 'verbose': {\n alias: 'v',\n type: 'boolean',\n default: false,\n description: 'Enable verbose logging',\n },\n 'show-token': {\n type: 'boolean',\n default: false,\n description: 'Show GitHub token on auth',\n },\n },\n run({ args }) {\n return runAuth({\n verbose: args.verbose,\n showToken: args['show-token'],\n })\n },\n})\n","import { GITHUB_API_BASE_URL, githubHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function getCopilotUsage(): Promise<CopilotUsageResponse> {\n const response = await fetch(`${GITHUB_API_BASE_URL}/copilot_internal/user`, {\n headers: githubHeaders(state),\n })\n\n if (!response.ok) {\n throw new HTTPError('Failed to get Copilot usage', response)\n }\n\n return (await response.json()) as CopilotUsageResponse\n}\n\nexport interface QuotaDetail {\n entitlement: number\n overage_count: number\n overage_permitted: boolean\n percent_remaining: number\n quota_id: string\n quota_remaining: number\n remaining: number\n unlimited: boolean\n}\n\ninterface QuotaSnapshots {\n chat: QuotaDetail\n completions: QuotaDetail\n premium_interactions: QuotaDetail\n}\n\ninterface CopilotUsageResponse {\n access_type_sku: string\n analytics_tracking_id: string\n assigned_date: string\n can_signup_for_limited: boolean\n chat_enabled: boolean\n copilot_plan: string\n organization_login_list: Array<unknown>\n organization_list: Array<unknown>\n quota_reset_date: string\n quota_snapshots: QuotaSnapshots\n}\n","import type { QuotaDetail } from './services/github/get-copilot-usage'\nimport process from 'node:process'\nimport { defineCommand } from 'citty'\n\nimport consola from 'consola'\nimport { ensurePaths } from './lib/paths'\nimport { setupGitHubToken } from './lib/token'\nimport {\n getCopilotUsage,\n\n} from './services/github/get-copilot-usage'\n\nexport const checkUsage = defineCommand({\n meta: {\n name: 'check-usage',\n description: 'Show current GitHub Copilot usage/quota information',\n },\n async run() {\n await ensurePaths()\n await setupGitHubToken()\n try {\n const usage = await getCopilotUsage()\n const premium = usage.quota_snapshots.premium_interactions\n const premiumTotal = premium.entitlement\n 
const premiumUsed = premiumTotal - premium.remaining\n const premiumPercentUsed\n = premiumTotal > 0 ? (premiumUsed / premiumTotal) * 100 : 0\n const premiumPercentRemaining = premium.percent_remaining\n\n // Helper to summarize a quota snapshot\n function summarizeQuota(name: string, snap: QuotaDetail | undefined) {\n if (!snap)\n return `${name}: N/A`\n const total = snap.entitlement\n const used = total - snap.remaining\n const percentUsed = total > 0 ? (used / total) * 100 : 0\n const percentRemaining = snap.percent_remaining\n return `${name}: ${used}/${total} used (${percentUsed.toFixed(1)}% used, ${percentRemaining.toFixed(1)}% remaining)`\n }\n\n const premiumLine = `Premium: ${premiumUsed}/${premiumTotal} used (${premiumPercentUsed.toFixed(1)}% used, ${premiumPercentRemaining.toFixed(1)}% remaining)`\n const chatLine = summarizeQuota('Chat', usage.quota_snapshots.chat)\n const completionsLine = summarizeQuota(\n 'Completions',\n usage.quota_snapshots.completions,\n )\n\n consola.box(\n `Copilot Usage (plan: ${usage.copilot_plan})\\n`\n + `Quota resets: ${usage.quota_reset_date}\\n`\n + `\\nQuotas:\\n`\n + ` ${premiumLine}\\n`\n + ` ${chatLine}\\n`\n + ` ${completionsLine}`,\n )\n }\n catch (err) {\n consola.error('Failed to fetch Copilot usage:', err)\n process.exit(1)\n }\n },\n})\n","#!/usr/bin/env node\n\nimport fs from 'node:fs/promises'\nimport os from 'node:os'\nimport process from 'node:process'\nimport { defineCommand } from 'citty'\nimport consola from 'consola'\n\nimport { PATHS } from './lib/paths'\n\ninterface DebugInfo {\n version: string\n runtime: {\n name: string\n version: string\n platform: string\n arch: string\n }\n paths: {\n APP_DIR: string\n GITHUB_TOKEN_PATH: string\n }\n tokenExists: boolean\n}\n\ninterface RunDebugOptions {\n json: boolean\n}\n\nasync function getPackageVersion(): Promise<string> {\n try {\n const packageJsonPath = new URL('../package.json', import.meta.url).pathname\n // @ts-expect-error https://github.com/sindresorhus/eslint-plugin-unicorn/blob/v59.0.1/docs/rules/prefer-json-parse-buffer.md\n // JSON.parse() can actually parse buffers\n const packageJson = JSON.parse(await fs.readFile(packageJsonPath)) as {\n version: string\n }\n return packageJson.version\n }\n catch {\n return 'unknown'\n }\n}\n\nfunction getRuntimeInfo() {\n const isBun = typeof Bun !== 'undefined'\n\n return {\n name: isBun ? 'bun' : 'node',\n version: isBun ? Bun.version : process.version.slice(1),\n platform: os.platform(),\n arch: os.arch(),\n }\n}\n\nasync function checkTokenExists(): Promise<boolean> {\n try {\n const stats = await fs.stat(PATHS.GITHUB_TOKEN_PATH)\n if (!stats.isFile())\n return false\n\n const content = await fs.readFile(PATHS.GITHUB_TOKEN_PATH, 'utf8')\n return content.trim().length > 0\n }\n catch {\n return false\n }\n}\n\nasync function getDebugInfo(): Promise<DebugInfo> {\n const [version, tokenExists] = await Promise.all([\n getPackageVersion(),\n checkTokenExists(),\n ])\n\n return {\n version,\n runtime: getRuntimeInfo(),\n paths: {\n APP_DIR: PATHS.APP_DIR,\n GITHUB_TOKEN_PATH: PATHS.GITHUB_TOKEN_PATH,\n },\n tokenExists,\n }\n}\n\nfunction printDebugInfoPlain(info: DebugInfo): void {\n consola.info(`copilot-proxy debug\n\nVersion: ${info.version}\nRuntime: ${info.runtime.name} ${info.runtime.version} (${info.runtime.platform} ${info.runtime.arch})\n\nPaths:\n- APP_DIR: ${info.paths.APP_DIR}\n- GITHUB_TOKEN_PATH: ${info.paths.GITHUB_TOKEN_PATH}\n\nToken exists: ${info.tokenExists ? 
'Yes' : 'No'}`)\n}\n\nfunction printDebugInfoJson(info: DebugInfo): void {\n // eslint-disable-next-line no-console\n console.log(JSON.stringify(info, null, 2))\n}\n\nexport async function runDebug(options: RunDebugOptions): Promise<void> {\n const debugInfo = await getDebugInfo()\n\n if (options.json) {\n printDebugInfoJson(debugInfo)\n }\n else {\n printDebugInfoPlain(debugInfo)\n }\n}\n\nexport const debug = defineCommand({\n meta: {\n name: 'debug',\n description: 'Print debug information about the application',\n },\n args: {\n json: {\n type: 'boolean',\n default: false,\n description: 'Output debug information as JSON',\n },\n },\n run({ args }) {\n return runDebug({\n json: args.json,\n })\n },\n})\n","import type { Dispatcher } from 'undici'\nimport consola from 'consola'\nimport { getProxyForUrl } from 'proxy-from-env'\nimport { Agent, ProxyAgent, setGlobalDispatcher } from 'undici'\n\nexport function initProxyFromEnv(): void {\n if (typeof Bun !== 'undefined')\n return\n\n try {\n const direct = new Agent()\n const proxies = new Map<string, ProxyAgent>()\n\n // We only need a minimal dispatcher that implements `dispatch` at runtime.\n // Typing the object as `Dispatcher` forces TypeScript to require many\n // additional methods. Instead, keep a plain object and cast when passing\n // to `setGlobalDispatcher`.\n const dispatcher = {\n dispatch(\n options: Dispatcher.DispatchOptions,\n handler: Dispatcher.DispatchHandler,\n ) {\n try {\n const origin\n = typeof options.origin === 'string'\n ? new URL(options.origin)\n : (options.origin as URL)\n const get = getProxyForUrl as unknown as (\n u: string,\n ) => string | undefined\n const raw = get(origin.toString())\n const proxyUrl = raw && raw.length > 0 ? raw : undefined\n if (!proxyUrl) {\n consola.debug(`HTTP proxy bypass: ${origin.hostname}`)\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n let agent = proxies.get(proxyUrl)\n if (!agent) {\n agent = new ProxyAgent(proxyUrl)\n proxies.set(proxyUrl, agent)\n }\n let label = proxyUrl\n try {\n const u = new URL(proxyUrl)\n label = `${u.protocol}//${u.host}`\n }\n catch {\n /* noop */\n }\n consola.debug(`HTTP proxy route: ${origin.hostname} via ${label}`)\n return (agent as unknown as Dispatcher).dispatch(options, handler)\n }\n catch {\n return (direct as unknown as Dispatcher).dispatch(options, handler)\n }\n },\n close() {\n return direct.close()\n },\n destroy() {\n return direct.destroy()\n },\n }\n\n setGlobalDispatcher(dispatcher as unknown as Dispatcher)\n consola.debug('HTTP proxy configured from environment (per-URL)')\n }\n catch (err) {\n consola.debug('Proxy setup skipped:', err)\n }\n}\n","import { execSync } from 'node:child_process'\nimport process from 'node:process'\n\ntype ShellName = 'bash' | 'zsh' | 'fish' | 'powershell' | 'cmd' | 'sh'\ntype EnvVars = Record<string, string | undefined>\n\nfunction getShell(): ShellName {\n const { platform, ppid, env } = process\n\n if (platform === 'win32') {\n try {\n const command = `wmic process get ParentProcessId,Name | findstr \"${ppid}\"`\n const parentProcess = execSync(command, { stdio: 'pipe' }).toString()\n\n if (parentProcess.toLowerCase().includes('powershell.exe')) {\n return 'powershell'\n }\n }\n catch {\n return 'cmd'\n }\n\n return 'cmd'\n }\n else {\n const shellPath = env.SHELL\n if (shellPath) {\n if (shellPath.endsWith('zsh'))\n return 'zsh'\n if (shellPath.endsWith('fish'))\n return 'fish'\n if (shellPath.endsWith('bash'))\n return 'bash'\n }\n\n return 'sh'\n }\n}\n\n/**\n * 
Generates a copy-pasteable script to set multiple environment variables\n * and run a subsequent command.\n * @param {EnvVars} envVars - An object of environment variables to set.\n * @param {string} commandToRun - The command to run after setting the variables.\n * @returns {string} The formatted script string.\n */\nexport function generateEnvScript(\n envVars: EnvVars,\n commandToRun: string = '',\n): string {\n const shell = getShell()\n const filteredEnvVars = Object.entries(envVars).filter(\n ([, value]) => value !== undefined,\n ) as Array<[string, string]>\n\n let commandBlock: string\n\n switch (shell) {\n case 'powershell': {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `$env:${key} = ${value}`)\n .join('; ')\n break\n }\n case 'cmd': {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set ${key}=${value}`)\n .join(' & ')\n break\n }\n case 'fish': {\n commandBlock = filteredEnvVars\n .map(([key, value]) => `set -gx ${key} ${value}`)\n .join('; ')\n break\n }\n default: {\n // bash, zsh, sh\n const assignments = filteredEnvVars\n .map(([key, value]) => `${key}=${value}`)\n .join(' ')\n commandBlock = filteredEnvVars.length > 0 ? `export ${assignments}` : ''\n break\n }\n }\n\n if (commandBlock && commandToRun) {\n const separator = shell === 'cmd' ? ' & ' : ' && '\n return `${commandBlock}${separator}${commandToRun}`\n }\n\n return commandBlock || commandToRun\n}\n","import consola from 'consola'\n\nimport { HTTPError } from './error'\n\nexport async function awaitApproval() {\n const response = await consola.prompt(`Accept incoming request?`, {\n type: 'confirm',\n })\n\n if (!response) {\n throw new HTTPError(\n 'Request rejected',\n Response.json({ message: 'Request rejected' }, { status: 403 }),\n )\n }\n}\n","import type { State } from './state'\n\nimport consola from 'consola'\n\nimport { HTTPError } from './error'\nimport { sleep } from './utils'\n\nexport async function checkRateLimit(state: State) {\n if (state.rateLimitSeconds === undefined)\n return\n\n const now = Date.now()\n\n if (!state.lastRequestTimestamp) {\n state.lastRequestTimestamp = now\n return\n }\n\n const elapsedSeconds = (now - state.lastRequestTimestamp) / 1000\n\n if (elapsedSeconds > state.rateLimitSeconds) {\n state.lastRequestTimestamp = now\n return\n }\n\n const waitTimeSeconds = Math.ceil(state.rateLimitSeconds - elapsedSeconds)\n\n if (!state.rateLimitWait) {\n consola.warn(\n `Rate limit exceeded. Need to wait ${waitTimeSeconds} more seconds.`,\n )\n throw new HTTPError(\n 'Rate limit exceeded',\n Response.json({ message: 'Rate limit exceeded' }, { status: 429 }),\n )\n }\n\n const waitTimeMs = waitTimeSeconds * 1000\n consola.warn(\n `Rate limit reached. 
Waiting ${waitTimeSeconds} seconds before proceeding...`,\n )\n await sleep(waitTimeMs)\n\n state.lastRequestTimestamp = now\n consola.info('Rate limit wait completed, proceeding with request')\n}\n","import type {\n ChatCompletionsPayload,\n ContentPart,\n Message,\n Tool,\n ToolCall,\n} from '~/services/copilot/create-chat-completions'\nimport type { Model } from '~/services/copilot/get-models'\n\n// Encoder type mapping\nconst ENCODING_MAP = {\n o200k_base: () => import('gpt-tokenizer/encoding/o200k_base'),\n cl100k_base: () => import('gpt-tokenizer/encoding/cl100k_base'),\n p50k_base: () => import('gpt-tokenizer/encoding/p50k_base'),\n p50k_edit: () => import('gpt-tokenizer/encoding/p50k_edit'),\n r50k_base: () => import('gpt-tokenizer/encoding/r50k_base'),\n} as const\n\ntype SupportedEncoding = keyof typeof ENCODING_MAP\n\n// Define encoder interface\ninterface Encoder {\n encode: (text: string) => Array<number>\n}\n\n// Cache loaded encoders to avoid repeated imports\nconst encodingCache = new Map<string, Encoder>()\n\n/**\n * Calculate tokens for tool calls\n */\nfunction calculateToolCallsTokens(toolCalls: Array<ToolCall>, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n let tokens = 0\n for (const toolCall of toolCalls) {\n tokens += constants.funcInit\n tokens += encoder.encode(JSON.stringify(toolCall)).length\n }\n tokens += constants.funcEnd\n return tokens\n}\n\n/**\n * Calculate tokens for content parts\n */\nfunction calculateContentPartsTokens(contentParts: Array<ContentPart>, encoder: Encoder): number {\n let tokens = 0\n for (const part of contentParts) {\n if (part.type === 'image_url') {\n tokens += encoder.encode(part.image_url.url).length + 85\n }\n else if (part.text) {\n tokens += encoder.encode(part.text).length\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens for a single message\n */\nfunction calculateMessageTokens(message: Message, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n const tokensPerMessage = 3\n const tokensPerName = 1\n let tokens = tokensPerMessage\n for (const [key, value] of Object.entries(message)) {\n if (typeof value === 'string') {\n tokens += encoder.encode(value).length\n }\n if (key === 'name') {\n tokens += tokensPerName\n }\n if (key === 'tool_calls') {\n tokens += calculateToolCallsTokens(\n value as Array<ToolCall>,\n encoder,\n constants,\n )\n }\n if (key === 'content' && Array.isArray(value)) {\n tokens += calculateContentPartsTokens(\n value as Array<ContentPart>,\n encoder,\n )\n }\n }\n return tokens\n}\n\n/**\n * Calculate tokens using custom algorithm\n */\nfunction calculateTokens(messages: Array<Message>, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n if (messages.length === 0) {\n return 0\n }\n let numTokens = 0\n for (const message of messages) {\n numTokens += calculateMessageTokens(message, encoder, constants)\n }\n // every reply is primed with <|start|>assistant<|message|>\n numTokens += 3\n return numTokens\n}\n\n/**\n * Get the corresponding encoder module based on encoding type\n */\nasync function getEncodeChatFunction(encoding: string): Promise<Encoder> {\n if (encodingCache.has(encoding)) {\n const cached = encodingCache.get(encoding)\n if (cached) {\n return cached\n }\n }\n\n const supportedEncoding = encoding as SupportedEncoding\n if (!(supportedEncoding in ENCODING_MAP)) {\n const fallbackModule = (await ENCODING_MAP.o200k_base()) as Encoder\n encodingCache.set(encoding, fallbackModule)\n return 
fallbackModule\n }\n\n const encodingModule = (await ENCODING_MAP[supportedEncoding]()) as Encoder\n encodingCache.set(encoding, encodingModule)\n return encodingModule\n}\n\n/**\n * Get tokenizer type from model information\n */\nexport function getTokenizerFromModel(model: Model): string {\n return model.capabilities.tokenizer || 'o200k_base'\n}\n\n/**\n * Get model-specific constants for token calculation\n */\nfunction getModelConstants(model: Model) {\n return model.id === 'gpt-3.5-turbo' || model.id === 'gpt-4'\n ? {\n funcInit: 10,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n : {\n funcInit: 7,\n propInit: 3,\n propKey: 3,\n enumInit: -3,\n enumItem: 3,\n funcEnd: 12,\n }\n}\n\n/**\n * Calculate tokens for a single parameter\n */\nfunction calculateParameterTokens(key: string, prop: unknown, context: {\n encoder: Encoder\n constants: ReturnType<typeof getModelConstants>\n}): number {\n const { encoder, constants } = context\n let tokens = constants.propKey\n\n // Early return if prop is not an object\n if (typeof prop !== 'object' || prop === null) {\n return tokens\n }\n\n // Type assertion for parameter properties\n const param = prop as {\n type?: string\n description?: string\n enum?: Array<unknown>\n [key: string]: unknown\n }\n\n const paramName = key\n const paramType = param.type || 'string'\n let paramDesc = param.description || ''\n\n // Handle enum values\n if (param.enum && Array.isArray(param.enum)) {\n tokens += constants.enumInit\n for (const item of param.enum) {\n tokens += constants.enumItem\n tokens += encoder.encode(String(item)).length\n }\n }\n\n // Clean up description\n if (paramDesc.endsWith('.')) {\n paramDesc = paramDesc.slice(0, -1)\n }\n\n // Encode the main parameter line\n const line = `${paramName}:${paramType}:${paramDesc}`\n tokens += encoder.encode(line).length\n\n // Handle additional properties (excluding standard ones)\n const excludedKeys = new Set(['type', 'description', 'enum'])\n for (const propertyName of Object.keys(param)) {\n if (!excludedKeys.has(propertyName)) {\n const propertyValue = param[propertyName]\n const propertyText\n = typeof propertyValue === 'string'\n ? propertyValue\n : (\n JSON.stringify(propertyValue)\n )\n tokens += encoder.encode(`${propertyName}:${propertyText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for function parameters\n */\nfunction calculateParametersTokens(parameters: unknown, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n if (!parameters || typeof parameters !== 'object') {\n return 0\n }\n\n const params = parameters as Record<string, unknown>\n let tokens = 0\n\n for (const [key, value] of Object.entries(params)) {\n if (key === 'properties') {\n const properties = value as Record<string, unknown>\n if (Object.keys(properties).length > 0) {\n tokens += constants.propInit\n for (const propKey of Object.keys(properties)) {\n tokens += calculateParameterTokens(propKey, properties[propKey], {\n encoder,\n constants,\n })\n }\n }\n }\n else {\n const paramText\n = typeof value === 'string' ? 
value : JSON.stringify(value)\n tokens += encoder.encode(`${key}:${paramText}`).length\n }\n }\n\n return tokens\n}\n\n/**\n * Calculate tokens for a single tool\n */\nfunction calculateToolTokens(tool: Tool, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n let tokens = constants.funcInit\n const func = tool.function\n const fName = func.name\n let fDesc = func.description || ''\n if (fDesc.endsWith('.')) {\n fDesc = fDesc.slice(0, -1)\n }\n const line = `${fName}:${fDesc}`\n tokens += encoder.encode(line).length\n if (\n typeof func.parameters === 'object'\n && func.parameters !== null\n ) {\n tokens += calculateParametersTokens(func.parameters, encoder, constants)\n }\n return tokens\n}\n\n/**\n * Calculate token count for tools based on model\n */\nexport function numTokensForTools(tools: Array<Tool>, encoder: Encoder, constants: ReturnType<typeof getModelConstants>): number {\n let funcTokenCount = 0\n for (const tool of tools) {\n funcTokenCount += calculateToolTokens(tool, encoder, constants)\n }\n funcTokenCount += constants.funcEnd\n return funcTokenCount\n}\n\n/**\n * Calculate the token count of messages, supporting multiple GPT encoders\n */\nexport async function getTokenCount(payload: ChatCompletionsPayload, model: Model): Promise<{ input: number, output: number }> {\n // Get tokenizer string\n const tokenizer = getTokenizerFromModel(model)\n\n // Get corresponding encoder module\n const encoder = await getEncodeChatFunction(tokenizer)\n\n const simplifiedMessages = payload.messages\n const inputMessages = simplifiedMessages.filter(\n msg => msg.role !== 'assistant',\n )\n const outputMessages = simplifiedMessages.filter(\n msg => msg.role === 'assistant',\n )\n\n const constants = getModelConstants(model)\n let inputTokens = calculateTokens(inputMessages, encoder, constants)\n if (payload.tools && payload.tools.length > 0) {\n inputTokens += numTokensForTools(payload.tools, encoder, constants)\n }\n const outputTokens = calculateTokens(outputMessages, encoder, constants)\n\n return {\n input: inputTokens,\n output: outputTokens,\n }\n}\n","import consola from 'consola'\nimport { events } from 'fetch-event-stream'\n\nimport { copilotBaseUrl, copilotHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function createChatCompletions(payload: ChatCompletionsPayload) {\n if (!state.copilotToken)\n throw new Error('Copilot token not found')\n\n const enableVision = payload.messages.some(\n x =>\n typeof x.content !== 'string'\n && x.content?.some(x => x.type === 'image_url'),\n )\n\n // Agent/user check for X-Initiator header\n // Determine if any message is from an agent (\"assistant\" or \"tool\")\n const isAgentCall = payload.messages.some(msg =>\n ['assistant', 'tool'].includes(msg.role),\n )\n\n // Build headers and add X-Initiator\n const headers: Record<string, string> = {\n ...copilotHeaders(state, enableVision),\n 'X-Initiator': isAgentCall ? 
'agent' : 'user',\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/chat/completions`, {\n method: 'POST',\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error('Failed to create chat completions', response)\n throw new HTTPError('Failed to create chat completions', response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ChatCompletionResponse\n}\n\n// Streaming types\n\nexport interface ChatCompletionChunk {\n id: string\n object: 'chat.completion.chunk'\n created: number\n model: string\n choices: Array<Choice>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n completion_tokens_details?: {\n accepted_prediction_tokens: number\n rejected_prediction_tokens: number\n }\n }\n}\n\ninterface Delta {\n content?: string | null\n role?: 'user' | 'assistant' | 'system' | 'tool'\n tool_calls?: Array<{\n index: number\n id?: string\n type?: 'function'\n function?: {\n name?: string\n arguments?: string\n }\n }>\n}\n\ninterface Choice {\n index: number\n delta: Delta\n finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | null\n logprobs: object | null\n}\n\n// Non-streaming types\n\nexport interface ChatCompletionResponse {\n id: string\n object: 'chat.completion'\n created: number\n model: string\n choices: Array<ChoiceNonStreaming>\n system_fingerprint?: string\n usage?: {\n prompt_tokens: number\n completion_tokens: number\n total_tokens: number\n prompt_tokens_details?: {\n cached_tokens: number\n }\n }\n}\n\ninterface ResponseMessage {\n role: 'assistant'\n content: string | null\n tool_calls?: Array<ToolCall>\n}\n\ninterface ChoiceNonStreaming {\n index: number\n message: ResponseMessage\n logprobs: object | null\n finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter'\n}\n\n// Payload types\n\nexport interface ChatCompletionsPayload {\n messages: Array<Message>\n model: string\n temperature?: number | null\n top_p?: number | null\n max_tokens?: number | null\n stop?: string | Array<string> | null\n n?: number | null\n stream?: boolean | null\n\n frequency_penalty?: number | null\n presence_penalty?: number | null\n logit_bias?: Record<string, number> | null\n logprobs?: boolean | null\n response_format?: { type: 'json_object' } | null\n seed?: number | null\n tools?: Array<Tool> | null\n tool_choice?:\n | 'none'\n | 'auto'\n | 'required'\n | { type: 'function', function: { name: string } }\n | null\n user?: string | null\n reasoning_effort?: 'low' | 'medium' | 'high' | null\n snippy?: { enabled: boolean } | null\n}\n\nexport interface Tool {\n type: 'function'\n function: {\n name: string\n description?: string\n parameters: Record<string, unknown>\n }\n copilot_cache_control?: { type: 'ephemeral' } | null\n}\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system' | 'tool' | 'developer'\n content: string | Array<ContentPart> | null\n\n name?: string\n tool_calls?: Array<ToolCall>\n tool_call_id?: string\n copilot_cache_control?: { type: 'ephemeral' } | null\n}\n\nexport interface ToolCall {\n id: string\n type: 'function'\n function: {\n name: string\n arguments: string\n }\n}\n\nexport type ContentPart = TextPart | ImagePart\n\nexport interface TextPart {\n type: 'text'\n text: string\n}\n\nexport interface ImagePart {\n type: 'image_url'\n image_url: {\n url: string\n detail?: 'low' | 'high' | 'auto'\n }\n}\n","import type { 
Context } from 'hono'\n\nimport type { SSEMessage } from 'hono/streaming'\nimport type { ChatCompletionResponse, ChatCompletionsPayload } from '~/services/copilot/create-chat-completions'\nimport consola from 'consola'\n\nimport { streamSSE } from 'hono/streaming'\nimport { awaitApproval } from '~/lib/approval'\nimport { checkRateLimit } from '~/lib/rate-limit'\nimport { state } from '~/lib/state'\nimport { getTokenCount } from '~/lib/tokenizer'\nimport { isNullish } from '~/lib/utils'\nimport {\n\n createChatCompletions,\n} from '~/services/copilot/create-chat-completions'\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n let payload = await c.req.json<ChatCompletionsPayload>()\n consola.debug('Request payload:', JSON.stringify(payload).slice(-400))\n\n // Find the selected model\n const selectedModel = state.models?.data.find(\n model => model.id === payload.model,\n )\n\n // Calculate and display token count\n try {\n if (selectedModel) {\n const tokenCount = await getTokenCount(payload, selectedModel)\n consola.info('Current token count:', tokenCount)\n }\n else {\n consola.warn('No model selected, skipping token count calculation')\n }\n }\n catch (error) {\n consola.warn('Failed to calculate token count:', error)\n }\n\n if (state.manualApprove)\n await awaitApproval()\n\n if (isNullish(payload.max_tokens)) {\n payload = {\n ...payload,\n max_tokens: selectedModel?.capabilities.limits.max_output_tokens,\n }\n consola.debug('Set max_tokens to:', JSON.stringify(payload.max_tokens))\n }\n\n const response = await createChatCompletions(payload)\n\n if (isNonStreaming(response)) {\n consola.debug('Non-streaming response:', JSON.stringify(response))\n return c.json(response)\n }\n\n consola.debug('Streaming response')\n return streamSSE(c, async (stream) => {\n for await (const chunk of response) {\n consola.debug('Streaming chunk:', JSON.stringify(chunk))\n await stream.writeSSE(chunk as SSEMessage)\n }\n })\n}\n\nfunction isNonStreaming(response: Awaited<ReturnType<typeof createChatCompletions>>): response is ChatCompletionResponse {\n return Object.hasOwn(response, 'choices')\n}\n","import { Hono } from 'hono'\n\nimport { forwardError } from '~/lib/error'\n\nimport { handleCompletion } from './handler'\n\nexport const completionRoutes = new Hono()\n\ncompletionRoutes.post('/', async (c) => {\n try {\n return await handleCompletion(c)\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { copilotBaseUrl, copilotHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function createEmbeddings(payload: EmbeddingRequest) {\n if (!state.copilotToken)\n throw new Error('Copilot token not found')\n\n const response = await fetch(`${copilotBaseUrl(state)}/embeddings`, {\n method: 'POST',\n headers: copilotHeaders(state),\n body: JSON.stringify(payload),\n })\n\n if (!response.ok)\n throw new HTTPError('Failed to create embeddings', response)\n\n return (await response.json()) as EmbeddingResponse\n}\n\nexport interface EmbeddingRequest {\n input: string | Array<string>\n model: string\n}\n\nexport interface Embedding {\n object: string\n embedding: Array<number>\n index: number\n}\n\nexport interface EmbeddingResponse {\n object: string\n data: Array<Embedding>\n model: string\n usage: {\n prompt_tokens: number\n total_tokens: number\n }\n}\n","import type { EmbeddingRequest } from '~/services/copilot/create-embeddings'\n\nimport { Hono } from 'hono'\nimport 
{ forwardError } from '~/lib/error'\nimport {\n createEmbeddings,\n\n} from '~/services/copilot/create-embeddings'\n\nexport const embeddingRoutes = new Hono()\n\nembeddingRoutes.post('/', async (c) => {\n try {\n const paylod = await c.req.json<EmbeddingRequest>()\n const response = await createEmbeddings(paylod)\n\n return c.json(response)\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n","export interface ModelConfig {\n /** Whether the model uses thinking mode (drives /responses vs /chat/completions routing) */\n thinkingMode?: boolean\n /** Whether to add copilot_cache_control headers for prompt caching */\n enableCacheControl?: boolean\n /** Default reasoning effort level */\n defaultReasoningEffort?: 'low' | 'medium' | 'high'\n /** Supported reasoning effort levels */\n supportedReasoningEfforts?: Array<'low' | 'medium' | 'high' | 'xhigh'>\n /** Whether the model supports tool_choice parameter */\n supportsToolChoice?: boolean\n /** Whether the model supports parallel tool calls */\n supportsParallelToolCalls?: boolean\n}\n\nconst MODEL_CONFIGS: Record<string, ModelConfig> = {\n 'claude-sonnet-4': {\n enableCacheControl: true,\n defaultReasoningEffort: undefined,\n supportsToolChoice: false,\n supportsParallelToolCalls: false,\n },\n 'claude-sonnet-4.5': {\n enableCacheControl: true,\n defaultReasoningEffort: undefined,\n supportsToolChoice: false,\n supportsParallelToolCalls: false,\n },\n 'claude-opus-4.5': {\n enableCacheControl: true,\n defaultReasoningEffort: undefined,\n supportsToolChoice: false,\n supportsParallelToolCalls: false,\n },\n 'claude-opus-4.6': {\n enableCacheControl: true,\n defaultReasoningEffort: 'high',\n supportedReasoningEfforts: ['low', 'medium', 'high'],\n supportsToolChoice: false,\n supportsParallelToolCalls: true,\n },\n 'gpt-4o': {\n supportsToolChoice: true,\n supportsParallelToolCalls: true,\n },\n 'gpt-4.1': {\n supportsToolChoice: true,\n supportsParallelToolCalls: true,\n },\n 'gpt-5': {\n thinkingMode: true,\n supportsToolChoice: true,\n supportsParallelToolCalls: true,\n },\n 'gpt-5.1-codex': {\n thinkingMode: true,\n defaultReasoningEffort: 'high',\n supportedReasoningEfforts: ['low', 'medium', 'high'],\n supportsToolChoice: true,\n supportsParallelToolCalls: true,\n },\n 'gpt-5.2-codex': {\n thinkingMode: true,\n defaultReasoningEffort: 'high',\n supportedReasoningEfforts: ['low', 'medium', 'high', 'xhigh'],\n supportsToolChoice: true,\n supportsParallelToolCalls: true,\n },\n 'o3-mini': {\n thinkingMode: true,\n supportsToolChoice: true,\n },\n 'o4-mini': {\n thinkingMode: true,\n supportsToolChoice: true,\n },\n}\n\n/**\n * Get model-specific configuration.\n * Returns the config for an exact match, or for the base model name (without version suffix).\n * Falls back to an empty config if no match is found.\n */\nexport function getModelConfig(modelId: string): ModelConfig {\n // Exact match\n if (MODEL_CONFIGS[modelId]) {\n return MODEL_CONFIGS[modelId]\n }\n\n // Try prefix match for families (e.g., 'gpt-5.2-codex-max' matches 'gpt-5.2-codex')\n const entries = Object.entries(MODEL_CONFIGS).sort(\n (a, b) => b[0].length - a[0].length,\n )\n for (const [key, config] of entries) {\n if (modelId.startsWith(key)) {\n return config\n }\n }\n\n // Default: check if it's a Claude model (enable cache control by default)\n if (modelId.startsWith('claude')) {\n return { enableCacheControl: true, supportsToolChoice: false }\n }\n\n return {}\n}\n\n/**\n * Check if a model uses thinking mode (and should use /responses endpoint)\n 
*/\nexport function isThinkingModeModel(modelId: string): boolean {\n return getModelConfig(modelId).thinkingMode === true\n}\n","import type { AnthropicResponse } from './anthropic-types'\n\nexport function mapOpenAIStopReasonToAnthropic(\n finishReason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | null,\n): AnthropicResponse['stop_reason'] {\n if (finishReason === null) {\n return null\n }\n const stopReasonMap = {\n stop: 'end_turn',\n length: 'max_tokens',\n tool_calls: 'tool_use',\n content_filter: 'end_turn',\n } as const\n return stopReasonMap[finishReason]\n}\n","import type { AnthropicAssistantContentBlock, AnthropicAssistantMessage, AnthropicMessage, AnthropicMessagesPayload, AnthropicResponse, AnthropicTextBlock, AnthropicThinkingBlock, AnthropicTool, AnthropicToolResultBlock, AnthropicToolUseBlock, AnthropicUserContentBlock, AnthropicUserMessage } from './anthropic-types'\n\nimport type { ChatCompletionResponse, ChatCompletionsPayload, ContentPart, Message, TextPart, Tool, ToolCall } from '~/services/copilot/create-chat-completions'\nimport { getModelConfig } from '~/lib/model-config'\nimport { mapOpenAIStopReasonToAnthropic } from './utils'\n\n// Payload translation\n\nexport function translateToOpenAI(\n payload: AnthropicMessagesPayload,\n): ChatCompletionsPayload {\n const model = translateModelName(payload.model)\n const modelConfig = getModelConfig(model)\n const enableCacheControl = modelConfig.enableCacheControl === true\n\n const messages = translateAnthropicMessagesToOpenAI(\n payload.messages,\n payload.system,\n )\n\n // Add copilot_cache_control to the system message for Claude models\n if (enableCacheControl) {\n const systemMessage = messages.find(m => m.role === 'system')\n if (systemMessage) {\n systemMessage.copilot_cache_control = { type: 'ephemeral' }\n }\n }\n\n const tools = translateAnthropicToolsToOpenAI(payload.tools)\n\n // Add copilot_cache_control to the last tool for Claude models\n if (enableCacheControl && tools && tools.length > 0) {\n tools[tools.length - 1].copilot_cache_control = { type: 'ephemeral' }\n }\n\n // Map Anthropic thinking budget_tokens to reasoning_effort\n let reasoning_effort: 'low' | 'medium' | 'high' | undefined\n if (payload.thinking?.budget_tokens) {\n reasoning_effort = 'high'\n }\n else if (modelConfig.thinkingMode !== true && modelConfig.defaultReasoningEffort) {\n reasoning_effort = modelConfig.defaultReasoningEffort\n }\n\n return {\n model,\n messages,\n max_tokens: payload.max_tokens,\n stop: payload.stop_sequences,\n stream: payload.stream,\n temperature: payload.temperature,\n top_p: payload.top_p,\n user: payload.metadata?.user_id,\n tools,\n tool_choice: translateAnthropicToolChoiceToOpenAI(payload.tool_choice),\n snippy: { enabled: false },\n ...(reasoning_effort && { reasoning_effort }),\n }\n}\n\nfunction translateModelName(model: string): string {\n // Claude subagent requests use specific version suffixes that Copilot doesn't support\n // e.g., claude-sonnet-4-20250514 → claude-sonnet-4\n const hyphenVersionMatch = model.match(\n /^(claude-(?:sonnet|opus|haiku)-4)-(5|6)-\\d+$/,\n )\n if (hyphenVersionMatch) {\n return `${hyphenVersionMatch[1]}.${hyphenVersionMatch[2]}`\n }\n const claudePatterns = [\n /^(claude-sonnet-4)-\\d+$/,\n /^(claude-opus-4)-\\d+$/,\n /^(claude-haiku-4)-\\d+$/,\n /^(claude-sonnet-4\\.5)-\\d+$/,\n /^(claude-opus-4\\.5)-\\d+$/,\n /^(claude-opus-4\\.6)-\\d+$/,\n /^(claude-haiku-4\\.5)-\\d+$/,\n ]\n\n for (const pattern of claudePatterns) {\n const match = model.match(pattern)\n 
if (match) {\n return match[1]\n }\n }\n\n return model\n}\n\nfunction translateAnthropicMessagesToOpenAI(\n anthropicMessages: Array<AnthropicMessage>,\n system: string | Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n const systemMessages = handleSystemPrompt(system)\n\n const otherMessages = anthropicMessages.flatMap(message =>\n message.role === 'user'\n ? handleUserMessage(message)\n : handleAssistantMessage(message),\n )\n\n return [...systemMessages, ...otherMessages]\n}\n\nfunction handleSystemPrompt(\n system: string | Array<AnthropicTextBlock> | undefined,\n): Array<Message> {\n if (!system) {\n return []\n }\n\n if (typeof system === 'string') {\n return [{ role: 'system', content: system }]\n }\n else {\n const systemText = system.map(block => block.text).join('\\n\\n')\n return [{ role: 'system', content: systemText }]\n }\n}\n\nfunction handleUserMessage(message: AnthropicUserMessage): Array<Message> {\n const newMessages: Array<Message> = []\n\n if (Array.isArray(message.content)) {\n const toolResultBlocks = message.content.filter(\n (block): block is AnthropicToolResultBlock =>\n block.type === 'tool_result',\n )\n const otherBlocks = message.content.filter(\n block => block.type !== 'tool_result',\n )\n\n // Tool results must come first to maintain protocol: tool_use -> tool_result -> user\n for (const block of toolResultBlocks) {\n newMessages.push({\n role: 'tool',\n tool_call_id: block.tool_use_id,\n content: mapContent(block.content),\n })\n }\n\n if (otherBlocks.length > 0) {\n newMessages.push({\n role: 'user',\n content: mapContent(otherBlocks),\n })\n }\n }\n else {\n newMessages.push({\n role: 'user',\n content: mapContent(message.content),\n })\n }\n\n return newMessages\n}\n\nfunction handleAssistantMessage(\n message: AnthropicAssistantMessage,\n): Array<Message> {\n if (!Array.isArray(message.content)) {\n return [\n {\n role: 'assistant',\n content: mapContent(message.content),\n },\n ]\n }\n\n const toolUseBlocks = message.content.filter(\n (block): block is AnthropicToolUseBlock => block.type === 'tool_use',\n )\n\n const textBlocks = message.content.filter(\n (block): block is AnthropicTextBlock => block.type === 'text',\n )\n\n const thinkingBlocks = message.content.filter(\n (block): block is AnthropicThinkingBlock => block.type === 'thinking',\n )\n\n // Combine text and thinking blocks, as OpenAI doesn't have separate thinking blocks\n const allTextContent = [\n ...textBlocks.map(b => b.text),\n ...thinkingBlocks.map(b => b.thinking),\n ].join('\\n\\n')\n\n return toolUseBlocks.length > 0\n ? [\n {\n role: 'assistant',\n content: allTextContent || null,\n tool_calls: toolUseBlocks.map(toolUse => ({\n id: toolUse.id,\n type: 'function',\n function: {\n name: toolUse.name,\n arguments: JSON.stringify(toolUse.input),\n },\n })),\n },\n ]\n : [\n {\n role: 'assistant',\n content: mapContent(message.content),\n },\n ]\n}\n\nfunction mapContent(\n content:\n | string\n | Array<AnthropicUserContentBlock | AnthropicAssistantContentBlock>,\n): string | Array<ContentPart> | null {\n if (typeof content === 'string') {\n return content\n }\n if (!Array.isArray(content)) {\n return null\n }\n\n const hasImage = content.some(block => block.type === 'image')\n if (!hasImage) {\n return content\n .filter(\n (block): block is AnthropicTextBlock | AnthropicThinkingBlock =>\n block.type === 'text' || block.type === 'thinking',\n )\n .map(block => (block.type === 'text' ? 
block.text : block.thinking))\n .join('\\n\\n')\n }\n\n const contentParts: Array<ContentPart> = []\n for (const block of content) {\n switch (block.type) {\n case 'text': {\n contentParts.push({ type: 'text', text: block.text })\n\n break\n }\n case 'thinking': {\n contentParts.push({ type: 'text', text: block.thinking })\n\n break\n }\n case 'image': {\n contentParts.push({\n type: 'image_url',\n image_url: {\n url: `data:${block.source.media_type};base64,${block.source.data}`,\n },\n })\n\n break\n }\n // No default\n }\n }\n return contentParts\n}\n\nfunction translateAnthropicToolsToOpenAI(\n anthropicTools: Array<AnthropicTool> | undefined,\n): Array<Tool> | undefined {\n if (!anthropicTools) {\n return undefined\n }\n return anthropicTools.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.input_schema,\n },\n }))\n}\n\nfunction translateAnthropicToolChoiceToOpenAI(\n anthropicToolChoice: AnthropicMessagesPayload['tool_choice'],\n): ChatCompletionsPayload['tool_choice'] {\n if (!anthropicToolChoice) {\n return undefined\n }\n\n switch (anthropicToolChoice.type) {\n case 'auto': {\n return 'auto'\n }\n case 'any': {\n return 'required'\n }\n case 'tool': {\n if (anthropicToolChoice.name) {\n return {\n type: 'function',\n function: { name: anthropicToolChoice.name },\n }\n }\n return undefined\n }\n case 'none': {\n return 'none'\n }\n default: {\n return undefined\n }\n }\n}\n\n// Response translation\n\nexport function translateToAnthropic(\n response: ChatCompletionResponse,\n): AnthropicResponse {\n // Merge content from all choices\n const allTextBlocks: Array<AnthropicTextBlock> = []\n const allToolUseBlocks: Array<AnthropicToolUseBlock> = []\n let stopReason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | null\n = null // default\n stopReason = response.choices[0]?.finish_reason ?? stopReason\n\n // Process all choices to extract text and tool use blocks\n for (const choice of response.choices) {\n const textBlocks = getAnthropicTextBlocks(choice.message.content)\n const toolUseBlocks = getAnthropicToolUseBlocks(choice.message.tool_calls)\n\n allTextBlocks.push(...textBlocks)\n allToolUseBlocks.push(...toolUseBlocks)\n\n // Use the finish_reason from the first choice, or prioritize tool_calls\n if (choice.finish_reason === 'tool_calls' || stopReason === 'stop') {\n stopReason = choice.finish_reason\n }\n }\n\n // Note: GitHub Copilot doesn't generate thinking blocks, so we don't include them in responses\n\n return {\n id: response.id,\n type: 'message',\n role: 'assistant',\n model: response.model,\n content: [...allTextBlocks, ...allToolUseBlocks],\n stop_reason: mapOpenAIStopReasonToAnthropic(stopReason),\n stop_sequence: null,\n usage: {\n input_tokens:\n (response.usage?.prompt_tokens ?? 0)\n - (response.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: response.usage?.completion_tokens ?? 
0,\n ...(response.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n response.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n }\n}\n\nfunction getAnthropicTextBlocks(\n messageContent: Message['content'],\n): Array<AnthropicTextBlock> {\n if (typeof messageContent === 'string') {\n return [{ type: 'text', text: messageContent }]\n }\n\n if (Array.isArray(messageContent)) {\n return messageContent\n .filter((part): part is TextPart => part.type === 'text')\n .map(part => ({ type: 'text', text: part.text }))\n }\n\n return []\n}\n\nfunction getAnthropicToolUseBlocks(\n toolCalls: Array<ToolCall> | undefined,\n): Array<AnthropicToolUseBlock> {\n if (!toolCalls) {\n return []\n }\n return toolCalls.map(toolCall => ({\n type: 'tool_use',\n id: toolCall.id,\n name: toolCall.function.name,\n input: JSON.parse(toolCall.function.arguments) as Record<string, unknown>,\n }))\n}\n","import type { Context } from 'hono'\n\nimport type { AnthropicMessagesPayload } from './anthropic-types'\n\nimport consola from 'consola'\nimport { state } from '~/lib/state'\n\nimport { getTokenCount } from '~/lib/tokenizer'\nimport { translateToOpenAI } from './non-stream-translation'\n\n/**\n * Handles token counting for Anthropic messages\n */\nexport async function handleCountTokens(c: Context) {\n try {\n const anthropicBeta = c.req.header('anthropic-beta')\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n\n const openAIPayload = translateToOpenAI(anthropicPayload)\n\n const selectedModel = state.models?.data.find(\n model => model.id === anthropicPayload.model,\n )\n\n if (!selectedModel) {\n consola.warn('Model not found, returning default token count')\n return c.json({\n input_tokens: 1,\n })\n }\n\n const tokenCount = await getTokenCount(openAIPayload, selectedModel)\n\n if (anthropicPayload.tools && anthropicPayload.tools.length > 0) {\n let mcpToolExist = false\n if (anthropicBeta?.startsWith('claude-code')) {\n mcpToolExist = anthropicPayload.tools.some(tool =>\n tool.name.startsWith('mcp__'),\n )\n }\n if (!mcpToolExist) {\n if (anthropicPayload.model.startsWith('claude')) {\n // https://docs.anthropic.com/en/docs/agents-and-tools/tool-use/overview#pricing\n tokenCount.input = tokenCount.input + 346\n }\n else if (anthropicPayload.model.startsWith('grok')) {\n tokenCount.input = tokenCount.input + 480\n }\n }\n }\n\n let finalTokenCount = tokenCount.input + tokenCount.output\n if (anthropicPayload.model.startsWith('claude')) {\n finalTokenCount = Math.round(finalTokenCount * 1.15)\n }\n else if (anthropicPayload.model.startsWith('grok')) {\n finalTokenCount = Math.round(finalTokenCount * 1.03)\n }\n\n consola.info('Token count:', finalTokenCount)\n\n return c.json({\n input_tokens: finalTokenCount,\n })\n }\n catch (error) {\n consola.error('Error counting tokens:', error)\n return c.json({\n input_tokens: 1,\n })\n }\n}\n","import type { AnthropicStreamEventData, AnthropicStreamState } from './anthropic-types'\n\nimport type { ChatCompletionChunk } from '~/services/copilot/create-chat-completions'\nimport { mapOpenAIStopReasonToAnthropic } from './utils'\n\nfunction isToolBlockOpen(state: AnthropicStreamState): boolean {\n if (!state.contentBlockOpen) {\n return false\n }\n // Check if the current block index corresponds to any known tool call\n return Object.values(state.toolCalls).some(\n tc => tc.anthropicBlockIndex === state.contentBlockIndex,\n )\n}\n\nexport function translateChunkToAnthropicEvents(\n chunk: 
ChatCompletionChunk,\n state: AnthropicStreamState,\n): Array<AnthropicStreamEventData> {\n const events: Array<AnthropicStreamEventData> = []\n\n if (chunk.choices.length === 0) {\n return events\n }\n\n const choice = chunk.choices[0]\n const { delta } = choice\n\n if (!state.messageStartSent) {\n events.push({\n type: 'message_start',\n message: {\n id: chunk.id,\n type: 'message',\n role: 'assistant',\n content: [],\n model: chunk.model,\n stop_reason: null,\n stop_sequence: null,\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: 0, // Will be updated in message_delta when finished\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n })\n state.messageStartSent = true\n }\n\n if (delta.content) {\n if (isToolBlockOpen(state)) {\n // A tool block was open, so close it before starting a text block.\n events.push({\n type: 'content_block_stop',\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n if (!state.contentBlockOpen) {\n events.push({\n type: 'content_block_start',\n index: state.contentBlockIndex,\n content_block: {\n type: 'text',\n text: '',\n },\n })\n state.contentBlockOpen = true\n }\n\n events.push({\n type: 'content_block_delta',\n index: state.contentBlockIndex,\n delta: {\n type: 'text_delta',\n text: delta.content,\n },\n })\n }\n\n if (delta.tool_calls) {\n for (const toolCall of delta.tool_calls) {\n if (toolCall.id && toolCall.function?.name) {\n // New tool call starting.\n if (state.contentBlockOpen) {\n // Close any previously open block.\n events.push({\n type: 'content_block_stop',\n index: state.contentBlockIndex,\n })\n state.contentBlockIndex++\n state.contentBlockOpen = false\n }\n\n const anthropicBlockIndex = state.contentBlockIndex\n state.toolCalls[toolCall.index] = {\n id: toolCall.id,\n name: toolCall.function.name,\n anthropicBlockIndex,\n }\n\n events.push({\n type: 'content_block_start',\n index: anthropicBlockIndex,\n content_block: {\n type: 'tool_use',\n id: toolCall.id,\n name: toolCall.function.name,\n input: {},\n },\n })\n state.contentBlockOpen = true\n }\n\n if (toolCall.function?.arguments) {\n const toolCallInfo = state.toolCalls[toolCall.index]\n // Tool call can still be empty\n if (toolCallInfo) {\n events.push({\n type: 'content_block_delta',\n index: toolCallInfo.anthropicBlockIndex,\n delta: {\n type: 'input_json_delta',\n partial_json: toolCall.function.arguments,\n },\n })\n }\n }\n }\n }\n\n if (choice.finish_reason) {\n if (state.contentBlockOpen) {\n events.push({\n type: 'content_block_stop',\n index: state.contentBlockIndex,\n })\n state.contentBlockOpen = false\n }\n\n events.push(\n {\n type: 'message_delta',\n delta: {\n stop_reason: mapOpenAIStopReasonToAnthropic(choice.finish_reason),\n stop_sequence: null,\n },\n usage: {\n input_tokens:\n (chunk.usage?.prompt_tokens ?? 0)\n - (chunk.usage?.prompt_tokens_details?.cached_tokens ?? 0),\n output_tokens: chunk.usage?.completion_tokens ?? 
0,\n ...(chunk.usage?.prompt_tokens_details?.cached_tokens\n !== undefined && {\n cache_read_input_tokens:\n chunk.usage.prompt_tokens_details.cached_tokens,\n }),\n },\n },\n {\n type: 'message_stop',\n },\n )\n }\n\n return events\n}\n\nexport function translateErrorToAnthropicErrorEvent(): AnthropicStreamEventData {\n return {\n type: 'error',\n error: {\n type: 'api_error',\n message: 'An unexpected error occurred during streaming.',\n },\n }\n}\n","import type { Context } from 'hono'\n\nimport type { AnthropicMessagesPayload, AnthropicStreamState } from './anthropic-types'\nimport type { ChatCompletionChunk, ChatCompletionResponse } from '~/services/copilot/create-chat-completions'\n\nimport consola from 'consola'\nimport { streamSSE } from 'hono/streaming'\nimport { awaitApproval } from '~/lib/approval'\nimport { checkRateLimit } from '~/lib/rate-limit'\n\nimport { state } from '~/lib/state'\nimport {\n\n createChatCompletions,\n} from '~/services/copilot/create-chat-completions'\nimport {\n translateToAnthropic,\n translateToOpenAI,\n} from './non-stream-translation'\nimport { translateChunkToAnthropicEvents } from './stream-translation'\n\nexport async function handleCompletion(c: Context) {\n await checkRateLimit(state)\n\n const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()\n consola.debug('Anthropic request payload:', JSON.stringify(anthropicPayload))\n\n const openAIPayload = translateToOpenAI(anthropicPayload)\n consola.debug(\n 'Translated OpenAI request payload:',\n JSON.stringify(openAIPayload),\n )\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n const response = await createChatCompletions(openAIPayload)\n\n if (isNonStreaming(response)) {\n consola.debug(\n 'Non-streaming response from Copilot:',\n JSON.stringify(response).slice(-400),\n )\n const anthropicResponse = translateToAnthropic(response)\n consola.debug(\n 'Translated Anthropic response:',\n JSON.stringify(anthropicResponse),\n )\n return c.json(anthropicResponse)\n }\n\n consola.debug('Streaming response from Copilot')\n return streamSSE(c, async (stream) => {\n const streamState: AnthropicStreamState = {\n messageStartSent: false,\n contentBlockIndex: 0,\n contentBlockOpen: false,\n toolCalls: {},\n }\n\n for await (const rawEvent of response) {\n consola.debug('Copilot raw stream event:', JSON.stringify(rawEvent))\n if (rawEvent.data === '[DONE]') {\n break\n }\n\n if (!rawEvent.data) {\n continue\n }\n\n const chunk = JSON.parse(rawEvent.data) as ChatCompletionChunk\n const events = translateChunkToAnthropicEvents(chunk, streamState)\n\n for (const event of events) {\n consola.debug('Translated Anthropic event:', JSON.stringify(event))\n await stream.writeSSE({\n event: event.type,\n data: JSON.stringify(event),\n })\n }\n }\n })\n}\n\nfunction isNonStreaming(response: Awaited<ReturnType<typeof createChatCompletions>>): response is ChatCompletionResponse {\n return Object.hasOwn(response, 'choices')\n}\n","import { Hono } from 'hono'\n\nimport { forwardError } from '~/lib/error'\n\nimport { handleCountTokens } from './count-tokens-handler'\nimport { handleCompletion } from './handler'\n\nexport const messageRoutes = new Hono()\n\nmessageRoutes.post('/', async (c) => {\n try {\n return await handleCompletion(c)\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n\nmessageRoutes.post('/count_tokens', async (c) => {\n try {\n return await handleCountTokens(c)\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from 
'hono'\n\nimport { forwardError } from '~/lib/error'\nimport { state } from '~/lib/state'\nimport { cacheModels } from '~/lib/utils'\n\nexport const modelRoutes = new Hono()\n\nmodelRoutes.get('/', async (c) => {\n try {\n if (!state.models) {\n // This should be handled by startup logic, but as a fallback.\n await cacheModels()\n }\n\n const models = state.models?.data.map(model => ({\n id: model.id,\n object: 'model',\n type: 'model',\n created: 0, // No date available from source\n created_at: new Date(0).toISOString(), // No date available from source\n owned_by: model.vendor,\n display_name: model.name,\n }))\n\n return c.json({\n object: 'list',\n data: models,\n has_more: false,\n })\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n","import consola from 'consola'\nimport { events } from 'fetch-event-stream'\n\nimport { copilotBaseUrl, copilotHeaders } from '~/lib/api-config'\nimport { HTTPError } from '~/lib/error'\nimport { state } from '~/lib/state'\n\nexport async function createResponses(payload: ResponsesPayload) {\n if (!state.copilotToken)\n throw new Error('Copilot token not found')\n\n const hasVision = hasVisionInput(payload)\n\n const isAgentCall = payload.input.some(item =>\n ['assistant'].includes(item.role),\n )\n\n const headers: Record<string, string> = {\n ...copilotHeaders(state, hasVision),\n 'X-Initiator': isAgentCall ? 'agent' : 'user',\n }\n\n const response = await fetch(`${copilotBaseUrl(state)}/responses`, {\n method: 'POST',\n headers,\n body: JSON.stringify(payload),\n })\n\n if (!response.ok) {\n consola.error('Failed to create responses', response)\n throw new HTTPError('Failed to create responses', response)\n }\n\n if (payload.stream) {\n return events(response)\n }\n\n return (await response.json()) as ResponsesResponse\n}\n\nfunction hasVisionInput(payload: ResponsesPayload): boolean {\n const visionTypes = new Set([\n 'input_image',\n 'image',\n 'image_url',\n 'image_file',\n ])\n\n return payload.input.some((item) => {\n if (!Array.isArray(item.content)) {\n return false\n }\n return item.content.some(part => visionTypes.has(part.type))\n })\n}\n\n// Payload types\n\nexport interface ResponsesPayload {\n model: string\n instructions?: string\n input: Array<ResponsesInputItem>\n tools?: Array<ResponsesTool>\n reasoning?: {\n effort?: 'low' | 'medium' | 'high' | 'xhigh'\n summary?: 'auto' | 'concise' | 'detailed' | 'none'\n }\n parallel_tool_calls?: boolean\n store?: boolean\n stream?: boolean\n include?: Array<string>\n temperature?: number | null\n top_p?: number | null\n max_output_tokens?: number | null\n}\n\nexport interface ResponsesInputItem {\n role: 'user' | 'assistant' | 'system' | 'developer'\n content: string | Array<{ type: string, [key: string]: unknown }>\n [key: string]: unknown\n}\n\nexport interface ResponsesTool {\n type: 'function'\n name: string\n description?: string\n parameters?: Record<string, unknown> | null\n strict?: boolean\n}\n\n// Response types\n\nexport interface ResponsesResponse {\n id: string\n object: 'response'\n model: string\n output: Array<ResponsesOutputItem>\n usage?: {\n input_tokens: number\n output_tokens: number\n total_tokens: number\n }\n status: 'completed' | 'failed' | 'incomplete' | 'in_progress'\n}\n\nexport interface ResponsesOutputItem {\n type: 'message' | 'function_call' | 'reasoning'\n id?: string\n // For message type\n role?: 'assistant'\n content?: Array<{ type: 'output_text', text: string }>\n // For function_call type\n name?: string\n arguments?: string\n call_id?: 
string\n // For reasoning type\n summary?: Array<{ type: 'summary_text', text: string }>\n}\n","import type { Context } from 'hono'\n\nimport type { SSEMessage } from 'hono/streaming'\nimport type { ResponsesPayload, ResponsesResponse } from '~/services/copilot/create-responses'\nimport consola from 'consola'\n\nimport { streamSSE } from 'hono/streaming'\nimport { awaitApproval } from '~/lib/approval'\nimport { checkRateLimit } from '~/lib/rate-limit'\nimport { state } from '~/lib/state'\nimport { createResponses } from '~/services/copilot/create-responses'\n\nexport async function handleResponses(c: Context) {\n await checkRateLimit(state)\n\n const payload = await c.req.json<ResponsesPayload>()\n consola.debug('Responses API request payload:', JSON.stringify(payload).slice(-400))\n\n if (state.manualApprove) {\n await awaitApproval()\n }\n\n const response = await createResponses(payload)\n\n if (isNonStreaming(response)) {\n consola.debug('Non-streaming responses:', JSON.stringify(response))\n return c.json(response)\n }\n\n consola.debug('Streaming responses')\n return streamSSE(c, async (stream) => {\n for await (const chunk of response) {\n consola.debug('Responses streaming chunk:', JSON.stringify(chunk))\n await stream.writeSSE(chunk as SSEMessage)\n }\n })\n}\n\nfunction isNonStreaming(response: Awaited<ReturnType<typeof createResponses>>): response is ResponsesResponse {\n return Object.hasOwn(response, 'output')\n}\n","import { Hono } from 'hono'\n\nimport { forwardError } from '~/lib/error'\n\nimport { handleResponses } from './handler'\n\nexport const responsesRoutes = new Hono()\n\nresponsesRoutes.post('/', async (c) => {\n try {\n return await handleResponses(c)\n }\n catch (error) {\n return await forwardError(c, error)\n }\n})\n","import { Hono } from 'hono'\n\nimport { state } from '~/lib/state'\n\nexport const tokenRoute = new Hono()\n\ntokenRoute.get('/', (c) => {\n try {\n return c.json({\n token: state.copilotToken,\n })\n }\n catch (error) {\n console.error('Error fetching token:', error)\n return c.json({ error: 'Failed to fetch token', token: null }, 500)\n }\n})\n","import { Hono } from 'hono'\n\nimport { getCopilotUsage } from '~/services/github/get-copilot-usage'\n\nexport const usageRoute = new Hono()\n\nusageRoute.get('/', async (c) => {\n try {\n const usage = await getCopilotUsage()\n return c.json(usage)\n }\n catch (error) {\n console.error('Error fetching Copilot usage:', error)\n return c.json({ error: 'Failed to fetch Copilot usage' }, 500)\n }\n})\n","import { Hono } from 'hono'\nimport { cors } from 'hono/cors'\nimport { logger } from 'hono/logger'\n\nimport { completionRoutes } from './routes/chat-completions/route'\nimport { embeddingRoutes } from './routes/embeddings/route'\nimport { messageRoutes } from './routes/messages/route'\nimport { modelRoutes } from './routes/models/route'\nimport { responsesRoutes } from './routes/responses/route'\nimport { tokenRoute } from './routes/token/route'\nimport { usageRoute } from './routes/usage/route'\n\nexport const server = new Hono()\n\nserver.use(logger())\nserver.use(cors())\n\nserver.get('/', c => c.text('Server running'))\n\nserver.route('/chat/completions', completionRoutes)\nserver.route('/models', modelRoutes)\nserver.route('/embeddings', embeddingRoutes)\nserver.route('/responses', responsesRoutes)\nserver.route('/usage', usageRoute)\nserver.route('/token', tokenRoute)\n\n// Compatibility with tools that expect v1/ prefix\nserver.route('/v1/chat/completions', 
completionRoutes)\nserver.route('/v1/models', modelRoutes)\nserver.route('/v1/embeddings', embeddingRoutes)\nserver.route('/v1/responses', responsesRoutes)\n\n// Anthropic compatible endpoints\nserver.route('/v1/messages', messageRoutes)\n","#!/usr/bin/env node\n\nimport type { ServerHandler } from 'srvx'\nimport { defineCommand } from 'citty'\nimport clipboard from 'clipboardy'\nimport consola from 'consola'\nimport { serve } from 'srvx'\nimport invariant from 'tiny-invariant'\n\nimport { ensurePaths } from './lib/paths'\nimport { initProxyFromEnv } from './lib/proxy'\nimport { generateEnvScript } from './lib/shell'\nimport { state } from './lib/state'\nimport { setupCopilotToken, setupGitHubToken } from './lib/token'\nimport { cacheModels, cacheVSCodeVersion } from './lib/utils'\nimport { server } from './server'\n\ninterface RunServerOptions {\n port: number\n verbose: boolean\n accountType: string\n manual: boolean\n rateLimit?: number\n rateLimitWait: boolean\n githubToken?: string\n claudeCode: boolean\n showToken: boolean\n proxyEnv: boolean\n}\n\nexport async function runServer(options: RunServerOptions): Promise<void> {\n if (options.proxyEnv) {\n initProxyFromEnv()\n }\n\n if (options.verbose) {\n consola.level = 5\n consola.info('Verbose logging enabled')\n }\n\n state.accountType = options.accountType\n if (options.accountType !== 'individual') {\n consola.info(`Using ${options.accountType} plan GitHub account`)\n }\n\n state.manualApprove = options.manual\n state.rateLimitSeconds = options.rateLimit\n state.rateLimitWait = options.rateLimitWait\n state.showToken = options.showToken\n\n await ensurePaths()\n await cacheVSCodeVersion()\n\n if (options.githubToken) {\n state.githubToken = options.githubToken\n consola.info('Using provided GitHub token')\n }\n else {\n await setupGitHubToken()\n }\n\n await setupCopilotToken()\n await cacheModels()\n\n consola.info(\n `Available models: \\n${state.models?.data.map(model => `- ${model.id}`).join('\\n')}`,\n )\n\n const serverUrl = `http://localhost:${options.port}`\n\n if (options.claudeCode) {\n invariant(state.models, 'Models should be loaded by now')\n\n const selectedModel = await consola.prompt(\n 'Select a model to use with Claude Code',\n {\n type: 'select',\n options: state.models.data.map(model => model.id),\n },\n )\n\n const selectedSmallModel = await consola.prompt(\n 'Select a small model to use with Claude Code',\n {\n type: 'select',\n options: state.models.data.map(model => model.id),\n },\n )\n\n const command = generateEnvScript(\n {\n ANTHROPIC_BASE_URL: serverUrl,\n ANTHROPIC_AUTH_TOKEN: 'dummy',\n ANTHROPIC_MODEL: selectedModel,\n ANTHROPIC_DEFAULT_SONNET_MODEL: selectedModel,\n ANTHROPIC_SMALL_FAST_MODEL: selectedSmallModel,\n ANTHROPIC_DEFAULT_HAIKU_MODEL: selectedSmallModel,\n DISABLE_NON_ESSENTIAL_MODEL_CALLS: '1',\n CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC: '1',\n },\n 'claude',\n )\n\n try {\n clipboard.writeSync(command)\n consola.success('Copied Claude Code command to clipboard!')\n }\n catch {\n consola.warn(\n 'Failed to copy to clipboard. 
Here is the Claude Code command:',\n )\n consola.log(command)\n }\n }\n\n consola.box(\n `🌐 Usage Viewer: https://jer-y.github.io/copilot-proxy?endpoint=${serverUrl}/usage`,\n )\n\n serve({\n fetch: server.fetch as ServerHandler,\n port: options.port,\n })\n}\n\nexport const start = defineCommand({\n meta: {\n name: 'start',\n description: 'Start the Copilot API server',\n },\n args: {\n 'port': {\n alias: 'p',\n type: 'string',\n default: '4141',\n description: 'Port to listen on',\n },\n 'verbose': {\n alias: 'v',\n type: 'boolean',\n default: false,\n description: 'Enable verbose logging',\n },\n 'account-type': {\n alias: 'a',\n type: 'string',\n default: 'individual',\n description: 'Account type to use (individual, business, enterprise)',\n },\n 'manual': {\n type: 'boolean',\n default: false,\n description: 'Enable manual request approval',\n },\n 'rate-limit': {\n alias: 'r',\n type: 'string',\n description: 'Rate limit in seconds between requests',\n },\n 'wait': {\n alias: 'w',\n type: 'boolean',\n default: false,\n description:\n 'Wait instead of error when rate limit is hit. Has no effect if rate limit is not set',\n },\n 'github-token': {\n alias: 'g',\n type: 'string',\n description:\n 'Provide GitHub token directly (must be generated using the `auth` subcommand)',\n },\n 'claude-code': {\n alias: 'c',\n type: 'boolean',\n default: false,\n description:\n 'Generate a command to launch Claude Code with Copilot API config',\n },\n 'show-token': {\n type: 'boolean',\n default: false,\n description: 'Show GitHub and Copilot tokens on fetch and refresh',\n },\n 'proxy-env': {\n type: 'boolean',\n default: false,\n description: 'Initialize proxy from environment variables',\n },\n },\n run({ args }) {\n const rateLimitRaw = args['rate-limit']\n const rateLimit\n = rateLimitRaw === undefined ? 
undefined : Number.parseInt(rateLimitRaw, 10)\n\n return runServer({\n port: Number.parseInt(args.port, 10),\n verbose: args.verbose,\n accountType: args['account-type'],\n manual: args.manual,\n rateLimit,\n rateLimitWait: args.wait,\n githubToken: args['github-token'],\n claudeCode: args['claude-code'],\n showToken: args['show-token'],\n proxyEnv: args['proxy-env'],\n })\n },\n})\n","#!/usr/bin/env node\n\nimport { defineCommand, runMain } from 'citty'\n\nimport { auth } from './auth'\nimport { checkUsage } from './check-usage'\nimport { debug } from './debug'\nimport { start } from './start'\n\nconst main = defineCommand({\n meta: {\n name: 'copilot-proxy',\n description:\n 'A wrapper around GitHub Copilot API to make it OpenAI compatible, making it usable for other tools.',\n },\n subCommands: { auth, start, 'check-usage': checkUsage, debug },\n})\n\n// eslint-disable-next-line antfu/no-top-level-await\nawait runMain(main)\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAIA,MAAM,UAAU,KAAK,KAAK,GAAG,SAAS,EAAE,UAAU,SAAS,gBAAgB;AAE3E,MAAM,oBAAoB,KAAK,KAAK,SAAS,eAAe;AAE5D,MAAa,QAAQ;CACnB;CACA;CACD;AAED,eAAsB,cAA6B;AACjD,OAAM,GAAG,MAAM,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAClD,OAAM,WAAW,MAAM,kBAAkB;;AAG3C,eAAe,WAAW,UAAiC;AACzD,KAAI;AACF,QAAM,GAAG,OAAO,UAAU,GAAG,UAAU,KAAK;SAExC;AACJ,QAAM,GAAG,UAAU,UAAU,GAAG;AAChC,QAAM,GAAG,MAAM,UAAU,IAAM;;;;;;ACLnC,MAAaA,QAAe;CAC1B,aAAa;CACb,eAAe;CACf,eAAe;CACf,WAAW;CACZ;;;;ACpBD,SAAgB,kBAAkB;AAChC,QAAO;EACL,gBAAgB;EAChB,UAAU;EACX;;AAGH,MAAM,kBAAkB;AACxB,MAAM,wBAAwB,gBAAgB;AAC9C,MAAM,aAAa,qBAAqB;AAExC,MAAM,cAAc;AAEpB,SAAgB,eAAe,SAAc;AAC3C,QAAOC,QAAM,gBAAgB,eACzB,kCACA,eAAeA,QAAM,YAAY;;AAEvC,SAAgB,eAAe,SAAc,SAAkB,OAAO;CACpE,MAAMC,UAAkC;EACtC,iBAAiB,UAAUD,QAAM;EACjC,gBAAgB,iBAAiB,CAAC;EAClC,0BAA0B;EAC1B,kBAAkB,UAAUA,QAAM;EAClC,yBAAyB;EACzB,cAAc;EACd,iBAAiB;EACjB,sBAAsB;EACtB,wBAAwB;EACxB,gBAAgB,YAAY;EAC5B,uCAAuC;EACxC;AAED,KAAI,OACF,SAAQ,4BAA4B;AAEtC,QAAO;;AAGT,MAAa,sBAAsB;AACnC,SAAgB,cAAc,SAAc;AAC1C,QAAO;EACL,GAAG,iBAAiB;EACpB,iBAAiB,SAASA,QAAM;EAChC,kBAAkB,UAAUA,QAAM;EAClC,yBAAyB;EACzB,cAAc;EACd,wBAAwB;EACxB,uCAAuC;EACxC;;AAGH,MAAa,kBAAkB;AAC/B,MAAa,mBAAmB;AAChC,MAAa,oBAAoB,CAAC,YAAY,CAAC,KAAK,IAAI;;;;ACrDxD,IAAa,YAAb,cAA+B,MAAM;CACnC;CAEA,YAAY,SAAiB,UAAoB;AAC/C,QAAM,QAAQ;AACd,OAAK,WAAW;;;AAIpB,eAAsB,aAAa,GAAY,OAAgB;AAC7D,SAAQ,MAAM,mBAAmB,MAAM;AAEvC,KAAI,iBAAiB,WAAW;EAC9B,MAAM,YAAY,MAAM,MAAM,SAAS,MAAM;EAC7C,IAAIE;AACJ,MAAI;AACF,eAAY,KAAK,MAAM,UAAU;UAE7B;AACJ,eAAY;;AAEd,UAAQ,MAAM,eAAe,UAAU;AACvC,SAAO,EAAE,KACP,EACE,OAAO;GACL,SAAS;GACT,MAAM;GACP,EACF,EACD,MAAM,SAAS,OAChB;;AAGH,QAAO,EAAE,KACP,EACE,OAAO;EACL,SAAU,MAAgB;EAC1B,MAAM;EACP,EACF,EACD,IACD;;;;;AC1CH,eAAsB,kBAAkB;CACtC,MAAM,WAAW,MAAM,MACrB,GAAG,oBAAoB,6BACvB,EACE,SAAS,cAAc,MAAM,EAC9B,CACF;AAED,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAE9D,QAAQ,MAAM,SAAS,MAAM;;;;;ACP/B,eAAsB,gBAA6C;CACjE,MAAM,WAAW,MAAM,MAAM,GAAG,gBAAgB,qBAAqB;EACnE,QAAQ;EACR,SAAS,iBAAiB;EAC1B,MAAM,KAAK,UAAU;GACnB,WAAW;GACX,OAAO;GACR,CAAC;EACH,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,6BAA6B,SAAS;AAE5D,QAAQ,MAAM,SAAS,MAAM;;;;;ACjB/B,eAAsB,gBAAgB;CACpC,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,QAAQ,EAC1D,SAAS;EACP,eAAe,SAAS,MAAM;EAC9B,GAAG,iBAAiB;EACrB,EACF,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,6BAA6B,SAAS;AAE5D,QAAQ,MAAM,SAAS,MAAM;;;;;ACX/B,eAAsB,YAAY;CAChC,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,UAAU,EAC9D,SAAS,eAAe,MAAM,EAC/B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,wBAAwB,SAAS;AAEvD,QAAQ,MAAM,SAAS,MAAM;;;;;ACZ/B,MAAM,WAAW;AAEjB,eAAsB,mBAAmB;CACvC,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB;AAC/B,aAAW,OAAO;IACjB,IAAK;AAER,KAAI;EAUF,MAAM,SAFW
,OAPA,MAAM,MACrB,kFACA,EACE,QAAQ,WAAW,QACpB,CACF,EAE+B,MAAM,EAEf,MADH,mBACqB;AAEzC,MAAI,MACF,QAAO,MAAM;AAGf,SAAO;SAEH;AACJ,SAAO;WAED;AACN,eAAa,QAAQ;;;AAKzB,MAAM,kBAAkB;;;;AC5BxB,SAAgB,MAAM,IAAY;AAChC,QAAO,IAAI,SAAS,YAAY;AAC9B,aAAW,SAAS,GAAG;GACvB;;AAGJ,SAAgB,UAAU,OAA2C;AACnE,QAAO,UAAU,QAAQ,UAAU;;AAGrC,eAAsB,cAA6B;AAEjD,OAAM,SADS,MAAM,WAAW;;AAIlC,eAAsB,qBAAqB;CACzC,MAAM,WAAW,MAAM,kBAAkB;AACzC,OAAM,gBAAgB;AAEtB,SAAQ,KAAK,yBAAyB,WAAW;;;;;ACfnD,eAAsB,gBACpB,YACiB;CAGjB,MAAM,iBAAiB,WAAW,WAAW,KAAK;AAClD,SAAQ,MAAM,yCAAyC,cAAc,IAAI;AAEzE,QAAO,MAAM;EACX,MAAM,WAAW,MAAM,MACrB,GAAG,gBAAgB,4BACnB;GACE,QAAQ;GACR,SAAS,iBAAiB;GAC1B,MAAM,KAAK,UAAU;IACnB,WAAW;IACX,aAAa,WAAW;IACxB,YAAY;IACb,CAAC;GACH,CACF;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,SAAM,MAAM,cAAc;AAC1B,WAAQ,MAAM,gCAAgC,MAAM,SAAS,MAAM,CAAC;AAEpE;;EAGF,MAAM,OAAO,MAAM,SAAS,MAAM;AAClC,UAAQ,MAAM,kCAAkC,KAAK;EAErD,MAAM,EAAE,iBAAiB;AAEzB,MAAI,aACF,QAAO;MAGP,OAAM,MAAM,cAAc;;;;;;ACrChC,MAAM,wBAAwB,GAAG,SAAS,MAAM,mBAAmB,OAAO;AAE1E,SAAS,iBAAiB,OAAe;AACvC,QAAO,GAAG,UAAU,MAAM,mBAAmB,MAAM;;AAGrD,eAAsB,oBAAoB;CACxC,MAAM,EAAE,OAAO,eAAe,MAAM,iBAAiB;AACrD,OAAM,eAAe;AAGrB,SAAQ,MAAM,6CAA6C;AAC3D,KAAI,MAAM,UACR,SAAQ,KAAK,kBAAkB,MAAM;CAGvC,MAAM,mBAAmB,aAAa,MAAM;AAC5C,aAAY,YAAY;AACtB,UAAQ,MAAM,2BAA2B;AACzC,MAAI;GACF,MAAM,EAAE,mBAAU,MAAM,iBAAiB;AACzC,SAAM,eAAeC;AACrB,WAAQ,MAAM,0BAA0B;AACxC,OAAI,MAAM,UACR,SAAQ,KAAK,4BAA4BA,QAAM;WAG5C,OAAO;AACZ,WAAQ,MAAM,oCAAoC,MAAM;AACxD,SAAM;;IAEP,gBAAgB;;AAOrB,eAAsB,iBACpB,SACe;AACf,KAAI;EACF,MAAM,cAAc,MAAM,iBAAiB;AAE3C,MAAI,eAAe,CAAC,SAAS,OAAO;AAClC,SAAM,cAAc;AACpB,OAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,YAAY;AAE5C,SAAM,SAAS;AAEf;;AAGF,UAAQ,KAAK,0CAA0C;EACvD,MAAM,WAAW,MAAM,eAAe;AACtC,UAAQ,MAAM,yBAAyB,SAAS;AAEhD,UAAQ,KACN,0BAA0B,SAAS,UAAU,OAAO,SAAS,mBAC9D;EAED,MAAM,QAAQ,MAAM,gBAAgB,SAAS;AAC7C,QAAM,iBAAiB,MAAM;AAC7B,QAAM,cAAc;AAEpB,MAAI,MAAM,UACR,SAAQ,KAAK,iBAAiB,MAAM;AAEtC,QAAM,SAAS;UAEV,OAAO;AACZ,MAAI,iBAAiB,WAAW;AAC9B,WAAQ,MAAM,+BAA+B,MAAM,MAAM,SAAS,MAAM,CAAC;AACzE,SAAM;;AAGR,UAAQ,MAAM,+BAA+B,MAAM;AACnD,QAAM;;;AAIV,eAAe,UAAU;CACvB,MAAM,OAAO,MAAM,eAAe;AAClC,SAAQ,KAAK,gBAAgB,KAAK,QAAQ;;;;;AClF5C,eAAsB,QAAQ,SAAwC;AACpE,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,iBAAiB,EAAE,OAAO,MAAM,CAAC;AACvC,SAAQ,QAAQ,2BAA2B,MAAM,kBAAkB;;AAGrE,MAAa,OAAO,cAAc;CAChC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,WAAW;GACT,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,QAAQ;GACb,SAAS,KAAK;GACd,WAAW,KAAK;GACjB,CAAC;;CAEL,CAAC;;;;AC/CF,eAAsB,kBAAiD;CACrE,MAAM,WAAW,MAAM,MAAM,GAAG,oBAAoB,yBAAyB,EAC3E,SAAS,cAAc,MAAM,EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAG9D,QAAQ,MAAM,SAAS,MAAM;;;;;ACD/B,MAAa,aAAa,cAAc;CACtC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,MAAM;AACV,QAAM,aAAa;AACnB,QAAM,kBAAkB;AACxB,MAAI;GACF,MAAM,QAAQ,MAAM,iBAAiB;GACrC,MAAM,UAAU,MAAM,gBAAgB;GACtC,MAAM,eAAe,QAAQ;GAC7B,MAAM,cAAc,eAAe,QAAQ;GAC3C,MAAM,qBACF,eAAe,IAAK,cAAc,eAAgB,MAAM;GAC5D,MAAM,0BAA0B,QAAQ;GAGxC,SAAS,eAAe,MAAc,MAA+B;AACnE,QAAI,CAAC,KACH,QAAO,GAAG,KAAK;IACjB,MAAM,QAAQ,KAAK;IACnB,MAAM,OAAO,QAAQ,KAAK;IAC1B,MAAM,cAAc,QAAQ,IAAK,OAAO,QAAS,MAAM;IACvD,MAAM,mBAAmB,KAAK;AAC9B,WAAO,GAAG,KAAK,IAAI,KAAK,GAAG,MAAM,SAAS,YAAY,QAAQ,EAAE,CAAC,UAAU,iBAAiB,QAAQ,EAAE,CAAC;;GAGzG,MAAM,cAAc,YAAY,YAAY,GAAG,aAAa,SAAS,mBAAmB,QAAQ,EAAE,CAAC,UAAU,wBAAwB,QAAQ,EAAE,CAAC;GAChJ,MAAM,WAAW,eAAe,QAAQ,MAAM,gBAAgB,KAAK;GACnE,MAAM,kBAAkB,eACtB,eACA,MAAM,gBAAgB,YACvB;AAED,WAAQ,IACN,wBAAwB,MAAM,aAAa,mBACxB,MAAM,iBAAiB,iBAEnC,YAAY,MACZ,SAAS,MACT,kBACR;WAEI,KAAK;AACV,WAAQ,MAAM,kCAAkC,IAAI;AAC
pD,WAAQ,KAAK,EAAE;;;CAGpB,CAAC;;;;AChCF,eAAe,oBAAqC;AAClD,KAAI;EACF,MAAM,kBAAkB,IAAI,IAAI,mBAAmB,OAAO,KAAK,IAAI,CAAC;AAMpE,SAHoB,KAAK,MAAM,MAAM,GAAG,SAAS,gBAAgB,CAAC,CAG/C;SAEf;AACJ,SAAO;;;AAIX,SAAS,iBAAiB;CACxB,MAAM,QAAQ,OAAO,QAAQ;AAE7B,QAAO;EACL,MAAM,QAAQ,QAAQ;EACtB,SAAS,QAAQ,IAAI,UAAU,QAAQ,QAAQ,MAAM,EAAE;EACvD,UAAU,GAAG,UAAU;EACvB,MAAM,GAAG,MAAM;EAChB;;AAGH,eAAe,mBAAqC;AAClD,KAAI;AAEF,MAAI,EADU,MAAM,GAAG,KAAK,MAAM,kBAAkB,EACzC,QAAQ,CACjB,QAAO;AAGT,UADgB,MAAM,GAAG,SAAS,MAAM,mBAAmB,OAAO,EACnD,MAAM,CAAC,SAAS;SAE3B;AACJ,SAAO;;;AAIX,eAAe,eAAmC;CAChD,MAAM,CAAC,SAAS,eAAe,MAAM,QAAQ,IAAI,CAC/C,mBAAmB,EACnB,kBAAkB,CACnB,CAAC;AAEF,QAAO;EACL;EACA,SAAS,gBAAgB;EACzB,OAAO;GACL,SAAS,MAAM;GACf,mBAAmB,MAAM;GAC1B;EACD;EACD;;AAGH,SAAS,oBAAoB,MAAuB;AAClD,SAAQ,KAAK;;WAEJ,KAAK,QAAQ;WACb,KAAK,QAAQ,KAAK,GAAG,KAAK,QAAQ,QAAQ,IAAI,KAAK,QAAQ,SAAS,GAAG,KAAK,QAAQ,KAAK;;;aAGvF,KAAK,MAAM,QAAQ;uBACT,KAAK,MAAM,kBAAkB;;gBAEpC,KAAK,cAAc,QAAQ,OAAO;;AAGlD,SAAS,mBAAmB,MAAuB;AAEjD,SAAQ,IAAI,KAAK,UAAU,MAAM,MAAM,EAAE,CAAC;;AAG5C,eAAsB,SAAS,SAAyC;CACtE,MAAM,YAAY,MAAM,cAAc;AAEtC,KAAI,QAAQ,KACV,oBAAmB,UAAU;KAG7B,qBAAoB,UAAU;;AAIlC,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM,EACJ,MAAM;EACJ,MAAM;EACN,SAAS;EACT,aAAa;EACd,EACF;CACD,IAAI,EAAE,QAAQ;AACZ,SAAO,SAAS,EACd,MAAM,KAAK,MACZ,CAAC;;CAEL,CAAC;;;;AC/HF,SAAgB,mBAAyB;AACvC,KAAI,OAAO,QAAQ,YACjB;AAEF,KAAI;EACF,MAAM,SAAS,IAAI,OAAO;EAC1B,MAAM,0BAAU,IAAI,KAAyB;AAqD7C,sBA/CmB;GACjB,SACE,SACA,SACA;AACA,QAAI;KACF,MAAM,SACF,OAAO,QAAQ,WAAW,WACxB,IAAI,IAAI,QAAQ,OAAO,GACtB,QAAQ;KAIf,MAAM,MAHM,eAGI,OAAO,UAAU,CAAC;KAClC,MAAM,WAAW,OAAO,IAAI,SAAS,IAAI,MAAM;AAC/C,SAAI,CAAC,UAAU;AACb,cAAQ,MAAM,sBAAsB,OAAO,WAAW;AACtD,aAAQ,OAAiC,SAAS,SAAS,QAAQ;;KAErE,IAAI,QAAQ,QAAQ,IAAI,SAAS;AACjC,SAAI,CAAC,OAAO;AACV,cAAQ,IAAI,WAAW,SAAS;AAChC,cAAQ,IAAI,UAAU,MAAM;;KAE9B,IAAI,QAAQ;AACZ,SAAI;MACF,MAAM,IAAI,IAAI,IAAI,SAAS;AAC3B,cAAQ,GAAG,EAAE,SAAS,IAAI,EAAE;aAExB;AAGN,aAAQ,MAAM,qBAAqB,OAAO,SAAS,OAAO,QAAQ;AAClE,YAAQ,MAAgC,SAAS,SAAS,QAAQ;YAE9D;AACJ,YAAQ,OAAiC,SAAS,SAAS,QAAQ;;;GAGvE,QAAQ;AACN,WAAO,OAAO,OAAO;;GAEvB,UAAU;AACR,WAAO,OAAO,SAAS;;GAE1B,CAEuD;AACxD,UAAQ,MAAM,mDAAmD;UAE5D,KAAK;AACV,UAAQ,MAAM,wBAAwB,IAAI;;;;;;AC9D9C,SAAS,WAAsB;CAC7B,MAAM,EAAE,UAAU,MAAM,QAAQ;AAEhC,KAAI,aAAa,SAAS;AACxB,MAAI;GACF,MAAM,UAAU,oDAAoD,KAAK;AAGzE,OAFsB,SAAS,SAAS,EAAE,OAAO,QAAQ,CAAC,CAAC,UAAU,CAEnD,aAAa,CAAC,SAAS,iBAAiB,CACxD,QAAO;UAGL;AACJ,UAAO;;AAGT,SAAO;QAEJ;EACH,MAAM,YAAY,IAAI;AACtB,MAAI,WAAW;AACb,OAAI,UAAU,SAAS,MAAM,CAC3B,QAAO;AACT,OAAI,UAAU,SAAS,OAAO,CAC5B,QAAO;AACT,OAAI,UAAU,SAAS,OAAO,CAC5B,QAAO;;AAGX,SAAO;;;;;;;;;;AAWX,SAAgB,kBACd,SACA,eAAuB,IACf;CACR,MAAM,QAAQ,UAAU;CACxB,MAAM,kBAAkB,OAAO,QAAQ,QAAQ,CAAC,QAC7C,GAAG,WAAW,UAAU,OAC1B;CAED,IAAIC;AAEJ,SAAQ,OAAR;EACE,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,QAAQ,IAAI,KAAK,QAAQ,CAC/C,KAAK,KAAK;AACb;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,OAAO,IAAI,GAAG,QAAQ,CAC5C,KAAK,MAAM;AACd;EAEF,KAAK;AACH,kBAAe,gBACZ,KAAK,CAAC,KAAK,WAAW,WAAW,IAAI,GAAG,QAAQ,CAChD,KAAK,KAAK;AACb;EAEF,SAAS;GAEP,MAAM,cAAc,gBACjB,KAAK,CAAC,KAAK,WAAW,GAAG,IAAI,GAAG,QAAQ,CACxC,KAAK,IAAI;AACZ,kBAAe,gBAAgB,SAAS,IAAI,UAAU,gBAAgB;AACtE;;;AAIJ,KAAI,gBAAgB,aAElB,QAAO,GAAG,eADQ,UAAU,QAAQ,QAAQ,SACP;AAGvC,QAAO,gBAAgB;;;;;ACvFzB,eAAsB,gBAAgB;AAKpC,KAAI,CAJa,MAAM,QAAQ,OAAO,4BAA4B,EAChE,MAAM,WACP,CAAC,CAGA,OAAM,IAAI,UACR,oBACA,SAAS,KAAK,EAAE,SAAS,oBAAoB,EAAE,EAAE,QAAQ,KAAK,CAAC,CAChE;;;;;ACNL,eAAsB,eAAe,SAAc;AACjD,KAAIC,QAAM,qBAAqB,OAC7B;CAEF,MAAM,MAAM,KAAK,KAAK;AAEtB,KAAI,CAACA,QAAM,sBAAsB;AAC/B,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,MAAMA,QAAM,wBAAwB;AAE5D,KAAI,iBAAiBA,QAAM,kBAAkB;AAC3C,UAAM,uBAAuB;AAC7B;;CAGF,MAAM,kBAAkB,
KAAK,KAAKA,QAAM,mBAAmB,eAAe;AAE1E,KAAI,CAACA,QAAM,eAAe;AACxB,UAAQ,KACN,qCAAqC,gBAAgB,gBACtD;AACD,QAAM,IAAI,UACR,uBACA,SAAS,KAAK,EAAE,SAAS,uBAAuB,EAAE,EAAE,QAAQ,KAAK,CAAC,CACnE;;CAGH,MAAM,aAAa,kBAAkB;AACrC,SAAQ,KACN,+BAA+B,gBAAgB,+BAChD;AACD,OAAM,MAAM,WAAW;AAEvB,SAAM,uBAAuB;AAC7B,SAAQ,KAAK,qDAAqD;;;;;AClCpE,MAAM,eAAe;CACnB,kBAAkB,OAAO;CACzB,mBAAmB,OAAO;CAC1B,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACxB,iBAAiB,OAAO;CACzB;AAUD,MAAM,gCAAgB,IAAI,KAAsB;;;;AAKhD,SAAS,yBAAyB,WAA4B,SAAkB,WAAyD;CACvI,IAAI,SAAS;AACb,MAAK,MAAM,YAAY,WAAW;AAChC,YAAU,UAAU;AACpB,YAAU,QAAQ,OAAO,KAAK,UAAU,SAAS,CAAC,CAAC;;AAErD,WAAU,UAAU;AACpB,QAAO;;;;;AAMT,SAAS,4BAA4B,cAAkC,SAA0B;CAC/F,IAAI,SAAS;AACb,MAAK,MAAM,QAAQ,aACjB,KAAI,KAAK,SAAS,YAChB,WAAU,QAAQ,OAAO,KAAK,UAAU,IAAI,CAAC,SAAS;UAE/C,KAAK,KACZ,WAAU,QAAQ,OAAO,KAAK,KAAK,CAAC;AAGxC,QAAO;;;;;AAMT,SAAS,uBAAuB,SAAkB,SAAkB,WAAyD;CAC3H,MAAM,mBAAmB;CACzB,MAAM,gBAAgB;CACtB,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,EAAE;AAClD,MAAI,OAAO,UAAU,SACnB,WAAU,QAAQ,OAAO,MAAM,CAAC;AAElC,MAAI,QAAQ,OACV,WAAU;AAEZ,MAAI,QAAQ,aACV,WAAU,yBACR,OACA,SACA,UACD;AAEH,MAAI,QAAQ,aAAa,MAAM,QAAQ,MAAM,CAC3C,WAAU,4BACR,OACA,QACD;;AAGL,QAAO;;;;;AAMT,SAAS,gBAAgB,UAA0B,SAAkB,WAAyD;AAC5H,KAAI,SAAS,WAAW,EACtB,QAAO;CAET,IAAI,YAAY;AAChB,MAAK,MAAM,WAAW,SACpB,cAAa,uBAAuB,SAAS,SAAS,UAAU;AAGlE,cAAa;AACb,QAAO;;;;;AAMT,eAAe,sBAAsB,UAAoC;AACvE,KAAI,cAAc,IAAI,SAAS,EAAE;EAC/B,MAAM,SAAS,cAAc,IAAI,SAAS;AAC1C,MAAI,OACF,QAAO;;CAIX,MAAM,oBAAoB;AAC1B,KAAI,EAAE,qBAAqB,eAAe;EACxC,MAAM,iBAAkB,MAAM,aAAa,YAAY;AACvD,gBAAc,IAAI,UAAU,eAAe;AAC3C,SAAO;;CAGT,MAAM,iBAAkB,MAAM,aAAa,oBAAoB;AAC/D,eAAc,IAAI,UAAU,eAAe;AAC3C,QAAO;;;;;AAMT,SAAgB,sBAAsB,OAAsB;AAC1D,QAAO,MAAM,aAAa,aAAa;;;;;AAMzC,SAAS,kBAAkB,OAAc;AACvC,QAAO,MAAM,OAAO,mBAAmB,MAAM,OAAO,UAChD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV,GACD;EACE,UAAU;EACV,UAAU;EACV,SAAS;EACT,UAAU;EACV,UAAU;EACV,SAAS;EACV;;;;;AAMP,SAAS,yBAAyB,KAAa,MAAe,SAGnD;CACT,MAAM,EAAE,SAAS,cAAc;CAC/B,IAAI,SAAS,UAAU;AAGvB,KAAI,OAAO,SAAS,YAAY,SAAS,KACvC,QAAO;CAIT,MAAM,QAAQ;CAOd,MAAM,YAAY;CAClB,MAAM,YAAY,MAAM,QAAQ;CAChC,IAAI,YAAY,MAAM,eAAe;AAGrC,KAAI,MAAM,QAAQ,MAAM,QAAQ,MAAM,KAAK,EAAE;AAC3C,YAAU,UAAU;AACpB,OAAK,MAAM,QAAQ,MAAM,MAAM;AAC7B,aAAU,UAAU;AACpB,aAAU,QAAQ,OAAO,OAAO,KAAK,CAAC,CAAC;;;AAK3C,KAAI,UAAU,SAAS,IAAI,CACzB,aAAY,UAAU,MAAM,GAAG,GAAG;CAIpC,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG;AAC1C,WAAU,QAAQ,OAAO,KAAK,CAAC;CAG/B,MAAM,eAAe,IAAI,IAAI;EAAC;EAAQ;EAAe;EAAO,CAAC;AAC7D,MAAK,MAAM,gBAAgB,OAAO,KAAK,MAAM,CAC3C,KAAI,CAAC,aAAa,IAAI,aAAa,EAAE;EACnC,MAAM,gBAAgB,MAAM;EAC5B,MAAM,eACF,OAAO,kBAAkB,WACvB,gBAEE,KAAK,UAAU,cAAc;AAErC,YAAU,QAAQ,OAAO,GAAG,aAAa,GAAG,eAAe,CAAC;;AAIhE,QAAO;;;;;AAMT,SAAS,0BAA0B,YAAqB,SAAkB,WAAyD;AACjI,KAAI,CAAC,cAAc,OAAO,eAAe,SACvC,QAAO;CAGT,MAAM,SAAS;CACf,IAAI,SAAS;AAEb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,CAC/C,KAAI,QAAQ,cAAc;EACxB,MAAM,aAAa;AACnB,MAAI,OAAO,KAAK,WAAW,CAAC,SAAS,GAAG;AACtC,aAAU,UAAU;AACpB,QAAK,MAAM,WAAW,OAAO,KAAK,WAAW,CAC3C,WAAU,yBAAyB,SAAS,WAAW,UAAU;IAC/D;IACA;IACD,CAAC;;QAIH;EACH,MAAM,YACF,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,MAAM;AAC7D,YAAU,QAAQ,OAAO,GAAG,IAAI,GAAG,YAAY,CAAC;;AAIpD,QAAO;;;;;AAMT,SAAS,oBAAoB,MAAY,SAAkB,WAAyD;CAClH,IAAI,SAAS,UAAU;CACvB,MAAM,OAAO,KAAK;CAClB,MAAM,QAAQ,KAAK;CACnB,IAAI,QAAQ,KAAK,eAAe;AAChC,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,MAAM,GAAG,GAAG;CAE5B,MAAM,OAAO,GAAG,MAAM,GAAG;AACzB,WAAU,QAAQ,OAAO,KAAK,CAAC;AAC/B,KACE,OAAO,KAAK,eAAe,YACxB,KAAK,eAAe,KAEvB,WAAU,0BAA0B,KAAK,YAAY,SAAS,UAAU;AAE1E,QAAO;;;;;AAMT,SAAgB,kBAAkB,OAAoB,SAAkB,WAAyD;CAC/H,IAAI,iBAAiB;AACrB,MAAK,MAAM,QAAQ,MACjB,mBAAkB,oBAAoB,MAAM,SAAS,UAAU;AAEjE,mBAAkB,UAAU;AAC5B,QAAO;;;;;AAMT,
eAAsB,cAAc,SAAiC,OAA0D;CAE7H,MAAM,YAAY,sBAAsB,MAAM;CAG9C,MAAM,UAAU,MAAM,sBAAsB,UAAU;CAEtD,MAAM,qBAAqB,QAAQ;CACnC,MAAM,gBAAgB,mBAAmB,QACvC,QAAO,IAAI,SAAS,YACrB;CACD,MAAM,iBAAiB,mBAAmB,QACxC,QAAO,IAAI,SAAS,YACrB;CAED,MAAM,YAAY,kBAAkB,MAAM;CAC1C,IAAI,cAAc,gBAAgB,eAAe,SAAS,UAAU;AACpE,KAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,EAC1C,gBAAe,kBAAkB,QAAQ,OAAO,SAAS,UAAU;CAErE,MAAM,eAAe,gBAAgB,gBAAgB,SAAS,UAAU;AAExE,QAAO;EACL,OAAO;EACP,QAAQ;EACT;;;;;ACrTH,eAAsB,sBAAsB,SAAiC;AAC3E,KAAI,CAAC,MAAM,aACT,OAAM,IAAI,MAAM,0BAA0B;CAE5C,MAAM,eAAe,QAAQ,SAAS,MACpC,MACE,OAAO,EAAE,YAAY,YAClB,EAAE,SAAS,MAAK,QAAKC,IAAE,SAAS,YAAY,CAClD;CAID,MAAM,cAAc,QAAQ,SAAS,MAAK,QACxC,CAAC,aAAa,OAAO,CAAC,SAAS,IAAI,KAAK,CACzC;CAGD,MAAMC,UAAkC;EACtC,GAAG,eAAe,OAAO,aAAa;EACtC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,oBAAoB;EACxE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,qCAAqC,SAAS;AAC5D,QAAM,IAAI,UAAU,qCAAqC,SAAS;;AAGpE,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;;;;AC3B/B,eAAsBC,mBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,IAAI,UAAU,MAAM,EAAE,IAAI,MAA8B;AACxD,SAAQ,MAAM,oBAAoB,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CAAC;CAGtE,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACvC,UAAS,MAAM,OAAO,QAAQ,MAC/B;AAGD,KAAI;AACF,MAAI,eAAe;GACjB,MAAM,aAAa,MAAM,cAAc,SAAS,cAAc;AAC9D,WAAQ,KAAK,wBAAwB,WAAW;QAGhD,SAAQ,KAAK,sDAAsD;UAGhE,OAAO;AACZ,UAAQ,KAAK,oCAAoC,MAAM;;AAGzD,KAAI,MAAM,cACR,OAAM,eAAe;AAEvB,KAAI,UAAU,QAAQ,WAAW,EAAE;AACjC,YAAU;GACR,GAAG;GACH,YAAY,eAAe,aAAa,OAAO;GAChD;AACD,UAAQ,MAAM,sBAAsB,KAAK,UAAU,QAAQ,WAAW,CAAC;;CAGzE,MAAM,WAAW,MAAM,sBAAsB,QAAQ;AAErD,KAAIC,iBAAe,SAAS,EAAE;AAC5B,UAAQ,MAAM,2BAA2B,KAAK,UAAU,SAAS,CAAC;AAClE,SAAO,EAAE,KAAK,SAAS;;AAGzB,SAAQ,MAAM,qBAAqB;AACnC,QAAO,UAAU,GAAG,OAAO,WAAW;AACpC,aAAW,MAAM,SAAS,UAAU;AAClC,WAAQ,MAAM,oBAAoB,KAAK,UAAU,MAAM,CAAC;AACxD,SAAM,OAAO,SAAS,MAAoB;;GAE5C;;AAGJ,SAASA,iBAAe,UAAiG;AACvH,QAAO,OAAO,OAAO,UAAU,UAAU;;;;;AChE3C,MAAa,mBAAmB,IAAI,MAAM;AAE1C,iBAAiB,KAAK,KAAK,OAAO,MAAM;AACtC,KAAI;AACF,SAAO,MAAMC,mBAAiB,EAAE;UAE3B,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACXF,eAAsB,iBAAiB,SAA2B;AAChE,KAAI,CAAC,MAAM,aACT,OAAM,IAAI,MAAM,0BAA0B;CAE5C,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,cAAc;EAClE,QAAQ;EACR,SAAS,eAAe,MAAM;EAC9B,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,GACZ,OAAM,IAAI,UAAU,+BAA+B,SAAS;AAE9D,QAAQ,MAAM,SAAS,MAAM;;;;;ACR/B,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;EACF,MAAM,SAAS,MAAM,EAAE,IAAI,MAAwB;EACnD,MAAM,WAAW,MAAM,iBAAiB,OAAO;AAE/C,SAAO,EAAE,KAAK,SAAS;UAElB,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACNF,MAAMC,gBAA6C;CACjD,mBAAmB;EACjB,oBAAoB;EACpB,wBAAwB;EACxB,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,qBAAqB;EACnB,oBAAoB;EACpB,wBAAwB;EACxB,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,mBAAmB;EACjB,oBAAoB;EACpB,wBAAwB;EACxB,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,mBAAmB;EACjB,oBAAoB;EACpB,wBAAwB;EACxB,2BAA2B;GAAC;GAAO;GAAU;GAAO;EACpD,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,UAAU;EACR,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,WAAW;EACT,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,SAAS;EACP,cAAc;EACd,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,iBAAiB;EACf,cAAc;EACd,wBAAwB;EACxB,2BAA2B;GAAC;GAAO;GAAU;GAAO;EACpD,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,iBAAiB;EACf,cAAc;EACd,wBAAwB;EACxB,2BAA2B;GAAC;GAAO;GAAU;GAAQ;GAAQ;EAC7D,oBAAoB;EACpB,2BAA2B;EAC5B;CACD,WAAW;EACT,cAAc;EACd,oBAAoB;EACrB;CACD,WAAW;EACT,cAAc;EACd,oBAAoB;EACrB;CACF;;;;;;AAOD,SAAgB,eAAe,SAA8B;AAE3D,KAAI,cAAc,SAChB,QAAO,cAAc;CAIvB,MAAM,UAAU,OAAO,QAAQ,cAAc,CAAC,MAC3C,GAAG,MAAM,EAAE,GAAG,SAAS,EAAE,GAAG,OAC9B;AACD,MAAK,MAAM,CAAC,KAAK,WAAW,QAC1B,KAAI,QAAQ,WAAW,IAAI,CACzB,QAAO;AAKX,KAAI,QAAQ,WAAW,SAAS,CAC9B,QAAO;EAAE,oBAAoB;EAAM,oBAAoB;EAAO;AAGhE,QAAO,EAAE
;;;;;ACtGX,SAAgB,+BACd,cACkC;AAClC,KAAI,iBAAiB,KACnB,QAAO;AAQT,QANsB;EACpB,MAAM;EACN,QAAQ;EACR,YAAY;EACZ,gBAAgB;EACjB,CACoB;;;;;ACNvB,SAAgB,kBACd,SACwB;CACxB,MAAM,QAAQ,mBAAmB,QAAQ,MAAM;CAC/C,MAAM,cAAc,eAAe,MAAM;CACzC,MAAM,qBAAqB,YAAY,uBAAuB;CAE9D,MAAM,WAAW,mCACf,QAAQ,UACR,QAAQ,OACT;AAGD,KAAI,oBAAoB;EACtB,MAAM,gBAAgB,SAAS,MAAK,MAAK,EAAE,SAAS,SAAS;AAC7D,MAAI,cACF,eAAc,wBAAwB,EAAE,MAAM,aAAa;;CAI/D,MAAM,QAAQ,gCAAgC,QAAQ,MAAM;AAG5D,KAAI,sBAAsB,SAAS,MAAM,SAAS,EAChD,OAAM,MAAM,SAAS,GAAG,wBAAwB,EAAE,MAAM,aAAa;CAIvE,IAAIC;AACJ,KAAI,QAAQ,UAAU,cACpB,oBAAmB;UAEZ,YAAY,iBAAiB,QAAQ,YAAY,uBACxD,oBAAmB,YAAY;AAGjC,QAAO;EACL;EACA;EACA,YAAY,QAAQ;EACpB,MAAM,QAAQ;EACd,QAAQ,QAAQ;EAChB,aAAa,QAAQ;EACrB,OAAO,QAAQ;EACf,MAAM,QAAQ,UAAU;EACxB;EACA,aAAa,qCAAqC,QAAQ,YAAY;EACtE,QAAQ,EAAE,SAAS,OAAO;EAC1B,GAAI,oBAAoB,EAAE,kBAAkB;EAC7C;;AAGH,SAAS,mBAAmB,OAAuB;CAGjD,MAAM,qBAAqB,MAAM,MAC/B,+CACD;AACD,KAAI,mBACF,QAAO,GAAG,mBAAmB,GAAG,GAAG,mBAAmB;AAYxD,MAAK,MAAM,WAVY;EACrB;EACA;EACA;EACA;EACA;EACA;EACA;EACD,EAEqC;EACpC,MAAM,QAAQ,MAAM,MAAM,QAAQ;AAClC,MAAI,MACF,QAAO,MAAM;;AAIjB,QAAO;;AAGT,SAAS,mCACP,mBACA,QACgB;CAChB,MAAM,iBAAiB,mBAAmB,OAAO;CAEjD,MAAM,gBAAgB,kBAAkB,SAAQ,YAC9C,QAAQ,SAAS,SACb,kBAAkB,QAAQ,GAC1B,uBAAuB,QAAQ,CACpC;AAED,QAAO,CAAC,GAAG,gBAAgB,GAAG,cAAc;;AAG9C,SAAS,mBACP,QACgB;AAChB,KAAI,CAAC,OACH,QAAO,EAAE;AAGX,KAAI,OAAO,WAAW,SACpB,QAAO,CAAC;EAAE,MAAM;EAAU,SAAS;EAAQ,CAAC;KAI5C,QAAO,CAAC;EAAE,MAAM;EAAU,SADP,OAAO,KAAI,UAAS,MAAM,KAAK,CAAC,KAAK,OAAO;EAChB,CAAC;;AAIpD,SAAS,kBAAkB,SAA+C;CACxE,MAAMC,cAA8B,EAAE;AAEtC,KAAI,MAAM,QAAQ,QAAQ,QAAQ,EAAE;EAClC,MAAM,mBAAmB,QAAQ,QAAQ,QACtC,UACC,MAAM,SAAS,cAClB;EACD,MAAM,cAAc,QAAQ,QAAQ,QAClC,UAAS,MAAM,SAAS,cACzB;AAGD,OAAK,MAAM,SAAS,iBAClB,aAAY,KAAK;GACf,MAAM;GACN,cAAc,MAAM;GACpB,SAAS,WAAW,MAAM,QAAQ;GACnC,CAAC;AAGJ,MAAI,YAAY,SAAS,EACvB,aAAY,KAAK;GACf,MAAM;GACN,SAAS,WAAW,YAAY;GACjC,CAAC;OAIJ,aAAY,KAAK;EACf,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CAAC;AAGJ,QAAO;;AAGT,SAAS,uBACP,SACgB;AAChB,KAAI,CAAC,MAAM,QAAQ,QAAQ,QAAQ,CACjC,QAAO,CACL;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;CAGH,MAAM,gBAAgB,QAAQ,QAAQ,QACnC,UAA0C,MAAM,SAAS,WAC3D;CAED,MAAM,aAAa,QAAQ,QAAQ,QAChC,UAAuC,MAAM,SAAS,OACxD;CAED,MAAM,iBAAiB,QAAQ,QAAQ,QACpC,UAA2C,MAAM,SAAS,WAC5D;CAGD,MAAM,iBAAiB,CACrB,GAAG,WAAW,KAAI,MAAK,EAAE,KAAK,EAC9B,GAAG,eAAe,KAAI,MAAK,EAAE,SAAS,CACvC,CAAC,KAAK,OAAO;AAEd,QAAO,cAAc,SAAS,IAC1B,CACE;EACE,MAAM;EACN,SAAS,kBAAkB;EAC3B,YAAY,cAAc,KAAI,aAAY;GACxC,IAAI,QAAQ;GACZ,MAAM;GACN,UAAU;IACR,MAAM,QAAQ;IACd,WAAW,KAAK,UAAU,QAAQ,MAAM;IACzC;GACF,EAAE;EACJ,CACF,GACD,CACE;EACE,MAAM;EACN,SAAS,WAAW,QAAQ,QAAQ;EACrC,CACF;;AAGP,SAAS,WACP,SAGoC;AACpC,KAAI,OAAO,YAAY,SACrB,QAAO;AAET,KAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,QAAO;AAIT,KAAI,CADa,QAAQ,MAAK,UAAS,MAAM,SAAS,QAAQ,CAE5D,QAAO,QACJ,QACE,UACC,MAAM,SAAS,UAAU,MAAM,SAAS,WAC3C,CACA,KAAI,UAAU,MAAM,SAAS,SAAS,MAAM,OAAO,MAAM,SAAU,CACnE,KAAK,OAAO;CAGjB,MAAMC,eAAmC,EAAE;AAC3C,MAAK,MAAM,SAAS,QAClB,SAAQ,MAAM,MAAd;EACE,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAM,CAAC;AAErD;EAEF,KAAK;AACH,gBAAa,KAAK;IAAE,MAAM;IAAQ,MAAM,MAAM;IAAU,CAAC;AAEzD;EAEF,KAAK;AACH,gBAAa,KAAK;IAChB,MAAM;IACN,WAAW,EACT,KAAK,QAAQ,MAAM,OAAO,WAAW,UAAU,MAAM,OAAO,QAC7D;IACF,CAAC;AAEF;;AAKN,QAAO;;AAGT,SAAS,gCACP,gBACyB;AACzB,KAAI,CAAC,eACH;AAEF,QAAO,eAAe,KAAI,UAAS;EACjC,MAAM;EACN,UAAU;GACR,MAAM,KAAK;GACX,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB;EACF,EAAE;;AAGL,SAAS,qCACP,qBACuC;AACvC,KAAI,CAAC,oBACH;AAGF,SAAQ,oBAAoB,MAA5B;EACE,KAAK,OACH,QAAO;EAET,KAAK,MACH,QAAO;EAET,KAAK;AACH,OAAI,oBAAoB,KACtB,QAAO;IACL,MAAM;IACN,UAAU,EAAE,MAAM,oBAAoB,MAAM;IAC7C;AAEH;EAEF,KAAK,OACH,QAAO;EAET,QACE;;;AAON,SAAgB,qBACd,UACmB;CAEnB,MAAMC,gBAA2C,EAAE;CACnD,MAAM
C,mBAAiD,EAAE;CACzD,IAAIC,aACA;AACJ,cAAa,SAAS,QAAQ,IAAI,iBAAiB;AAGnD,MAAK,MAAM,UAAU,SAAS,SAAS;EACrC,MAAM,aAAa,uBAAuB,OAAO,QAAQ,QAAQ;EACjE,MAAM,gBAAgB,0BAA0B,OAAO,QAAQ,WAAW;AAE1E,gBAAc,KAAK,GAAG,WAAW;AACjC,mBAAiB,KAAK,GAAG,cAAc;AAGvC,MAAI,OAAO,kBAAkB,gBAAgB,eAAe,OAC1D,cAAa,OAAO;;AAMxB,QAAO;EACL,IAAI,SAAS;EACb,MAAM;EACN,MAAM;EACN,OAAO,SAAS;EAChB,SAAS,CAAC,GAAG,eAAe,GAAG,iBAAiB;EAChD,aAAa,+BAA+B,WAAW;EACvD,eAAe;EACf,OAAO;GACL,eACG,SAAS,OAAO,iBAAiB,MAC/B,SAAS,OAAO,uBAAuB,iBAAiB;GAC7D,eAAe,SAAS,OAAO,qBAAqB;GACpD,GAAI,SAAS,OAAO,uBAAuB,kBACrC,UAAa,EACjB,yBACE,SAAS,MAAM,sBAAsB,eACxC;GACF;EACF;;AAGH,SAAS,uBACP,gBAC2B;AAC3B,KAAI,OAAO,mBAAmB,SAC5B,QAAO,CAAC;EAAE,MAAM;EAAQ,MAAM;EAAgB,CAAC;AAGjD,KAAI,MAAM,QAAQ,eAAe,CAC/B,QAAO,eACJ,QAAQ,SAA2B,KAAK,SAAS,OAAO,CACxD,KAAI,UAAS;EAAE,MAAM;EAAQ,MAAM,KAAK;EAAM,EAAE;AAGrD,QAAO,EAAE;;AAGX,SAAS,0BACP,WAC8B;AAC9B,KAAI,CAAC,UACH,QAAO,EAAE;AAEX,QAAO,UAAU,KAAI,cAAa;EAChC,MAAM;EACN,IAAI,SAAS;EACb,MAAM,SAAS,SAAS;EACxB,OAAO,KAAK,MAAM,SAAS,SAAS,UAAU;EAC/C,EAAE;;;;;;;;ACvXL,eAAsB,kBAAkB,GAAY;AAClD,KAAI;EACF,MAAM,gBAAgB,EAAE,IAAI,OAAO,iBAAiB;EAEpD,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;EAErE,MAAM,gBAAgB,kBAAkB,iBAAiB;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,KAAK,MACvC,UAAS,MAAM,OAAO,iBAAiB,MACxC;AAED,MAAI,CAAC,eAAe;AAClB,WAAQ,KAAK,iDAAiD;AAC9D,UAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;EAGJ,MAAM,aAAa,MAAM,cAAc,eAAe,cAAc;AAEpE,MAAI,iBAAiB,SAAS,iBAAiB,MAAM,SAAS,GAAG;GAC/D,IAAI,eAAe;AACnB,OAAI,eAAe,WAAW,cAAc,CAC1C,gBAAe,iBAAiB,MAAM,MAAK,SACzC,KAAK,KAAK,WAAW,QAAQ,CAC9B;AAEH,OAAI,CAAC,cACH;QAAI,iBAAiB,MAAM,WAAW,SAAS,CAE7C,YAAW,QAAQ,WAAW,QAAQ;aAE/B,iBAAiB,MAAM,WAAW,OAAO,CAChD,YAAW,QAAQ,WAAW,QAAQ;;;EAK5C,IAAI,kBAAkB,WAAW,QAAQ,WAAW;AACpD,MAAI,iBAAiB,MAAM,WAAW,SAAS,CAC7C,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;WAE7C,iBAAiB,MAAM,WAAW,OAAO,CAChD,mBAAkB,KAAK,MAAM,kBAAkB,KAAK;AAGtD,UAAQ,KAAK,gBAAgB,gBAAgB;AAE7C,SAAO,EAAE,KAAK,EACZ,cAAc,iBACf,CAAC;UAEG,OAAO;AACZ,UAAQ,MAAM,0BAA0B,MAAM;AAC9C,SAAO,EAAE,KAAK,EACZ,cAAc,GACf,CAAC;;;;;;ACjEN,SAAS,gBAAgB,SAAsC;AAC7D,KAAI,CAACC,QAAM,iBACT,QAAO;AAGT,QAAO,OAAO,OAAOA,QAAM,UAAU,CAAC,MACpC,OAAM,GAAG,wBAAwBA,QAAM,kBACxC;;AAGH,SAAgB,gCACd,OACA,SACiC;CACjC,MAAMC,WAA0C,EAAE;AAElD,KAAI,MAAM,QAAQ,WAAW,EAC3B,QAAOC;CAGT,MAAM,SAAS,MAAM,QAAQ;CAC7B,MAAM,EAAE,UAAU;AAElB,KAAI,CAACF,QAAM,kBAAkB;AAC3B,WAAO,KAAK;GACV,MAAM;GACN,SAAS;IACP,IAAI,MAAM;IACV,MAAM;IACN,MAAM;IACN,SAAS,EAAE;IACX,OAAO,MAAM;IACb,aAAa;IACb,eAAe;IACf,OAAO;KACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;KAC1D,eAAe;KACf,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;KACF;IACF;GACF,CAAC;AACF,UAAM,mBAAmB;;AAG3B,KAAI,MAAM,SAAS;AACjB,MAAI,gBAAgBA,QAAM,EAAE;AAE1B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACd,CAAC;AACF,WAAM;AACN,WAAM,mBAAmB;;AAG3B,MAAI,CAACA,QAAM,kBAAkB;AAC3B,YAAO,KAAK;IACV,MAAM;IACN,OAAOA,QAAM;IACb,eAAe;KACb,MAAM;KACN,MAAM;KACP;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KAAK;GACV,MAAM;GACN,OAAOA,QAAM;GACb,OAAO;IACL,MAAM;IACN,MAAM,MAAM;IACb;GACF,CAAC;;AAGJ,KAAI,MAAM,WACR,MAAK,MAAM,YAAY,MAAM,YAAY;AACvC,MAAI,SAAS,MAAM,SAAS,UAAU,MAAM;AAE1C,OAAIA,QAAM,kBAAkB;AAE1B,aAAO,KAAK;KACV,MAAM;KACN,OAAOA,QAAM;KACd,CAAC;AACF,YAAM;AACN,YAAM,mBAAmB;;GAG3B,MAAM,sBAAsBA,QAAM;AAClC,WAAM,UAAU,SAAS,SAAS;IAChC,IAAI,SAAS;IACb,MAAM,SAAS,SAAS;IACxB;IACD;AAED,YAAO,KAAK;IACV,MAAM;IACN,OAAO;IACP,eAAe;KACb,MAAM;KACN,IAAI,SAAS;KACb,MAAM,SAAS,SAAS;KACxB,OAAO,EAAE;KACV;IACF,CAAC;AACF,WAAM,mBAAmB;;AAG3B,MAAI,SAAS,UAAU,WAAW;GAChC,MAAM,eAAeA,QAAM,UAAU,SAAS;AAE9C,OAAI,aACF,UAAO,KAAK;IACV,MAAM;IACN,OAAO,aAAa;IACpB,OAAO;KACL,MAAM;KACN,cAAc,SAAS,SAAS;KACjC;IACF,CAAC;;;AAMV,KAAI,OAAO,eAAe;AACxB,MAAIA,QAAM,kBAAkB;AAC1B,YAAO,KAAK;IACV,MAAM;IACN,O
AAOA,QAAM;IACd,CAAC;AACF,WAAM,mBAAmB;;AAG3B,WAAO,KACL;GACE,MAAM;GACN,OAAO;IACL,aAAa,+BAA+B,OAAO,cAAc;IACjE,eAAe;IAChB;GACD,OAAO;IACL,eACG,MAAM,OAAO,iBAAiB,MAC5B,MAAM,OAAO,uBAAuB,iBAAiB;IAC1D,eAAe,MAAM,OAAO,qBAAqB;IACjD,GAAI,MAAM,OAAO,uBAAuB,kBAClC,UAAa,EACjB,yBACE,MAAM,MAAM,sBAAsB,eACrC;IACF;GACF,EACD,EACE,MAAM,gBACP,CACF;;AAGH,QAAOE;;;;;ACxJT,eAAsB,iBAAiB,GAAY;AACjD,OAAM,eAAe,MAAM;CAE3B,MAAM,mBAAmB,MAAM,EAAE,IAAI,MAAgC;AACrE,SAAQ,MAAM,8BAA8B,KAAK,UAAU,iBAAiB,CAAC;CAE7E,MAAM,gBAAgB,kBAAkB,iBAAiB;AACzD,SAAQ,MACN,sCACA,KAAK,UAAU,cAAc,CAC9B;AAED,KAAI,MAAM,cACR,OAAM,eAAe;CAGvB,MAAM,WAAW,MAAM,sBAAsB,cAAc;AAE3D,KAAIC,iBAAe,SAAS,EAAE;AAC5B,UAAQ,MACN,wCACA,KAAK,UAAU,SAAS,CAAC,MAAM,KAAK,CACrC;EACD,MAAM,oBAAoB,qBAAqB,SAAS;AACxD,UAAQ,MACN,kCACA,KAAK,UAAU,kBAAkB,CAClC;AACD,SAAO,EAAE,KAAK,kBAAkB;;AAGlC,SAAQ,MAAM,kCAAkC;AAChD,QAAO,UAAU,GAAG,OAAO,WAAW;EACpC,MAAMC,cAAoC;GACxC,kBAAkB;GAClB,mBAAmB;GACnB,kBAAkB;GAClB,WAAW,EAAE;GACd;AAED,aAAW,MAAM,YAAY,UAAU;AACrC,WAAQ,MAAM,6BAA6B,KAAK,UAAU,SAAS,CAAC;AACpE,OAAI,SAAS,SAAS,SACpB;AAGF,OAAI,CAAC,SAAS,KACZ;GAGF,MAAM,QAAQ,KAAK,MAAM,SAAS,KAAK;GACvC,MAAMC,WAAS,gCAAgC,OAAO,YAAY;AAElE,QAAK,MAAM,SAASA,UAAQ;AAC1B,YAAQ,MAAM,+BAA+B,KAAK,UAAU,MAAM,CAAC;AACnE,UAAM,OAAO,SAAS;KACpB,OAAO,MAAM;KACb,MAAM,KAAK,UAAU,MAAM;KAC5B,CAAC;;;GAGN;;AAGJ,SAASF,iBAAe,UAAiG;AACvH,QAAO,OAAO,OAAO,UAAU,UAAU;;;;;AC/E3C,MAAa,gBAAgB,IAAI,MAAM;AAEvC,cAAc,KAAK,KAAK,OAAO,MAAM;AACnC,KAAI;AACF,SAAO,MAAM,iBAAiB,EAAE;UAE3B,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;AAEF,cAAc,KAAK,iBAAiB,OAAO,MAAM;AAC/C,KAAI;AACF,SAAO,MAAM,kBAAkB,EAAE;UAE5B,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACnBF,MAAa,cAAc,IAAI,MAAM;AAErC,YAAY,IAAI,KAAK,OAAO,MAAM;AAChC,KAAI;AACF,MAAI,CAAC,MAAM,OAET,OAAM,aAAa;EAGrB,MAAM,SAAS,MAAM,QAAQ,KAAK,KAAI,WAAU;GAC9C,IAAI,MAAM;GACV,QAAQ;GACR,MAAM;GACN,SAAS;GACT,6BAAY,IAAI,KAAK,EAAE,EAAC,aAAa;GACrC,UAAU,MAAM;GAChB,cAAc,MAAM;GACrB,EAAE;AAEH,SAAO,EAAE,KAAK;GACZ,QAAQ;GACR,MAAM;GACN,UAAU;GACX,CAAC;UAEG,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;AC3BF,eAAsB,gBAAgB,SAA2B;AAC/D,KAAI,CAAC,MAAM,aACT,OAAM,IAAI,MAAM,0BAA0B;CAE5C,MAAM,YAAY,eAAe,QAAQ;CAEzC,MAAM,cAAc,QAAQ,MAAM,MAAK,SACrC,CAAC,YAAY,CAAC,SAAS,KAAK,KAAK,CAClC;CAED,MAAMG,UAAkC;EACtC,GAAG,eAAe,OAAO,UAAU;EACnC,eAAe,cAAc,UAAU;EACxC;CAED,MAAM,WAAW,MAAM,MAAM,GAAG,eAAe,MAAM,CAAC,aAAa;EACjE,QAAQ;EACR;EACA,MAAM,KAAK,UAAU,QAAQ;EAC9B,CAAC;AAEF,KAAI,CAAC,SAAS,IAAI;AAChB,UAAQ,MAAM,8BAA8B,SAAS;AACrD,QAAM,IAAI,UAAU,8BAA8B,SAAS;;AAG7D,KAAI,QAAQ,OACV,QAAO,OAAO,SAAS;AAGzB,QAAQ,MAAM,SAAS,MAAM;;AAG/B,SAAS,eAAe,SAAoC;CAC1D,MAAM,cAAc,IAAI,IAAI;EAC1B;EACA;EACA;EACA;EACD,CAAC;AAEF,QAAO,QAAQ,MAAM,MAAM,SAAS;AAClC,MAAI,CAAC,MAAM,QAAQ,KAAK,QAAQ,CAC9B,QAAO;AAET,SAAO,KAAK,QAAQ,MAAK,SAAQ,YAAY,IAAI,KAAK,KAAK,CAAC;GAC5D;;;;;ACzCJ,eAAsB,gBAAgB,GAAY;AAChD,OAAM,eAAe,MAAM;CAE3B,MAAM,UAAU,MAAM,EAAE,IAAI,MAAwB;AACpD,SAAQ,MAAM,kCAAkC,KAAK,UAAU,QAAQ,CAAC,MAAM,KAAK,CAAC;AAEpF,KAAI,MAAM,cACR,OAAM,eAAe;CAGvB,MAAM,WAAW,MAAM,gBAAgB,QAAQ;AAE/C,KAAI,eAAe,SAAS,EAAE;AAC5B,UAAQ,MAAM,4BAA4B,KAAK,UAAU,SAAS,CAAC;AACnE,SAAO,EAAE,KAAK,SAAS;;AAGzB,SAAQ,MAAM,sBAAsB;AACpC,QAAO,UAAU,GAAG,OAAO,WAAW;AACpC,aAAW,MAAM,SAAS,UAAU;AAClC,WAAQ,MAAM,8BAA8B,KAAK,UAAU,MAAM,CAAC;AAClE,SAAM,OAAO,SAAS,MAAoB;;GAE5C;;AAGJ,SAAS,eAAe,UAAsF;AAC5G,QAAO,OAAO,OAAO,UAAU,SAAS;;;;;ACjC1C,MAAa,kBAAkB,IAAI,MAAM;AAEzC,gBAAgB,KAAK,KAAK,OAAO,MAAM;AACrC,KAAI;AACF,SAAO,MAAM,gBAAgB,EAAE;UAE1B,OAAO;AACZ,SAAO,MAAM,aAAa,GAAG,MAAM;;EAErC;;;;ACXF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,MAAM,MAAM;AACzB,KAAI;AACF,SAAO,EAAE,KAAK,EACZ,OAAO,MAAM,cACd,CAAC;UAEG,OAAO;AACZ,UAAQ,MAAM,yBAAyB,MAAM;AAC7C,SAAO,EAAE,KAAK;GAAE,OAAO;GAAyB,OAAO;GAAM,EAAE,IAAI;;EA
ErE;;;;ACZF,MAAa,aAAa,IAAI,MAAM;AAEpC,WAAW,IAAI,KAAK,OAAO,MAAM;AAC/B,KAAI;EACF,MAAM,QAAQ,MAAM,iBAAiB;AACrC,SAAO,EAAE,KAAK,MAAM;UAEf,OAAO;AACZ,UAAQ,MAAM,iCAAiC,MAAM;AACrD,SAAO,EAAE,KAAK,EAAE,OAAO,iCAAiC,EAAE,IAAI;;EAEhE;;;;ACHF,MAAa,SAAS,IAAI,MAAM;AAEhC,OAAO,IAAI,QAAQ,CAAC;AACpB,OAAO,IAAI,MAAM,CAAC;AAElB,OAAO,IAAI,MAAK,MAAK,EAAE,KAAK,iBAAiB,CAAC;AAE9C,OAAO,MAAM,qBAAqB,iBAAiB;AACnD,OAAO,MAAM,WAAW,YAAY;AACpC,OAAO,MAAM,eAAe,gBAAgB;AAC5C,OAAO,MAAM,cAAc,gBAAgB;AAC3C,OAAO,MAAM,UAAU,WAAW;AAClC,OAAO,MAAM,UAAU,WAAW;AAGlC,OAAO,MAAM,wBAAwB,iBAAiB;AACtD,OAAO,MAAM,cAAc,YAAY;AACvC,OAAO,MAAM,kBAAkB,gBAAgB;AAC/C,OAAO,MAAM,iBAAiB,gBAAgB;AAG9C,OAAO,MAAM,gBAAgB,cAAc;;;;ACH3C,eAAsB,UAAU,SAA0C;AACxE,KAAI,QAAQ,SACV,mBAAkB;AAGpB,KAAI,QAAQ,SAAS;AACnB,UAAQ,QAAQ;AAChB,UAAQ,KAAK,0BAA0B;;AAGzC,OAAM,cAAc,QAAQ;AAC5B,KAAI,QAAQ,gBAAgB,aAC1B,SAAQ,KAAK,SAAS,QAAQ,YAAY,sBAAsB;AAGlE,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,mBAAmB,QAAQ;AACjC,OAAM,gBAAgB,QAAQ;AAC9B,OAAM,YAAY,QAAQ;AAE1B,OAAM,aAAa;AACnB,OAAM,oBAAoB;AAE1B,KAAI,QAAQ,aAAa;AACvB,QAAM,cAAc,QAAQ;AAC5B,UAAQ,KAAK,8BAA8B;OAG3C,OAAM,kBAAkB;AAG1B,OAAM,mBAAmB;AACzB,OAAM,aAAa;AAEnB,SAAQ,KACN,uBAAuB,MAAM,QAAQ,KAAK,KAAI,UAAS,KAAK,MAAM,KAAK,CAAC,KAAK,KAAK,GACnF;CAED,MAAM,YAAY,oBAAoB,QAAQ;AAE9C,KAAI,QAAQ,YAAY;AACtB,YAAU,MAAM,QAAQ,iCAAiC;EAEzD,MAAM,gBAAgB,MAAM,QAAQ,OAClC,0CACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAI,UAAS,MAAM,GAAG;GAClD,CACF;EAED,MAAM,qBAAqB,MAAM,QAAQ,OACvC,gDACA;GACE,MAAM;GACN,SAAS,MAAM,OAAO,KAAK,KAAI,UAAS,MAAM,GAAG;GAClD,CACF;EAED,MAAM,UAAU,kBACd;GACE,oBAAoB;GACpB,sBAAsB;GACtB,iBAAiB;GACjB,gCAAgC;GAChC,4BAA4B;GAC5B,+BAA+B;GAC/B,mCAAmC;GACnC,0CAA0C;GAC3C,EACD,SACD;AAED,MAAI;AACF,aAAU,UAAU,QAAQ;AAC5B,WAAQ,QAAQ,2CAA2C;UAEvD;AACJ,WAAQ,KACN,gEACD;AACD,WAAQ,IAAI,QAAQ;;;AAIxB,SAAQ,IACN,mEAAmE,UAAU,QAC9E;AAED,OAAM;EACJ,OAAO,OAAO;EACd,MAAM,QAAQ;EACf,CAAC;;AAGJ,MAAa,QAAQ,cAAc;CACjC,MAAM;EACJ,MAAM;EACN,aAAa;EACd;CACD,MAAM;EACJ,QAAQ;GACN,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,WAAW;GACT,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,UAAU;GACR,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,cAAc;GACZ,OAAO;GACP,MAAM;GACN,aAAa;GACd;EACD,QAAQ;GACN,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,gBAAgB;GACd,OAAO;GACP,MAAM;GACN,aACE;GACH;EACD,eAAe;GACb,OAAO;GACP,MAAM;GACN,SAAS;GACT,aACE;GACH;EACD,cAAc;GACZ,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACD,aAAa;GACX,MAAM;GACN,SAAS;GACT,aAAa;GACd;EACF;CACD,IAAI,EAAE,QAAQ;EACZ,MAAM,eAAe,KAAK;EAC1B,MAAM,YACF,iBAAiB,SAAY,SAAY,OAAO,SAAS,cAAc,GAAG;AAE9E,SAAO,UAAU;GACf,MAAM,OAAO,SAAS,KAAK,MAAM,GAAG;GACpC,SAAS,KAAK;GACd,aAAa,KAAK;GAClB,QAAQ,KAAK;GACb;GACA,eAAe,KAAK;GACpB,aAAa,KAAK;GAClB,YAAY,KAAK;GACjB,WAAW,KAAK;GAChB,UAAU,KAAK;GAChB,CAAC;;CAEL,CAAC;;;;ACvMF,MAAM,OAAO,cAAc;CACzB,MAAM;EACJ,MAAM;EACN,aACE;EACH;CACD,aAAa;EAAE;EAAM;EAAO,eAAe;EAAY;EAAO;CAC/D,CAAC;AAGF,MAAM,QAAQ,KAAK"}
package/package.json
ADDED
@@ -0,0 +1,68 @@
+{
+  "name": "@jer-y/copilot-proxy",
+  "type": "module",
+  "version": "0.1.0",
+  "description": "Turn GitHub Copilot into OpenAI/Anthropic API compatible server. Usable with Claude Code!",
+  "author": "jer-y",
+  "homepage": "https://github.com/jer-y/copilot-proxy",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/jer-y/copilot-proxy.git"
+  },
+  "bugs": "https://github.com/jer-y/copilot-proxy/issues",
+  "keywords": [
+    "proxy",
+    "github-copilot",
+    "openai-compatible"
+  ],
+  "bin": {
+    "copilot-proxy": "./dist/main.js"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsdown",
+    "dev": "bun run --watch ./src/main.ts",
+    "knip": "knip-bun",
+    "lint": "eslint --cache",
+    "lint:all": "eslint --cache .",
+    "prepack": "bun run build",
+    "prepare": "simple-git-hooks",
+    "release": "bumpp && bun publish --access public",
+    "start": "NODE_ENV=production bun run ./src/main.ts",
+    "typecheck": "tsc"
+  },
+  "dependencies": {
+    "citty": "^0.1.6",
+    "clipboardy": "^5.0.0",
+    "consola": "^3.4.2",
+    "fetch-event-stream": "^0.1.5",
+    "gpt-tokenizer": "^3.0.1",
+    "hono": "^4.9.9",
+    "proxy-from-env": "^1.1.0",
+    "srvx": "^0.8.9",
+    "tiny-invariant": "^1.3.3",
+    "undici": "^7.16.0",
+    "zod": "^4.1.11"
+  },
+  "devDependencies": {
+    "@antfu/eslint-config": "^4.13.2",
+    "@types/bun": "^1.2.23",
+    "@types/proxy-from-env": "^1.0.4",
+    "bumpp": "^10.2.3",
+    "eslint": "^9.37.0",
+    "knip": "^5.64.1",
+    "lint-staged": "^16.2.3",
+    "prettier-plugin-packagejson": "^2.5.19",
+    "simple-git-hooks": "^2.13.1",
+    "tsdown": "^0.15.6",
+    "typescript": "^5.9.3"
+  },
+  "simple-git-hooks": {
+    "pre-commit": "bunx lint-staged"
+  },
+  "lint-staged": {
+    "*": "bun run lint --fix"
+  }
+}