@agiflowai/agent-cli 0.0.10 → 0.1.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +4 -0
- package/README.md +8 -347
- package/dist/claudeNotification.js +13653 -24
- package/dist/claudePermissionRequest.js +13675 -0
- package/dist/claudePostToolUse.js +13661 -17
- package/dist/claudePreToolUse.js +13773 -20
- package/dist/claudeSessionStart.js +13667 -17
- package/dist/claudeStop.js +13525 -0
- package/dist/cli.js +2298 -1
- package/dist/cli.js.map +1 -1
- package/dist/codex.md +386 -0
- package/dist/daemon.js +847 -0
- package/dist/daemon.js.map +1 -0
- package/dist/index-7uB_UKC1.mjs +31 -0
- package/dist/index-7uB_UKC1.mjs.map +1 -0
- package/dist/index-BQL-MgGG.mjs +122 -0
- package/dist/index-BQL-MgGG.mjs.map +1 -0
- package/dist/index-CVYu2Dkh.mjs +40977 -0
- package/dist/index-CVYu2Dkh.mjs.map +1 -0
- package/dist/index-DDi9orhQ.mjs +57 -0
- package/dist/index-DDi9orhQ.mjs.map +1 -0
- package/dist/index.js +319 -1
- package/dist/index.js.map +1 -1
- package/dist/package.json +20 -3
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +13 -5
- package/dist/AgentHttpService-Cz0DO3Zx.js +0 -28
- package/dist/AgentHttpService-Cz0DO3Zx.js.map +0 -1
- package/dist/cli-Cr-ovvgN.js +0 -104
- package/dist/cli-Cr-ovvgN.js.map +0 -1
- package/dist/networkLog.js +0 -435
- package/dist/networkLog.js.map +0 -1
package/dist/networkLog.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"networkLog.js","sources":["../src/llms/loggers/file.ts","../src/llms/parsers/costCalculator.ts","../src/llms/parsers/claude.ts","../__vite-browser-external:node:zlib","../src/llms/parsers/gemini.ts","../src/llms/parsers/index.ts","../src/llms/providers/index.ts","../src/llms/enhancers/claude.ts","../src/llms/enhancers/gemini.ts","../src/llms/enhancers/index.ts","../src/llms/interceptors/instruments/fetchInstrument.ts","../src/llms/interceptors/instruments/httpInstrument.ts","../src/llms/interceptors/index.ts","../src/llms/router/auth/CodexAuth.ts","../src/llms/router/transformers/codex.md?raw","../src/llms/router/transformers/claudeToOpenAI.ts","../src/llms/router/transformers/openAIToClaude.ts","../src/llms/router/index.ts","../src/hooks/networkLog.ts"],"sourcesContent":["// filepath: /Users/vuongngo/workspace/agiflow/apps/agent-cli/src/llms/loggers/file.ts\n/**\n * FileLogger - Direct file logging without stdout pollution\n *\n * This logger writes structured JSON logs directly to a file stream\n * to avoid interfering with stdout when used as an imported module\n */\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport { SubEnvManager } from '../../config/subenv';\n\nexport interface LogEntry {\n time: string;\n level: number;\n msg: string;\n [key: string]: any;\n}\n\nexport class FileLogger {\n private logStream: fs.WriteStream;\n private logPath: string;\n\n constructor(logPath: string) {\n this.logPath = logPath;\n\n // Ensure directory exists\n const logDir = path.dirname(this.logPath);\n if (!fs.existsSync(logDir)) {\n fs.mkdirSync(logDir, { recursive: true });\n }\n\n // Create write stream for direct file logging\n this.logStream = fs.createWriteStream(this.logPath, { flags: 'a' });\n\n // Handle stream errors gracefully\n this.logStream.on('error', (error) => {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Error writing to log file: ${error.message}`);\n });\n }\n\n /**\n * Log an info-level message\n */\n info(message: string, data?: Record<string, any>): void {\n this.log(30, message, data);\n }\n\n /**\n * Log a debug-level message\n */\n debug(message: string, data?: Record<string, any>): void {\n this.log(20, message, data);\n }\n\n /**\n * Log a warning-level message\n */\n warn(message: string, data?: Record<string, any>): void {\n this.log(40, message, data);\n }\n\n /**\n * Log an error-level message\n */\n error(message: string, data?: Record<string, any>): void {\n this.log(50, message, data);\n }\n\n /**\n * Log a message with specified level\n */\n log(level: number, message: string, data?: Record<string, any>): void {\n const logEntry: LogEntry = {\n time: new Date().toISOString(),\n level,\n msg: message,\n ...data,\n };\n\n try {\n this.logStream.write(JSON.stringify(logEntry) + '\\n');\n } catch (error) {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Failed to write log entry: ${error}`);\n }\n }\n\n /**\n * Close the log stream\n */\n close(): void {\n if (this.logStream && !this.logStream.destroyed) {\n this.logStream.end();\n }\n }\n\n /**\n * Get the current log file path\n */\n getLogPath(): string {\n return this.logPath;\n }\n\n /**\n * Check if debug logging should be enabled based on environment\n */\n static shouldDebug(): boolean {\n return SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug;\n }\n}\n","/**\n * Cost Calculator for LLM Models\n *\n * Calculates costs based on token usage using pricing data from cost.json\n */\n\nimport 
costData from '../../../cost.json';\n\nexport interface ModelCost {\n totalCost: number;\n inputCost: number;\n outputCost: number;\n cacheReadCost?: number;\n cacheCreationCost?: number;\n}\n\nexport interface TokenUsage {\n inputTokens?: number;\n outputTokens?: number;\n cachedInputTokens?: number;\n cacheCreationInputTokens?: number;\n}\n\nexport class CostCalculator {\n /**\n * Calculate cost for a given model and token usage\n */\n static calculateCost(modelName: string, usage: TokenUsage): ModelCost | null {\n const modelPricing = costData[modelName as keyof typeof costData];\n\n if (!modelPricing) {\n return null;\n }\n\n const inputTokens = usage.inputTokens || 0;\n const outputTokens = usage.outputTokens || 0;\n const cachedInputTokens = usage.cachedInputTokens || 0;\n const cacheCreationInputTokens = usage.cacheCreationInputTokens || 0;\n\n // Get rates from pricing data with type guards\n const inputRate = 'input_cost_per_token' in modelPricing ? modelPricing.input_cost_per_token : 0;\n const outputRate = 'output_cost_per_token' in modelPricing ? modelPricing.output_cost_per_token : 0;\n const cacheReadRate = 'cache_read_input_token_cost' in modelPricing ? modelPricing.cache_read_input_token_cost : 0;\n const cacheCreationRate =\n 'cache_creation_input_token_cost' in modelPricing ? modelPricing.cache_creation_input_token_cost : 0;\n\n // Calculate costs\n const inputCost = inputTokens * inputRate;\n const outputCost = outputTokens * outputRate;\n const cacheReadCost = cachedInputTokens * cacheReadRate;\n const cacheCreationCost = cacheCreationInputTokens * cacheCreationRate;\n\n const totalCost = inputCost + outputCost + cacheReadCost + cacheCreationCost;\n\n return {\n totalCost,\n inputCost,\n outputCost,\n cacheReadCost: cachedInputTokens > 0 ? cacheReadCost : undefined,\n cacheCreationCost: cacheCreationInputTokens > 0 ? 
cacheCreationCost : undefined,\n };\n }\n}\n","/**\n * Claude API Streaming Response Parser\n *\n * Parses Claude API streaming responses (Server-Sent Events format)\n * into structured messages following the blazegent message format.\n *\n * Handles:\n * - Text content blocks\n * - Tool use content blocks\n * - Token usage information\n * - Message metadata\n * - Cost calculation based on token usage\n */\n\nimport type { MessagePart } from '../messages';\nimport { CostCalculator } from './costCalculator';\n\nexport class ClaudeParser {\n /**\n * Parses Claude API streaming response into structured message format\n *\n * @param rawText - Raw streaming response text in SSE format\n * @returns Parsed message object with metadata and usage information\n */\n static parseStreamingResponse(\n rawText: string,\n ):\n | { message: { id: string; role: 'assistant'; createdAt: string; parts: MessagePart[] }; metadata: any }\n | { error: string; rawText: string } {\n try {\n const lines = rawText.split('\\n');\n const contentBlocks: any[] = [];\n let messageMetadata: any = null;\n let usage: any = null;\n const blockMap = new Map<number, any>(); // Track content blocks by index\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n const dataStr = line.substring(6).trim();\n if (dataStr) {\n try {\n const data = JSON.parse(dataStr);\n\n // Extract message metadata\n if (data.type === 'message_start' && data.message) {\n messageMetadata = data.message;\n }\n\n // Handle content block start (text or tool_use)\n if (data.type === 'content_block_start') {\n const block = {\n index: data.index,\n type: data.content_block.type,\n id: data.content_block.id,\n name: data.content_block.name,\n input: data.content_block.input || {},\n text: '',\n partialJson: '',\n };\n blockMap.set(data.index, block);\n }\n\n // Handle content block deltas (text or JSON input)\n if (data.type === 'content_block_delta') {\n const block = blockMap.get(data.index);\n if (block) {\n if (data.delta.type === 'text_delta') {\n block.text += data.delta.text;\n } else if (data.delta.type === 'input_json_delta') {\n block.partialJson += data.delta.partial_json;\n }\n }\n }\n\n // Handle content block stop\n if (data.type === 'content_block_stop') {\n const block = blockMap.get(data.index);\n if (block) {\n // Try to parse accumulated JSON for tool inputs\n if (block.partialJson) {\n try {\n block.input = JSON.parse(block.partialJson);\n } catch (parseError) {\n // Pure telemetry - no mutations, no logging\n // Leave block.input unchanged\n }\n }\n contentBlocks.push(block);\n }\n }\n\n // Extract final usage information\n if (data.type === 'message_delta' && data.usage) {\n usage = data.usage;\n }\n } catch {\n // Ignore malformed JSON\n }\n }\n }\n }\n\n // Convert content blocks to blazegent parts format\n const parts: MessagePart[] = (contentBlocks || []).map((block) => {\n if (block.type === 'text') {\n return {\n type: 'text' as const,\n text: block.text,\n };\n } else if (block.type === 'tool_use') {\n return {\n type: 'tool-invocation' as const,\n toolInvocation: {\n toolCallId: block.id,\n toolName: block.name,\n args: block.input,\n state: 'call' as const,\n },\n };\n }\n // For unknown block types, create a text part with error message\n return {\n type: 'text' as const,\n text: `Unknown content block type: ${block.type}`,\n };\n });\n\n // Create message in blazegent format\n const message = {\n id: messageMetadata?.id || ClaudeParser.generateId(),\n role: 'assistant' as const,\n createdAt: new 
Date().toISOString(),\n parts: parts.length > 0 ? parts : [{ type: 'text' as const, text: '' }],\n };\n\n // Combine usage information from both sources\n const combinedUsage = {\n ...(messageMetadata?.usage || {}),\n ...(usage || {}),\n };\n\n // Normalize token usage data\n const tokenUsage = {\n inputTokens: combinedUsage.input_tokens || combinedUsage.inputTokens,\n outputTokens: combinedUsage.output_tokens || combinedUsage.outputTokens,\n cachedInputTokens: combinedUsage.cache_read_input_tokens || combinedUsage.cachedInputTokens,\n cacheCreationInputTokens: combinedUsage.cache_creation_input_tokens || combinedUsage.cacheCreationInputTokens,\n totalTokens: (combinedUsage.input_tokens || 0) + (combinedUsage.output_tokens || 0),\n cacheCreation: combinedUsage.cache_creation,\n serviceTier: combinedUsage.service_tier,\n };\n\n // Calculate cost if model is available\n const modelName = messageMetadata?.model;\n const cost = modelName ? CostCalculator.calculateCost(modelName, tokenUsage) : null;\n\n // Add metadata with comprehensive usage information\n const result: any = {\n message,\n metadata: {\n model: modelName,\n usage: tokenUsage,\n cost,\n originalMessageId: messageMetadata?.id,\n stopReason: messageMetadata?.stop_reason,\n contentLength: parts.reduce((len, part) => {\n if (part.type === 'text') {\n return len + part.text.length;\n } else if (part.type === 'tool-invocation') {\n return len + JSON.stringify(part.toolInvocation.args || '').length;\n }\n return len + JSON.stringify(part).length;\n }, 0),\n streamingEvents: lines.filter((line) => line.startsWith('event:')).length,\n },\n };\n\n return result;\n } catch (error) {\n return {\n error: `Failed to parse streaming response: ${error}`,\n rawText,\n };\n }\n }\n\n /**\n * Simple ID generator for messages\n */\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\n// Export for backward compatibility\nexport const parseStreamingResponseToMessage = ClaudeParser.parseStreamingResponse;\n"," export default new Proxy({}, {\n get(_, key) {\n throw new Error(`Module \"node:zlib\" has been externalized for browser compatibility. Cannot access \"node:zlib.${key}\" in client code. See https://vite.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.`)\n }\n })","/**\n * Gemini API Streaming / Regular Response Parser\n */\n\nimport * as zlib from 'node:zlib';\n\nexport class GeminiParser {\n /** Entry point used by interceptor */\n static parseStreamingResponse(rawText: string): any {\n try {\n const isSse = rawText.includes('\\ndata:') || rawText.startsWith('data:');\n if (isSse) return this.parseSse(rawText);\n return this.parseRegular(rawText);\n } catch (error) {\n return { error: `Failed to parse Gemini response: ${error}`, rawText };\n }\n }\n\n /** Parse non-stream (may be gzip) */\n private static parseRegular(rawText: string) {\n const looksGzip = this.isProbablyGzip(rawText) || this.hasHighControlCharRatio(rawText);\n let working = rawText;\n if (looksGzip) {\n const gun = this.tryGunzip(rawText);\n if (gun.ok) working = gun.text;\n }\n try {\n const json = JSON.parse(working);\n const response = json.response || json; // generateContent returns { response: { candidates: [...] 
}}\n const { fullText, finishReason, texts, nextSpeakers, reasoningSegments } = this.extractCandidateText(\n response.candidates,\n );\n const usage = this.normalizeUsage(response.usageMetadata || json.usageMetadata || json);\n const parts =\n texts.length > 0\n ? texts.map((t) => ({ type: 'text', text: t }))\n : fullText\n ? [{ type: 'text', text: fullText }]\n : [];\n return {\n message: {\n id: response.responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts,\n },\n metadata: {\n model: response.modelVersion || json.modelVersion,\n usage,\n responseId: response.responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: 0,\n compressed: looksGzip || undefined,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? reasoningSegments : undefined,\n },\n };\n } catch {\n return {\n message: {\n id: this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: [{ type: 'text', text: working }],\n },\n metadata: { raw: true },\n };\n }\n }\n\n /** Parse SSE stream */\n private static parseSse(rawText: string) {\n const lines = rawText.split(/\\r?\\n/);\n let modelVersion: string | undefined;\n let responseId: string | undefined;\n let finishReason: string | undefined;\n let usageMetadata: any;\n const collectedTexts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n\n for (const line of lines) {\n if (!line.startsWith('data:')) continue;\n const jsonStr = line.slice(5).trim();\n if (!jsonStr) continue;\n try {\n const payload = JSON.parse(jsonStr);\n const response = payload.response || {};\n if (response.modelVersion) modelVersion = response.modelVersion;\n if (response.responseId) responseId = response.responseId;\n if (response.usageMetadata) usageMetadata = response.usageMetadata;\n const extracted = this.extractCandidateText(response.candidates);\n if (!finishReason && extracted.finishReason) finishReason = extracted.finishReason;\n if (extracted.texts.length) collectedTexts.push(...extracted.texts);\n if (extracted.nextSpeakers.length) nextSpeakers.push(...extracted.nextSpeakers);\n if (extracted.reasoningSegments.length) reasoningSegments.push(...extracted.reasoningSegments);\n } catch {\n // ignore chunk errors\n }\n }\n\n const fullText = collectedTexts.join('');\n const usage = this.normalizeUsage(usageMetadata);\n return {\n message: {\n id: responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: collectedTexts.length ? collectedTexts.map((t) => ({ type: 'text', text: t })) : [],\n },\n metadata: {\n model: modelVersion,\n usage,\n responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: lines.filter((l) => l.startsWith('data:')).length,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? 
reasoningSegments : undefined,\n },\n };\n }\n\n /** Extract text and reasoning from candidates array */\n private static extractCandidateText(candidates: any): {\n fullText: string;\n finishReason?: string;\n texts: string[];\n finishReasonFound?: string;\n nextSpeakers: string[];\n reasoningSegments: string[];\n } {\n const texts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n let finishReason: string | undefined;\n if (Array.isArray(candidates)) {\n for (const cand of candidates) {\n if (!finishReason && cand?.finishReason) finishReason = cand.finishReason;\n const parts = cand?.content?.parts || [];\n if (Array.isArray(parts)) {\n for (const part of parts) {\n if (part && typeof part === 'object') {\n if (part.thought === true) continue; // skip internal thoughts\n const raw = part.text;\n if (typeof raw === 'string' && raw.length) {\n // Detect embedded JSON reasoning blob\n const trimmed = raw.trim();\n if (trimmed.startsWith('{') && trimmed.endsWith('}')) {\n try {\n const parsed = JSON.parse(trimmed);\n if (parsed && typeof parsed === 'object') {\n if (typeof parsed.reasoning === 'string') {\n reasoningSegments.push(parsed.reasoning);\n texts.push(parsed.reasoning);\n }\n if (parsed.next_speaker && typeof parsed.next_speaker === 'string')\n nextSpeakers.push(parsed.next_speaker);\n continue; // already added reasoning as text\n }\n } catch {\n // fall through to treat as raw text\n }\n }\n texts.push(raw);\n }\n }\n }\n }\n }\n }\n return {\n fullText: texts.join(''),\n finishReason,\n texts,\n finishReasonFound: finishReason,\n nextSpeakers,\n reasoningSegments,\n };\n }\n\n private static tryGunzip(str: string): { ok: boolean; text: string } {\n try {\n const buf = Buffer.from(str, 'latin1');\n const out = zlib.gunzipSync(buf);\n return { ok: true, text: out.toString('utf8') };\n } catch {\n return { ok: false, text: str };\n }\n }\n\n private static hasHighControlCharRatio(text: string): boolean {\n if (!text) return false;\n let control = 0;\n const sampleLen = Math.min(text.length, 256);\n for (let i = 0; i < sampleLen; i++) {\n const code = text.charCodeAt(i);\n if (code < 32 && code !== 9 && code !== 10 && code !== 13) control++;\n }\n return control / sampleLen > 0.1;\n }\n\n private static isProbablyGzip(text: string): boolean {\n return text.length >= 2 && text.charCodeAt(0) === 0x1f && text.charCodeAt(1) === 0x8b;\n }\n\n private static normalizeUsage(usage: any) {\n if (!usage || typeof usage !== 'object') {\n return { inputTokens: undefined, outputTokens: undefined, totalTokens: undefined };\n }\n const input = usage.promptTokenCount ?? usage.prompt_tokens;\n const output = usage.candidatesTokenCount ?? usage.candidates_tokens;\n const total =\n usage.totalTokenCount ?? (typeof input === 'number' && typeof output === 'number' ? 
input + output : undefined);\n return { inputTokens: input, outputTokens: output, totalTokens: total };\n }\n\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\nexport const parseGeminiStreamingResponse = GeminiParser.parseStreamingResponse;\n","/**\n * LLM Parser Factory\n *\n * Routes streaming responses to the appropriate parser based on URL endpoint patterns.\n * Supports multiple LLM providers with extensible parser registry.\n */\n\nimport { ClaudeParser } from './claude';\nimport { GeminiParser } from './gemini';\n\n// Parser interface for consistency\ninterface StreamingParser {\n parseStreamingResponse(rawText: string): any;\n}\n\n// Parser registry mapping URL patterns to parser classes\ninterface ParserMapping {\n patterns: RegExp[];\n parser: StreamingParser;\n name: string;\n}\n\nexport class LlmParser {\n private static parsers: ParserMapping[] = [\n {\n name: 'Claude (Anthropic)',\n patterns: [\n /\\/publishers\\/anthropic\\/models/i,\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i, // Anthropic API endpoint\n ],\n parser: ClaudeParser,\n },\n {\n name: 'Google Gemini',\n patterns: [\n /generativelanguage\\.googleapis\\.com/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /streamGenerateContent/i,\n /generateContent/i,\n ],\n parser: GeminiParser,\n },\n // Future parsers can be added here:\n // {\n // name: 'OpenAI GPT',\n // patterns: [/openai\\.com/i, /api\\.openai\\.com/i, /\\/v1\\/chat\\/completions/i],\n // parser: OpenAIParser,\n // },\n ];\n\n /**\n * Parse streaming response by detecting the appropriate parser based on URL\n *\n * @param url - The API endpoint URL\n * @param rawText - Raw streaming response text\n * @returns Parsed message object or null if no parser matches\n */\n static parseStreamingResponse(url: string, rawText: string): any {\n const parser = LlmParser.getParserForUrl(url);\n\n if (!parser) {\n return {\n error: `No parser found for URL: ${url}`,\n rawText,\n supportedProviders: LlmParser.parsers.map((p) => p.name),\n };\n }\n\n try {\n const result = parser.parseStreamingResponse(rawText);\n\n // Add parser metadata to the result\n if (result && typeof result === 'object' && !result.error) {\n result.metadata = {\n ...result.metadata,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n\n return result;\n } catch (error) {\n return {\n error: `Parser failed: ${error}`,\n rawText,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n }\n\n /**\n * Get the appropriate parser for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser instance or null if no match\n */\n static getParserForUrl(url: string): StreamingParser | null {\n if (!url) return null;\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.parser;\n }\n }\n\n return null;\n }\n\n /**\n * Get the parser name for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser name or 'unknown'\n */\n static getParserNameForUrl(url: string): string {\n if (!url) return 'unknown';\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.name;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Check if streaming response parsing is supported for a URL\n *\n * @param url - The API endpoint URL\n * @returns True if a parser is available\n */\n static isSupported(url: string): boolean {\n return LlmParser.getParserForUrl(url) !== 
null;\n }\n\n /**\n * Get list of all supported LLM providers\n *\n * @returns Array of supported provider names\n */\n static getSupportedProviders(): string[] {\n return LlmParser.parsers.map((p) => p.name);\n }\n\n /**\n * Register a new parser for additional LLM providers\n *\n * @param name - Human-readable parser name\n * @param patterns - Array of regex patterns to match URLs\n * @param parser - Parser class implementing StreamingParser interface\n */\n static registerParser(name: string, patterns: RegExp[], parser: StreamingParser): void {\n LlmParser.parsers.push({\n name,\n patterns,\n parser,\n });\n }\n}\n","/**\n * LLM Provider Detection and Utility Class\n *\n * Handles identification of different LLM providers based on URL patterns\n * and provides utilities for working with LLM API calls.\n */\n\nexport class LlmProvider {\n private readonly LLM_PATTERNS = [\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /openai\\.com/i,\n /api\\.openai\\.com/i,\n /chatgpt\\.com\\/backend-api\\/codex/i, // Routing base URL for Claude → OpenAI remapping\n /gemini\\.google/i,\n /generativelanguage\\.googleapis/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /bedrock.*\\.amazonaws/i,\n /cognitive\\.microsoft/i,\n /azure\\.openai/i,\n /cohere\\.ai/i,\n /huggingface\\.co/i,\n /replicate\\.com/i,\n /together\\.xyz/i,\n /mistral\\.ai/i,\n /groq\\.com/i,\n /perplexity\\.ai/i,\n /ollama/i,\n /localhost:\\d+\\/v1/i,\n /127\\.0\\.0\\.1:\\d+\\/v1/i,\n /\\/v1\\/chat\\/completions/i,\n /\\/v1\\/completions/i,\n /\\/v1\\/embeddings/i,\n /\\/v1\\/messages/i,\n /\\/publishers\\/anthropic\\/models/i,\n ];\n\n private readonly PROVIDER_MAP = [\n { pattern: /\\/publishers\\/anthropic\\/models/i, provider: 'anthropic' },\n { pattern: /anthropic\\.com|claude\\.ai/i, provider: 'anthropic' },\n { pattern: /chatgpt\\.com\\/backend-api\\/codex/i, provider: 'openai' }, // Routing base URL for Claude → OpenAI\n { pattern: /openai\\.com/i, provider: 'openai' },\n { pattern: /gemini\\.google|generativelanguage\\.googleapis|cloudcode-pa\\.googleapis/i, provider: 'google' },\n { pattern: /bedrock.*\\.amazonaws/i, provider: 'aws-bedrock' },\n { pattern: /cognitive\\.microsoft|azure\\.openai/i, provider: 'azure' },\n { pattern: /cohere\\.ai/i, provider: 'cohere' },\n { pattern: /huggingface\\.co/i, provider: 'huggingface' },\n { pattern: /replicate\\.com/i, provider: 'replicate' },\n { pattern: /together\\.xyz/i, provider: 'together' },\n { pattern: /mistral\\.ai/i, provider: 'mistral' },\n { pattern: /groq\\.com/i, provider: 'groq' },\n { pattern: /perplexity\\.ai/i, provider: 'perplexity' },\n { pattern: /ollama|localhost:\\d+\\/v1|127\\.0\\.0\\.1:\\d+\\/v1/i, provider: 'local' },\n ];\n\n /**\n * Check if a URL is related to an LLM service\n * @param url The URL to check\n * @returns true if the URL is LLM-related, false otherwise\n */\n isLLMCall(url: string): boolean {\n if (!url) return false;\n return this.LLM_PATTERNS.some((pattern) => pattern.test(url));\n }\n\n /**\n * Detect which LLM provider a URL belongs to\n * @param url The URL to analyze\n * @returns The provider name or 'unknown' if not recognized\n */\n detectProvider(url: string): string {\n if (!url) return 'unknown';\n\n for (const { pattern, provider } of this.PROVIDER_MAP) {\n if (pattern.test(url)) {\n return provider;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Get all supported provider names\n * @returns Array of supported provider names\n */\n getSupportedProviders(): string[] {\n return Array.from(new Set(this.PROVIDER_MAP.map(({ provider }) => 
provider)));\n }\n\n /**\n * Check if a specific provider is supported\n * @param providerName The provider name to check\n * @returns true if the provider is supported, false otherwise\n */\n isProviderSupported(providerName: string): boolean {\n return this.getSupportedProviders().includes(providerName.toLowerCase());\n }\n\n /**\n * Get the pattern used for detecting a specific provider\n * @param providerName The provider name\n * @returns The regex pattern or null if provider not found\n */\n getProviderPattern(providerName: string): RegExp | null {\n const mapping = this.PROVIDER_MAP.find(({ provider }) => provider === providerName.toLowerCase());\n return mapping ? mapping.pattern : null;\n }\n}\n\nexport default LlmProvider;\n","/**\n * Claude API Request Enhancer\n *\n * Enhances Claude API requests by injecting context data into the system prompt.\n * This allows passing task descriptions, comments, and other contextual information\n * to Claude without modifying the user's original prompt.\n */\n\nimport { SubEnvManager } from '../../config/subenv';\n\nexport class ClaudeEnhancer {\n /**\n * Enhance Claude API request by injecting context data into system prompt\n *\n * @param requestBody - The original request body as a string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(requestBody: string): string {\n const contextData = SubEnvManager.contextData;\n\n // If no context data, return original request\n if (!contextData) {\n return requestBody;\n }\n\n try {\n const request = JSON.parse(requestBody);\n\n // Ensure system is an array\n if (!request.system) {\n request.system = [];\n } else if (typeof request.system === 'string') {\n // Convert string system to array format\n request.system = [\n {\n type: 'text',\n text: request.system,\n },\n ];\n } else if (!Array.isArray(request.system)) {\n // If it's an object but not an array, wrap it\n request.system = [request.system];\n }\n\n // Insert context after the first system message (which may be auth-related)\n // This avoids adding extra cache control breakpoints\n const contextSystemMessage = {\n type: 'text',\n text: `# Task Context\\n\\n${contextData}`,\n };\n\n // Insert at position 1 (after first message) instead of prepending\n if (request.system.length > 0) {\n request.system.splice(1, 0, contextSystemMessage);\n } else {\n request.system.push(contextSystemMessage);\n }\n\n return JSON.stringify(request);\n } catch (error) {\n // If parsing fails, return original request\n console.error('Failed to enhance Claude request:', error);\n return requestBody;\n }\n }\n}\n","/**\n * Gemini API Request Enhancer\n *\n * Enhances Gemini API requests by injecting context data into the system instruction.\n */\n\nimport { SubEnvManager } from '../../config/subenv';\n\nexport class GeminiEnhancer {\n /**\n * Enhance Gemini API request by injecting context data into system instruction\n *\n * @param requestBody - The original request body as a string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(requestBody: string): string {\n const contextData = SubEnvManager.contextData;\n\n // If no context data, return original request\n if (!contextData) {\n return requestBody;\n }\n\n try {\n const request = JSON.parse(requestBody);\n\n // Inject context into systemInstruction\n const contextInstruction = `# Task Context\\n\\n${contextData}`;\n\n if (!request.systemInstruction) {\n request.systemInstruction = {\n parts: [\n {\n text: contextInstruction,\n 
},\n ],\n };\n } else if (typeof request.systemInstruction === 'string') {\n // If systemInstruction is a string, convert to parts format\n request.systemInstruction = {\n parts: [\n {\n text: contextInstruction,\n },\n {\n text: request.systemInstruction,\n },\n ],\n };\n } else if (request.systemInstruction.parts) {\n // Prepend context to existing parts\n request.systemInstruction.parts.unshift({\n text: contextInstruction,\n });\n }\n\n return JSON.stringify(request);\n } catch (error) {\n // If parsing fails, return original request\n console.error('Failed to enhance Gemini request:', error);\n return requestBody;\n }\n }\n}\n","/**\n * LLM Request Enhancer Factory\n *\n * Routes LLM requests to the appropriate enhancer based on URL endpoint patterns.\n * Enhancers inject context data (task descriptions, comments, etc.) into requests\n * without modifying the user's original prompt.\n */\n\nimport { ClaudeEnhancer } from './claude';\nimport { GeminiEnhancer } from './gemini';\n\n// Enhancer interface for consistency\ninterface RequestEnhancer {\n enhanceRequest(requestBody: string): string;\n}\n\n// Enhancer registry mapping URL patterns to enhancer classes\ninterface EnhancerMapping {\n patterns: RegExp[];\n enhancer: RequestEnhancer;\n name: string;\n}\n\nexport class LlmEnhancer {\n private static enhancers: EnhancerMapping[] = [\n {\n name: 'Claude (Anthropic)',\n patterns: [\n /\\/publishers\\/anthropic\\/models/i,\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i, // Anthropic API endpoint\n ],\n enhancer: ClaudeEnhancer,\n },\n {\n name: 'Google Gemini',\n patterns: [\n /generativelanguage\\.googleapis\\.com/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /streamGenerateContent/i,\n /generateContent/i,\n ],\n enhancer: GeminiEnhancer,\n },\n // Future enhancers can be added here:\n // {\n // name: 'OpenAI GPT',\n // patterns: [/openai\\.com/i, /api\\.openai\\.com/i, /\\/v1\\/chat\\/completions/i],\n // enhancer: OpenAIEnhancer,\n // },\n ];\n\n /**\n * Enhance LLM request by detecting the appropriate enhancer based on URL\n *\n * @param url - The API endpoint URL\n * @param requestBody - Original request body as string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(url: string, requestBody: string): string {\n const enhancer = LlmEnhancer.getEnhancerForUrl(url);\n\n if (!enhancer) {\n // No enhancer found, return original request\n return requestBody;\n }\n\n try {\n return enhancer.enhanceRequest(requestBody);\n } catch (error) {\n console.error(`Enhancer failed for ${url}:`, error);\n return requestBody;\n }\n }\n\n /**\n * Get the appropriate enhancer for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Enhancer instance or null if no match\n */\n static getEnhancerForUrl(url: string): RequestEnhancer | null {\n if (!url) return null;\n\n for (const mapping of LlmEnhancer.enhancers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.enhancer;\n }\n }\n\n return null;\n }\n\n /**\n * Get the enhancer name for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Enhancer name or 'unknown'\n */\n static getEnhancerNameForUrl(url: string): string {\n if (!url) return 'unknown';\n\n for (const mapping of LlmEnhancer.enhancers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.name;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Check if request enhancement is supported for a URL\n *\n * @param url - The API endpoint URL\n * @returns True 
if an enhancer is available\n */\n static isSupported(url: string): boolean {\n return LlmEnhancer.getEnhancerForUrl(url) !== null;\n }\n\n /**\n * Get list of all supported LLM providers\n *\n * @returns Array of supported provider names\n */\n static getSupportedProviders(): string[] {\n return LlmEnhancer.enhancers.map((e) => e.name);\n }\n\n /**\n * Register a new enhancer for additional LLM providers\n *\n * @param name - Human-readable enhancer name\n * @param patterns - Array of regex patterns to match URLs\n * @param enhancer - Enhancer class implementing RequestEnhancer interface\n */\n static registerEnhancer(name: string, patterns: RegExp[], enhancer: RequestEnhancer): void {\n LlmEnhancer.enhancers.push({\n name,\n patterns,\n enhancer,\n });\n }\n}\n","import { performance } from 'node:perf_hooks';\nimport { LlmEnhancer } from '../../enhancers';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface FetchInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\ninterface CaptureResult {\n response: Response;\n responseBody: string;\n processedBody: any;\n bodyBytes: number;\n isEventStream: boolean;\n capturePromise?: Promise<{ responseBody: string; processedBody: any; bodyBytes: number }>;\n}\n\nexport class FetchInstrument {\n private originalFetch: typeof fetch | null = null;\n private static readonly MAX_LOG_BYTES = 256 * 1024;\n\n constructor(\n private ctx: CommonContext,\n private extra: FetchInstrumentConfig,\n ) {}\n\n patch(): void {\n if (typeof globalThis.fetch !== 'function' || this.originalFetch) return;\n\n this.originalFetch = globalThis.fetch.bind(globalThis);\n const ctx = this.ctx;\n const extra = this.extra;\n\n globalThis.fetch = (async (resource: any, init?: RequestInit) => {\n const start = performance.now();\n\n let effectiveResource: any = resource;\n let effectiveInit: RequestInit | undefined = init;\n let url = this.safeGetUrl(effectiveResource);\n const originalUrl = url;\n const method = (\n effectiveInit?.method ||\n (typeof effectiveResource === 'object' && effectiveResource?.method) ||\n 'GET'\n )\n .toString()\n .toUpperCase();\n\n let requestHeaders = this.extractHeaders(effectiveInit?.headers ?? effectiveResource?.headers);\n const requestBody = this.stringifyRequestBody(effectiveInit?.body ?? 
effectiveResource?.body);\n\n let enhancedBody = requestBody;\n if (ctx.llmProvider.isLLMCall(url) && requestBody && requestBody !== '<request body error>') {\n try {\n enhancedBody = LlmEnhancer.enhanceRequest(url, requestBody);\n if (enhancedBody !== requestBody) {\n if (effectiveInit) {\n effectiveInit.body = enhancedBody;\n } else if (typeof effectiveResource === 'object' && effectiveResource) {\n // Cannot modify read-only body property, create new init instead\n effectiveInit = { body: enhancedBody };\n } else {\n effectiveInit = { body: enhancedBody };\n }\n }\n } catch (err) {\n // Enhancement failed, continue with original body\n }\n }\n\n const routing = await this.applyRouting({\n resource: effectiveResource,\n init: effectiveInit,\n url,\n enhancedBody,\n requestHeaders,\n });\n\n effectiveResource = routing.resource;\n effectiveInit = routing.init;\n url = routing.url;\n enhancedBody = routing.enhancedBody;\n requestHeaders = routing.requestHeaders;\n const wasRouted = routing.wasRouted;\n\n const entryId = ctx.idGenerator();\n const entryTime = new Date().toISOString();\n\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n requestHeaders,\n durationMs: 0,\n };\n extra.sendHttpStart(startEntry).catch((err: Error) => {\n if (ctx.logger) {\n ctx.logger.error('Failed to send HTTP start', err);\n }\n });\n }\n\n let res: Response;\n try {\n res = await this.originalFetch!(effectiveResource, effectiveInit);\n } catch (networkError) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n requestHeaders,\n durationMs: Math.round(duration),\n error: (networkError as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n throw networkError;\n }\n\n let responseHeaders: Record<string, string> = {};\n\n try {\n const duration = performance.now() - start;\n\n responseHeaders = this.extractHeaders(res.headers);\n\n const capture = await this.captureResponse({\n res,\n method,\n url,\n originalUrl,\n requestBody,\n requestHeaders,\n responseHeaders,\n wasRouted,\n entryId,\n entryTime,\n start,\n });\n\n res = capture.response;\n let responseBody = capture.responseBody;\n let processedBody = capture.processedBody;\n const bodyBytes = capture.bodyBytes;\n const isEventStream = capture.isEventStream;\n\n if (capture.capturePromise) {\n (res as any).__capturePromise = capture.capturePromise;\n }\n\n if (!isEventStream && wasRouted && ctx.router && responseBody && responseBody !== '<response body error>') {\n try {\n responseBody = ctx.router.transformResponse(responseBody);\n } catch (err) {\n // Transformation failed, use original response\n }\n }\n\n if (!isEventStream) {\n const urlForProcessing = wasRouted ? originalUrl : url;\n if (ctx.llmProvider.isLLMCall(urlForProcessing) && responseBody && responseBody !== '<response body error>') {\n try {\n processedBody = ctx.responseProcessor(urlForProcessing, responseBody);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? 
err.message : String(err)}` };\n }\n }\n }\n\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n requestHeaders,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(duration),\n responseBody,\n responseHeaders,\n processedBody,\n bodyBytes,\n };\n ctx.handleLogEntry(logEntry);\n\n if (\n !isEventStream &&\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(url, method, requestBody)\n ) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n\n return res;\n } catch (error) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n requestHeaders,\n durationMs: Math.round(duration),\n error: (error as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n return res;\n }\n }) as typeof fetch;\n }\n\n restore(): void {\n if (this.originalFetch) {\n globalThis.fetch = this.originalFetch;\n this.originalFetch = null;\n }\n }\n\n private safeGetUrl(resource: any): string {\n try {\n if (typeof resource === 'string') return resource;\n if (resource?.url) return resource.url;\n } catch {\n // ignore\n }\n return '';\n }\n\n private extractHeaders(source: any): Record<string, string> {\n const headers: Record<string, string> = {};\n if (!source) return headers;\n\n try {\n if (source instanceof Headers) {\n source.forEach((value, key) => {\n headers[key] = value;\n });\n } else if (Array.isArray(source)) {\n for (const [key, value] of source) {\n headers[key] = value;\n }\n } else if (typeof source === 'object') {\n for (const [key, value] of Object.entries(source)) {\n headers[key] = Array.isArray(value) ? value.join(', ') : String(value);\n }\n }\n } catch {\n return {};\n }\n\n return headers;\n }\n\n private stringifyRequestBody(body: any): string {\n try {\n if (!body) return '';\n if (typeof body === 'string') return body;\n if (body instanceof URLSearchParams) return body.toString();\n if (typeof FormData !== 'undefined' && body instanceof FormData) {\n const obj: Record<string, any> = {};\n for (const [key, value] of body.entries()) {\n obj[key] = typeof value === 'string' ? 
value : '[File|Blob]';\n }\n return JSON.stringify(obj);\n }\n if (body instanceof Uint8Array || Buffer.isBuffer(body)) {\n return Buffer.from(body).toString('utf8');\n }\n if (typeof body === 'object') {\n return JSON.stringify(body);\n }\n return '';\n } catch {\n return '<request body error>';\n }\n }\n\n private async applyRouting(args: {\n resource: any;\n init: RequestInit | undefined;\n url: string;\n enhancedBody: string;\n requestHeaders: Record<string, string>;\n }): Promise<{\n resource: any;\n init: RequestInit | undefined;\n url: string;\n enhancedBody: string;\n requestHeaders: Record<string, string>;\n wasRouted: boolean;\n }> {\n const ctx = this.ctx;\n let { resource, init, url, enhancedBody, requestHeaders } = args;\n let wasRouted = false;\n\n if (!ctx.router) {\n return { resource, init, url, enhancedBody, requestHeaders, wasRouted };\n }\n\n let shouldRoute = false;\n try {\n shouldRoute = ctx.router.shouldRoute(url, enhancedBody);\n } catch (err) {\n if (ctx.logger) {\n ctx.logger.error('[FetchInstrument] shouldRoute() failed', err as Error);\n }\n }\n\n if (!shouldRoute) {\n // When not routing, let the router clean the request body (removes ChatGPT thinking blocks)\n if (ctx.llmProvider.isLLMCall(url) && enhancedBody) {\n const cleaned = ctx.router.cleanRequestForPassthrough(enhancedBody);\n if (cleaned !== enhancedBody) {\n enhancedBody = cleaned;\n\n // Update the resource and init with cleaned body\n if (typeof resource === 'object' && resource) {\n resource = new Request(url, { ...resource, body: enhancedBody });\n }\n\n if (init) {\n init.body = enhancedBody;\n }\n }\n }\n\n return { resource, init, url, enhancedBody, requestHeaders, wasRouted };\n }\n\n try {\n const transformed = await ctx.router.transformRequest(url, enhancedBody);\n url = transformed.url;\n enhancedBody = transformed.body;\n wasRouted = true;\n\n if (typeof resource === 'string') {\n resource = transformed.url;\n } else if (typeof resource === 'object' && resource) {\n resource = new Request(transformed.url, resource as RequestInit);\n }\n\n if (!init) {\n init = {};\n }\n init.body = transformed.body;\n\n if (transformed.headers) {\n this.mergeHeadersIntoInit(init, transformed.headers);\n requestHeaders = { ...requestHeaders, ...transformed.headers };\n }\n } catch (err) {\n wasRouted = false;\n if (ctx.logger) {\n ctx.logger.error('[FetchInstrument] Failed to route LLM request', err as Error);\n }\n }\n\n return { resource, init, url, enhancedBody, requestHeaders, wasRouted };\n }\n\n private mergeHeadersIntoInit(init: RequestInit, headers: Record<string, string>): void {\n if (!headers || Object.keys(headers).length === 0) return;\n\n if (!init.headers) {\n init.headers = { ...headers };\n return;\n }\n\n if (init.headers instanceof Headers) {\n const target = init.headers as Headers;\n Object.entries(headers).forEach(([key, value]) => target.set(key, value));\n return;\n }\n\n if (Array.isArray(init.headers)) {\n const headerArray = init.headers as Array<[string, string]>;\n Object.entries(headers).forEach(([key, value]) => {\n headerArray.push([key, value]);\n });\n return;\n }\n\n init.headers = { ...(init.headers as Record<string, string>), ...headers };\n }\n\n private async captureResponse(args: {\n res: Response;\n method: string;\n url: string;\n originalUrl: string;\n requestBody: string;\n requestHeaders: Record<string, string>;\n responseHeaders: Record<string, string>;\n wasRouted: boolean;\n entryId: string;\n entryTime: string;\n start: number;\n }): Promise<CaptureResult> 
{\n const { res, requestHeaders } = args;\n const ctx = this.ctx;\n const extra = this.extra;\n\n const contentType = res.headers.get('content-type') || '';\n const requestAccept = requestHeaders['accept'] || '';\n const isEventStream = /text\\/event-stream/i.test(contentType) || /text\\/event-stream/i.test(requestAccept);\n const isTextLike =\n /^(text\\/|application\\/(json|xml|javascript))/i.test(contentType) || /(json|xml|html)/i.test(contentType);\n\n if (!isEventStream) {\n let responseBody = '';\n let bodyBytes = 0;\n\n try {\n const clone = res.clone();\n const arrayBuffer = await clone.arrayBuffer();\n bodyBytes = arrayBuffer.byteLength;\n\n if (isTextLike) {\n if (arrayBuffer.byteLength <= FetchInstrument.MAX_LOG_BYTES) {\n responseBody = Buffer.from(arrayBuffer).toString('utf8');\n } else {\n const buf = Buffer.from(arrayBuffer.slice(0, FetchInstrument.MAX_LOG_BYTES));\n const remaining = arrayBuffer.byteLength - FetchInstrument.MAX_LOG_BYTES;\n responseBody = `${buf.toString('utf8')}\\n...[truncated ${remaining.toLocaleString()} bytes]`;\n }\n } else {\n responseBody = `<${bodyBytes} bytes binary>`;\n }\n } catch {\n responseBody = '<response body error>';\n }\n\n return {\n response: res,\n responseBody,\n processedBody: null,\n bodyBytes,\n isEventStream: false,\n };\n }\n\n if (!res.body) {\n return {\n response: res,\n responseBody: '<streaming response not buffered>',\n processedBody: null,\n bodyBytes: 0,\n isEventStream: true,\n };\n }\n\n if (args.wasRouted && ctx.router) {\n const reader = res.body.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n totalBytes += value.byteLength;\n }\n\n const capturedBuffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));\n let capturedText = capturedBuffer.toString('utf8');\n let processedBody: any = null;\n\n try {\n capturedText = ctx.router.transformResponse(capturedText);\n } catch (err) {\n // Transformation failed, use original response\n }\n\n if (ctx.llmProvider.isLLMCall(args.originalUrl) && capturedText) {\n try {\n processedBody = ctx.responseProcessor(args.originalUrl, capturedText);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? 
err.message : String(err)}` };\n }\n }\n\n const finalLogEntry: NetworkLogEntry = {\n id: args.entryId,\n time: args.entryTime,\n transport: 'fetch',\n method: args.method,\n url: args.url,\n requestBody: args.requestBody,\n requestHeaders: args.requestHeaders,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(performance.now() - args.start),\n responseBody: capturedText,\n responseHeaders: args.responseHeaders,\n processedBody,\n bodyBytes: totalBytes,\n };\n ctx.handleLogEntry(finalLogEntry);\n if (\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(args.url, args.method, args.requestBody)\n ) {\n extra.sendHttpUpdate(finalLogEntry).catch(() => {});\n }\n\n const transformedStream = new ReadableStream({\n start(controller) {\n const encoder = new TextEncoder();\n controller.enqueue(encoder.encode(capturedText));\n controller.close();\n },\n });\n\n const response = new Response(transformedStream, {\n status: res.status,\n statusText: res.statusText,\n headers: res.headers,\n });\n\n return {\n response,\n responseBody: capturedText,\n processedBody,\n bodyBytes: totalBytes,\n isEventStream: true,\n };\n }\n\n const [stream1, stream2] = res.body.tee();\n const capturePromise = (async () => {\n try {\n const reader = stream1.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n totalBytes += value.byteLength;\n }\n\n const capturedBuffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));\n const capturedText = capturedBuffer.toString('utf8');\n let processedBody: any = null;\n\n if (ctx.llmProvider.isLLMCall(args.url) && capturedText) {\n try {\n processedBody = ctx.responseProcessor(args.url, capturedText);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? 
err.message : String(err)}` };\n }\n }\n\n const finalLogEntry: NetworkLogEntry = {\n id: args.entryId,\n time: args.entryTime,\n transport: 'fetch',\n method: args.method,\n url: args.url,\n requestBody: args.requestBody,\n requestHeaders: args.requestHeaders,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(performance.now() - args.start),\n responseBody: capturedText,\n responseHeaders: args.responseHeaders,\n processedBody,\n bodyBytes: totalBytes,\n };\n ctx.handleLogEntry(finalLogEntry);\n if (\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(args.url, args.method, args.requestBody)\n ) {\n extra.sendHttpUpdate(finalLogEntry).catch(() => {});\n }\n\n return { responseBody: capturedText, processedBody, bodyBytes: totalBytes };\n } catch (err) {\n return { responseBody: '<stream capture error>', processedBody: null, bodyBytes: 0 };\n }\n })();\n\n const response = new Response(stream2, {\n status: res.status,\n statusText: res.statusText,\n headers: res.headers,\n });\n\n return {\n response,\n responseBody: '<streaming in progress>',\n processedBody: null,\n bodyBytes: 0,\n isEventStream: true,\n capturePromise,\n };\n }\n}\n","import { performance } from 'node:perf_hooks';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface HttpInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\nexport class HttpInstrument {\n private originals: any = null;\n constructor(\n private ctx: CommonContext,\n private enableHttps: boolean,\n private _extra: HttpInstrumentConfig = {\n isValidMessageRequest: () => false,\n sendHttpStart: async () => {},\n sendHttpUpdate: async () => {},\n },\n ) {}\n\n patch(): void {\n try {\n const http = require('http');\n const https = require('https');\n if (this.originals) return;\n\n this.originals = {\n httpRequest: http.request,\n httpsRequest: https.request,\n httpGet: http.get,\n httpsGet: https.get,\n };\n const self = this;\n const maxBody = this.ctx.maxBodyBytes;\n\n function buildUrl(options: any, protocolDefault: string): string {\n try {\n if (typeof options === 'string') return options;\n if (options instanceof URL) return options.toString();\n const protocol = options.protocol || protocolDefault;\n const host = options.hostname || options.host || 'localhost';\n const portPart = options.port ? `:${options.port}` : '';\n const path = options.path || options.pathname || '/';\n return `${protocol}//${host}${portPart}${path}`;\n } catch {\n return '';\n }\n }\n\n function wrapRequest(original: Function, protocol: 'http:' | 'https:') {\n return function wrappedRequest(options: any, callback?: any) {\n const url = buildUrl(options, protocol);\n const method = (typeof options === 'object' && options.method ? 
options.method : 'GET').toUpperCase();\n\n const startHr = performance.now();\n const entryId = self.ctx.idGenerator();\n const entryTime = new Date().toISOString();\n const requestBodyBuffers: Buffer[] = [];\n let requestLen = 0;\n let requestBodySent = false;\n\n const req = original(options, (res: any) => {\n const responseBuffers: Buffer[] = [];\n let responseBytes = 0;\n let capturedBytes = 0; // track captured (pushed) bytes to avoid O(n^2)\n const contentTypeHeader = () => String(res.headers['content-type'] || '');\n res.on('data', (chunk: Buffer) => {\n responseBytes += chunk.length;\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n\n // For SSE streams, capture everything without truncation\n if (isEventStream) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n // For non-SSE, apply the maxBody limit\n if (capturedBytes >= maxBody) return; // already at limit\n const remaining = maxBody - capturedBytes;\n if (chunk.length <= remaining) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n responseBuffers.push(chunk.subarray(0, remaining));\n capturedBytes += remaining;\n }\n }\n });\n res.on('end', () => {\n const duration = performance.now() - startHr;\n let responseBody = '';\n try {\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n const bufferRaw = Buffer.concat(responseBuffers);\n const wasTruncated = responseBytes > bufferRaw.length || bufferRaw.length === maxBody;\n let buffer = bufferRaw;\n\n // Handle decompression for non-SSE responses\n if (!isEventStream) {\n const contentEncoding = String(res.headers['content-encoding'] || '').toLowerCase();\n // Only attempt decompression if we captured the full body (not truncated) & encoding present\n const canDecompress = !wasTruncated && buffer.length && /(gzip|br|deflate)/.test(contentEncoding);\n if (canDecompress) {\n try {\n const zlib = require('node:zlib');\n if (contentEncoding.includes('gzip')) buffer = zlib.gunzipSync(bufferRaw);\n else if (contentEncoding.includes('br')) buffer = zlib.brotliDecompressSync(bufferRaw);\n else if (contentEncoding.includes('deflate')) buffer = zlib.inflateSync(bufferRaw);\n } catch {\n // swallow decompression errors; fall back to raw (likely already decoded or truncated)\n }\n }\n }\n\n // Convert buffer to string for text-based responses (including SSE)\n const isText = /text\\//i.test(ct) || /application\\/(json|xml|javascript)/i.test(ct);\n if (isText || isEventStream) {\n responseBody = buffer.toString('utf8');\n // Don't add truncation message for SSE streams (we capture full SSE)\n if (wasTruncated && !isEventStream) responseBody += '\\n...[truncated]';\n } else if (buffer.length) {\n responseBody = `<${buffer.length} bytes${wasTruncated ? 
' (truncated)' : ''} binary>`;\n }\n } catch {\n responseBody = '<response body error>';\n }\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n let processedBody: any = null;\n if (self.ctx.llmProvider.isLLMCall(url)) {\n try {\n // Always use the responseProcessor for LLM calls to get proper parsing\n processedBody = self.ctx.responseProcessor(url, responseBody);\n } catch (e) {\n processedBody = {\n parts: [{ type: 'text', text: responseBody || '<empty response>' }],\n metadata: { error: String(e) },\n };\n }\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n status: res.statusCode,\n ok: res.statusCode >= 200 && res.statusCode < 300,\n durationMs: Math.round(duration),\n responseBody,\n processedBody,\n bodyBytes: responseBytes,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update when response is complete\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n res.on('error', (err: Error) => {\n const duration = performance.now() - startHr;\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: err.message,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update for error case\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n if (callback) callback(res);\n });\n const originalWrite = req.write;\n const originalEnd = req.end;\n req.write = function (chunk: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n } catch {}\n return originalWrite.call(this, chunk, encoding, cb);\n };\n req.end = function (chunk?: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n\n // Send HTTP start when request is ending (body is complete)\n if (!requestBodySent) {\n requestBodySent = true;\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n let requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n\n const isValid = self._extra.isValidMessageRequest(url, method, requestBody);\n\n if (self._extra.agentHttpService && self._extra.sessionId && isValid) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: 0,\n };\n self._extra.sendHttpStart(startEntry).catch(() => {});\n }\n } catch {}\n }\n } catch {}\n return originalEnd.call(this, chunk, encoding, cb);\n };\n return req;\n };\n }\n\n http.request = wrapRequest(this.originals.httpRequest, 'http:');\n http.get = function wrappedGet(o: any, cb?: any) {\n const r = (http.request as any)(o, cb);\n r.end();\n return r;\n };\n if (this.enableHttps !== false) {\n https.request = wrapRequest(this.originals.httpsRequest, 'https:');\n https.get = function wrappedGet(o: any, cb?: any) {\n const r = (https.request as any)(o, cb);\n r.end();\n return r;\n };\n }\n } catch (e) {\n // Silent instrumentation - no logging\n }\n }\n\n restore(): void {\n if (this.originals) {\n try {\n const http = require('http');\n const https = require('https');\n http.request = this.originals.httpRequest;\n http.get = this.originals.httpGet;\n https.request = this.originals.httpsRequest;\n https.get = this.originals.httpsGet;\n } catch {}\n this.originals = null;\n }\n }\n}\n","/**\n * LLM Network Interceptor Class\n *\n * Handles network interception and logging for LLM-related HTTP requests.\n * Patches global fetch and optionally http/https modules to capture request/response data.\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../../config/subenv';\nimport type { AgentHttpService } from '../../services/AgentHttpService';\nimport { FileLogger } from '../loggers/file';\nimport type { MessagePart } from '../messages';\nimport { LlmParser } from '../parsers';\nimport { LlmProvider } from '../providers';\nimport { FetchInstrument } from './instruments/fetchInstrument';\nimport { HttpInstrument } from './instruments/httpInstrument';\nimport type { CommonContext, NetworkLogEntry } from './instruments/types';\n\nexport type { NetworkLogEntry } from './instruments/types';\n\nexport interface LlmInterceptorConfig {\n maxBodyBytes?: number;\n enableHttpsPatching?: boolean;\n enableDebug?: boolean;\n logFilter?: (entry: NetworkLogEntry) => boolean;\n responseProcessor?: (url: string, responseBody: string) => any;\n llmProvider?: LlmProvider;\n idGenerator?: () => string;\n agentHttpService?: AgentHttpService;\n sessionId?: string;\n logFilePath?: string; // optional path to write newline-delimited JSON log entries\n processedLogFilePath?: string; // optional path to write processed body (pretty JSON)\n router?: any; // LlmRouter instance for provider routing\n logger?: FileLogger; // optional logger instance to use for debug 
logging\n}\n\nexport class LlmInterceptor {\n private static instance: LlmInterceptor | null = null;\n private isPatched = false;\n private config: Required<\n Omit<LlmInterceptorConfig, 'llmProvider' | 'idGenerator' | 'agentHttpService' | 'sessionId' | 'router' | 'logger'>\n >;\n private llmProvider: LlmProvider;\n private idGenerator: () => string;\n private agentHttpService?: AgentHttpService;\n private sessionId?: string;\n private router?: any; // LlmRouter instance\n private activeRequests = new Map<string, string>();\n private rawLogger?: FileLogger; // logger for original request/response\n private errorLogger?: FileLogger; // logger for original request/response\n private processedLogger?: FileLogger; // logger for url + processed response\n private debugLogger?: FileLogger; // optional logger for debug output\n\n // Instruments\n private fetchInstrument?: FetchInstrument;\n private httpInstrument?: HttpInstrument;\n\n constructor(config: LlmInterceptorConfig = {}) {\n // Use provided LlmProvider or create a new instance\n this.llmProvider = config.llmProvider || new LlmProvider();\n\n // Use provided idGenerator or default to ulid\n this.idGenerator = config.idGenerator || ulid;\n\n // Store AgentHttpService and sessionId for promise lifecycle logging\n this.agentHttpService = config.agentHttpService;\n this.sessionId = config.sessionId;\n\n // Store router if provided\n this.router = config.router;\n\n // Store debug logger if provided\n this.debugLogger = config.logger;\n\n this.config = {\n maxBodyBytes: config.maxBodyBytes ?? 8192,\n enableHttpsPatching: config.enableHttpsPatching ?? true, // default to true so https requests are intercepted\n enableDebug: config.enableDebug ?? false,\n logFilter:\n config.logFilter ??\n ((entry) =>\n !!(\n entry.url &&\n !entry.url.includes('statsig') &&\n !entry.url.includes('registry.npmjs.org') &&\n !entry.url.startsWith('data:')\n )),\n // Default processor now parses streaming SSE via LlmParser\n responseProcessor:\n config.responseProcessor ??\n ((url: string, responseBody: string) => {\n try {\n return LlmParser.parseStreamingResponse(url, responseBody);\n } catch {\n return null;\n }\n }),\n } as typeof this.config;\n\n // Use system temp directory for automatic garbage collection\n const baseLogDir = path.join(os.tmpdir(), 'agiflow-agents', 'llm', `session-${process.pid}`);\n const logFilePath = config.logFilePath || path.join(baseLogDir, 'network.log');\n const errorLogFilePath = path.join(baseLogDir, 'error.log'); // Remove config.errorLogFilePath which doesn't exist\n const processedLogFilePath = config.processedLogFilePath || path.join(baseLogDir, 'processed.jsonl');\n\n // Initialize loggers\n try {\n this.rawLogger = new FileLogger(logFilePath);\n } catch {}\n try {\n this.errorLogger = new FileLogger(errorLogFilePath);\n } catch {}\n try {\n this.processedLogger = new FileLogger(processedLogFilePath);\n } catch {}\n }\n\n /**\n * Get singleton instance of LlmInterceptor\n */\n static getInstance(config?: LlmInterceptorConfig): LlmInterceptor {\n if (!LlmInterceptor.instance) {\n LlmInterceptor.instance = new LlmInterceptor(config);\n }\n return LlmInterceptor.instance;\n }\n\n /**\n * Start network interception\n */\n start(): void {\n if (this.isPatched) {\n return;\n }\n\n const common: CommonContext = {\n idGenerator: this.idGenerator,\n maxBodyBytes: this.config.maxBodyBytes,\n enableDebug: this.config.enableDebug,\n logFilter: this.config.logFilter,\n responseProcessor: this.config.responseProcessor,\n llmProvider: 
this.llmProvider,\n handleLogEntry: (e) => this.handleLogEntry(e),\n router: this.router,\n logger: this.debugLogger,\n };\n\n this.fetchInstrument = new FetchInstrument(common, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n });\n this.fetchInstrument.patch();\n\n this.httpInstrument = new HttpInstrument(common, this.config.enableHttpsPatching, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n });\n this.httpInstrument.patch();\n\n this.isPatched = true;\n }\n\n /**\n * Stop network interception and restore original functions\n */\n stop(): void {\n if (!this.isPatched) {\n return;\n }\n this.fetchInstrument?.restore();\n this.httpInstrument?.restore();\n this.isPatched = false;\n }\n\n /**\n * Check if interceptor is currently active\n */\n isActive(): boolean {\n return this.isPatched;\n }\n\n /**\n * Update configuration\n */\n updateConfig(config: Partial<LlmInterceptorConfig>): void {\n this.config = { ...this.config, ...config };\n }\n\n /**\n * Handle a log entry - filter and pass to log handler if configured\n */\n private handleLogEntry(entry: NetworkLogEntry): void {\n // Raw logger writes the full network entry\n if (this.rawLogger) {\n this.rawLogger.info('network_entry', { networkLog: entry });\n }\n if (this.debugLogger) {\n this.debugLogger.info('network_entry', { networkLog: entry });\n }\n // Processed logger writes subset if processed body present\n if (this.processedLogger && this.config.logFilter(entry) && entry.processedBody) {\n this.processedLogger.info('processed_entry', {\n id: entry.id,\n url: entry.url,\n method: entry.method,\n status: entry.status,\n durationMs: entry.durationMs,\n timestamp: entry.time,\n processedBody: entry.processedBody,\n });\n }\n // Silent instrumentation - no debug logging\n }\n\n /**\n * Check if this is a valid message HTTP request that should be logged as an agent message\n */\n private isValidMessageRequest(url: string, method: string, requestBody: string): boolean {\n if (this.rawLogger) {\n this.rawLogger.info('isValidMessageRequest checking', {\n url,\n method,\n hasBody: !!requestBody,\n bodyLength: requestBody?.length,\n });\n }\n\n // Only log LLM calls\n if (!this.llmProvider.isLLMCall(url)) {\n if (this.rawLogger) {\n this.rawLogger.info('Not an LLM URL, skipping', { url });\n }\n if (this.rawLogger && this.config.enableDebug) {\n this.rawLogger.debug('Request validation failed: Not an LLM URL', {\n url,\n method,\n });\n }\n return false;\n }\n\n // For Google/Gemini APIs, only track SSE streaming requests\n if (url.includes('cloudcode-pa.googleapis.com') || url.includes('generativelanguage.googleapis.com')) {\n // Only track streamGenerateContent (SSE), skip countTokens, generateContent, loadCodeAssist, etc.\n if (url.includes('streamGenerateContent')) {\n return true;\n } else {\n return false;\n }\n }\n\n // Only log POST requests (actual message requests)\n if (method !== 'POST') {\n return false;\n }\n\n // Check if the request body contains actual message content\n if (!requestBody) {\n return false;\n }\n\n try {\n const body = JSON.parse(requestBody);\n\n 
// Look for common LLM message patterns\n const hasMessages = body.messages && Array.isArray(body.messages) && body.messages.length > 0;\n const hasPrompt = body.prompt && typeof body.prompt === 'string';\n const hasInput = body.input && typeof body.input === 'string';\n const hasContents = body.contents && Array.isArray(body.contents) && body.contents.length > 0; // Gemini format\n\n // Gemini specific: check for request.contents\n const hasRequestContents =\n body.request?.contents && Array.isArray(body.request.contents) && body.request.contents.length > 0;\n\n // Must have actual content to be considered a valid message request\n const isValid = hasMessages || hasPrompt || hasInput || hasContents || hasRequestContents;\n\n return isValid;\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request body in isValidMessageRequest', {\n url,\n method,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n // If we can't parse the JSON, it's not a valid message request\n return false;\n }\n }\n\n /**\n * Send HTTP start event using startMessage with request content\n */\n private async sendHttpStart(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) {\n return;\n }\n\n // Log start request\n if (this.rawLogger) {\n this.rawLogger.info('http_start', { entry });\n }\n\n // Extract the request content to include as parts\n let requestParts: Array<{ type: string; text?: string }> = [];\n if (entry.requestBody) {\n try {\n const requestData = JSON.parse(entry.requestBody);\n\n // Extract user input from common LLM API formats\n if (requestData.messages && Array.isArray(requestData.messages)) {\n // OpenAI/Anthropic format - find the last user message\n const userMessage = requestData.messages.filter((msg: any) => msg.role === 'user').pop();\n if (userMessage?.content) {\n if (typeof userMessage.content === 'string') {\n requestParts.push({ type: 'text', text: userMessage.content });\n } else if (Array.isArray(userMessage.content)) {\n // Handle content array format\n requestParts = userMessage.content.map((part: any) => ({\n type: part.type || 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } else if (requestData.prompt) {\n // Legacy prompt format\n requestParts.push({ type: 'text', text: requestData.prompt });\n } else if (requestData.input) {\n // Some APIs use 'input'\n requestParts.push({ type: 'text', text: requestData.input });\n } else if (requestData.contents && Array.isArray(requestData.contents)) {\n // Gemini format - extract the last user content\n const userContent = requestData.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n } else if (requestData.request?.contents && Array.isArray(requestData.request.contents)) {\n // Gemini format with nested request - extract the last user content\n const userContent = requestData.request.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request data in sendHttpStart', {\n url: entry.url,\n error: err instanceof Error ? 
err.message : String(err),\n });\n }\n // If we can't parse the request, just include a placeholder\n requestParts.push({ type: 'text', text: 'LLM request initiated...' });\n }\n }\n\n // Extract only necessary metadata for frontend\n const startMetadata = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n request_started: true,\n };\n\n try {\n if (this.rawLogger) {\n this.rawLogger.info('Calling agentHttpService.startMessage', {\n sessionId: this.sessionId,\n entryId: entry.id,\n });\n }\n\n const response = await this.agentHttpService.startMessage(this.sessionId, {\n messageType: 'output',\n metadata: startMetadata,\n });\n\n // Store the message ID for later update\n this.activeRequests.set(entry.id, response.messageId);\n\n if (this.rawLogger) {\n this.rawLogger.info('startMessage successful', {\n entryId: entry.id,\n messageId: response.messageId,\n });\n }\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP start message', {\n url: entry.url,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n }\n }\n\n /**\n * Send HTTP completion event using updateMessage\n */\n private async sendHttpUpdate(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) return;\n\n // Log update request\n if (this.rawLogger) {\n this.rawLogger.info('http_update', { entry });\n }\n\n const messageId = this.activeRequests.get(entry.id);\n if (!messageId) {\n // No pending message to update, skip\n return;\n }\n\n // Validate that we have actual content to send\n // Check if processedBody has valid parts\n const hasParts = entry.processedBody?.message?.parts?.length > 0 || entry.processedBody?.parts?.length > 0;\n\n if (!hasParts) {\n // Don't update if there's no content - keep the message in pending state\n if (this.rawLogger) {\n this.rawLogger.info('Skipping update - no valid parts yet', {\n entryId: entry.id,\n messageId,\n });\n }\n return;\n }\n\n // Extract only necessary metadata for frontend: model, llm_provider, token usage\n const completionMetadata: Record<string, any> = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n status: entry.status,\n duration: entry.durationMs,\n };\n\n // Add model and token usage if available from processed response\n if (entry.processedBody?.metadata) {\n const processedMeta = entry.processedBody.metadata;\n if (processedMeta.model) completionMetadata.model = processedMeta.model;\n if (processedMeta.usage) completionMetadata.usage = processedMeta.usage;\n if (processedMeta.total_tokens) completionMetadata.total_tokens = processedMeta.total_tokens;\n if (processedMeta.prompt_tokens) completionMetadata.prompt_tokens = processedMeta.prompt_tokens;\n if (processedMeta.completion_tokens) completionMetadata.completion_tokens = processedMeta.completion_tokens;\n if (processedMeta.cost) completionMetadata.cost = processedMeta.cost;\n }\n\n // Use already-parsed processedBody to extract response content as parts\n let responseParts: MessagePart[] = [];\n if (entry.processedBody) {\n const parsed = entry.processedBody;\n\n if (parsed?.message?.parts && Array.isArray(parsed.message.parts)) {\n // Accept parts array even if empty (valid for token counting, etc.)\n responseParts = parsed.message.parts;\n } else if (parsed?.parts && Array.isArray(parsed.parts)) {\n // httpInstrument.ts error handler creates parts directly without message wrapper\n responseParts = parsed.parts;\n } else if (parsed?.error) {\n 
responseParts.push({ type: 'text' as const, text: `Parser error: ${parsed.error}` });\n } else {\n // No parts is valid for some API calls (countTokens, loadCodeAssist, etc.)\n responseParts = [];\n }\n } else if (entry.responseBody) {\n // Fallback: create a simple text part for unsupported providers\n responseParts.push({ type: 'text' as const, text: 'LLM response completed' });\n }\n\n try {\n const updateData: any = {\n parts: responseParts,\n metadata: completionMetadata,\n mId: entry.processedBody?.message?.id,\n };\n\n // Only include raw data if explicitly enabled\n if (SubEnvManager.isSaveRawEnabled) {\n updateData.raw = JSON.stringify(entry);\n }\n\n await this.agentHttpService.updateMessage(this.sessionId, messageId, updateData);\n\n // Clean up the active request tracking\n this.activeRequests.delete(entry.id);\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP update message', {\n url: entry.url,\n messageId,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n // Clean up even on error\n this.activeRequests.delete(entry.id);\n }\n }\n}\n\nexport default LlmInterceptor;\n","/**\n * Codex Authentication Service\n *\n * Handles OAuth2 token management for OpenAI API access via codex auth file.\n * Based on https://github.com/openai/codex/blob/main/codex-rs/core/src/auth.rs\n *\n * Features:\n * - Reads tokens from $HOME/.codex/auth.json\n * - Checks token expiration by decoding JWT\n * - Refreshes access token using refresh token when expired\n * - Saves updated tokens back to auth file\n */\n\nimport * as fs from 'node:fs';\nimport * as os from 'node:os';\nimport * as path from 'node:path';\n\ninterface CodexTokens {\n id_token: string;\n access_token: string;\n refresh_token: string;\n account_id: string;\n}\n\ninterface CodexAuthFile {\n OPENAI_API_KEY: string | null;\n tokens: CodexTokens;\n last_refresh: string;\n}\n\ninterface JWTPayload {\n exp: number;\n iat: number;\n [key: string]: any;\n}\n\nexport class CodexAuth {\n private static readonly AUTH_FILE_PATH = path.join(os.homedir(), '.codex', 'auth.json');\n private static readonly TOKEN_REFRESH_URL = 'https://auth.openai.com/oauth/token';\n private static readonly CLIENT_ID = 'app_EMoamEEZ73f0CkXaXp7hrann'; // From codex\n\n private logger?: {\n debug: (message: string, data?: any) => void;\n warn: (message: string, data?: any) => void;\n error: (message: string, error?: Error | any) => void;\n };\n\n constructor(logger?: any) {\n this.logger = logger;\n }\n\n /**\n * Get account ID from auth file\n */\n async getAccountId(): Promise<string | null> {\n try {\n const authData = this.readAuthFile();\n if (!authData || !authData.tokens) {\n return null;\n }\n return authData.tokens.account_id || null;\n } catch (error) {\n this.logger?.error('[CodexAuth] Error getting account ID', error as Error);\n return null;\n }\n }\n\n /**\n * Get a valid access token, refreshing if necessary\n */\n async getAccessToken(): Promise<string | null> {\n try {\n // Read auth file\n const authData = this.readAuthFile();\n if (!authData || !authData.tokens) {\n this.logger?.warn('[CodexAuth] No tokens found in auth file');\n return null;\n }\n\n let { access_token, refresh_token } = authData.tokens;\n\n // Remove \"Bearer \" prefix if present in the access token\n if (typeof access_token === 'string' && access_token.startsWith('Bearer ')) {\n access_token = access_token.substring(7); // Remove \"Bearer \" (7 characters)\n }\n\n // Check if access 
token is expired\n const isExpired = this.isTokenExpired(access_token);\n\n if (!isExpired) {\n return access_token;\n }\n\n // Token is expired, refresh it\n const newTokens = await this.refreshAccessToken(refresh_token);\n\n if (!newTokens) {\n this.logger?.error('[CodexAuth] Failed to refresh access token');\n return null;\n }\n\n // Save updated tokens\n this.saveTokens(newTokens);\n\n return newTokens.access_token;\n } catch (error) {\n this.logger?.error('[CodexAuth] Error getting access token', error as Error);\n return null;\n }\n }\n\n /**\n * Read codex auth file\n */\n private readAuthFile(): CodexAuthFile | null {\n try {\n if (!fs.existsSync(CodexAuth.AUTH_FILE_PATH)) {\n this.logger?.warn('[CodexAuth] Auth file does not exist', {\n path: CodexAuth.AUTH_FILE_PATH,\n });\n return null;\n }\n\n const content = fs.readFileSync(CodexAuth.AUTH_FILE_PATH, 'utf-8');\n return JSON.parse(content) as CodexAuthFile;\n } catch (error) {\n this.logger?.error('[CodexAuth] Failed to read auth file', error as Error);\n return null;\n }\n }\n\n /**\n * Check if JWT token is expired\n * Decodes the JWT and checks the exp claim\n */\n private isTokenExpired(token: string): boolean {\n try {\n const payload = this.decodeJWT(token);\n if (!payload || !payload.exp) {\n return true;\n }\n\n // Check if token expires within next 5 minutes (buffer time)\n const expiresAt = payload.exp * 1000; // Convert to milliseconds\n const now = Date.now();\n const bufferMs = 5 * 60 * 1000; // 5 minutes\n\n return expiresAt - now < bufferMs;\n } catch (error) {\n this.logger?.error('[CodexAuth] Failed to decode token', error as Error);\n return true;\n }\n }\n\n /**\n * Decode JWT token (without verification)\n * We only need to read the exp claim\n */\n private decodeJWT(token: string): JWTPayload | null {\n try {\n // JWT format: header.payload.signature\n const parts = token.split('.');\n if (parts.length !== 3) {\n return null;\n }\n\n // Decode base64url encoded payload\n const payload = parts[1];\n const decoded = Buffer.from(payload, 'base64url').toString('utf-8');\n return JSON.parse(decoded) as JWTPayload;\n } catch (error) {\n this.logger?.error('[CodexAuth] Failed to decode JWT', error as Error);\n return null;\n }\n }\n\n /**\n * Refresh access token using refresh token\n * Calls OpenAI's OAuth2 token endpoint\n */\n private async refreshAccessToken(refreshToken: string): Promise<CodexTokens | null> {\n try {\n const response = await fetch(CodexAuth.TOKEN_REFRESH_URL, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/x-www-form-urlencoded',\n },\n body: new URLSearchParams({\n grant_type: 'refresh_token',\n refresh_token: refreshToken,\n client_id: CodexAuth.CLIENT_ID,\n }),\n });\n\n if (!response.ok) {\n const errorText = await response.text();\n this.logger?.error('[CodexAuth] Token refresh failed', {\n status: response.status,\n error: errorText,\n });\n return null;\n }\n\n const data = await response.json();\n\n // Remove \"Bearer \" prefix if present in the access token\n let accessToken = data.access_token;\n if (typeof accessToken === 'string' && accessToken.startsWith('Bearer ')) {\n accessToken = accessToken.substring(7); // Remove \"Bearer \" (7 characters)\n }\n\n return {\n id_token: data.id_token,\n access_token: accessToken,\n refresh_token: data.refresh_token || refreshToken, // Use new refresh token if provided\n account_id: data.account_id || '',\n };\n } catch (error) {\n this.logger?.error('[CodexAuth] Failed to refresh token', error as Error);\n return null;\n }\n 
}\n\n /**\n * Save updated tokens to auth file\n */\n private saveTokens(tokens: CodexTokens): void {\n try {\n const authData = this.readAuthFile();\n if (!authData) {\n this.logger?.error('[CodexAuth] Cannot save tokens - auth file not found');\n return;\n }\n\n // Update tokens and last_refresh\n authData.tokens = tokens;\n authData.last_refresh = new Date().toISOString();\n\n // Write back to file\n fs.writeFileSync(CodexAuth.AUTH_FILE_PATH, JSON.stringify(authData, null, 2), 'utf-8');\n } catch (error) {\n this.logger?.error('[CodexAuth] Failed to save tokens', error as Error);\n }\n }\n}\n","export default \"You are a coding agent running in the Codex CLI, a terminal-based coding assistant. Codex CLI is an open source project led by OpenAI. You are expected to be precise, safe, and helpful.\\n\\nYour capabilities:\\n\\n- Receive user prompts and other context provided by the harness, such as files in the workspace.\\n- Communicate with the user by streaming thinking & responses, and by making & updating plans.\\n- Emit function calls to run terminal commands and apply patches. Depending on how this specific run is configured, you can request that these function calls be escalated to the user for approval before running. More on this in the \\\"Sandbox and approvals\\\" section.\\n\\nWithin this context, Codex refers to the open-source agentic coding interface (not the old Codex language model built by OpenAI).\\n\\n# How you work\\n\\n## Personality\\n\\nYour default personality and tone is concise, direct, and friendly. You communicate efficiently, always keeping the user clearly informed about ongoing actions without unnecessary detail. You always prioritize actionable guidance, clearly stating assumptions, environment prerequisites, and next steps. Unless explicitly asked, you avoid excessively verbose explanations about your work.\\n\\n# AGENTS.md spec\\n- Repos often contain AGENTS.md files. These files can appear anywhere within the repository.\\n- These files are a way for humans to give you (the agent) instructions or tips for working within the container.\\n- Some examples might be: coding conventions, info about how code is organized, or instructions for how to run or test code.\\n- Instructions in AGENTS.md files:\\n - The scope of an AGENTS.md file is the entire directory tree rooted at the folder that contains it.\\n - For every file you touch in the final patch, you must obey instructions in any AGENTS.md file whose scope includes that file.\\n - Instructions about code style, structure, naming, etc. apply only to code within the AGENTS.md file's scope, unless the file states otherwise.\\n - More-deeply-nested AGENTS.md files take precedence in the case of conflicting instructions.\\n - Direct system/developer/user instructions (as part of a prompt) take precedence over AGENTS.md instructions.\\n- The contents of the AGENTS.md file at the root of the repo and any directories from the CWD up to the root are included with the developer message and don't need to be re-read. When working in a subdirectory of CWD, or a directory outside the CWD, check for any AGENTS.md files that may be applicable.\\n\\n## Responsiveness\\n\\n### Preamble messages\\n\\nBefore making tool calls, send a brief preamble to the user explaining what you're about to do. 
When sending preamble messages, follow these principles and examples:\\n\\n- **Logically group related actions**: if you're about to run several related commands, describe them together in one preamble rather than sending a separate note for each.\\n- **Keep it concise**: be no more than 1-2 sentences, focused on immediate, tangible next steps. (8–12 words for quick updates).\\n- **Build on prior context**: if this is not your first tool call, use the preamble message to connect the dots with what's been done so far and create a sense of momentum and clarity for the user to understand your next actions.\\n- **Keep your tone light, friendly and curious**: add small touches of personality in preambles feel collaborative and engaging.\\n- **Exception**: Avoid adding a preamble for every trivial read (e.g., `cat` a single file) unless it's part of a larger grouped action.\\n\\n**Examples:**\\n\\n- \\\"I've explored the repo; now checking the API route definitions.\\\"\\n- \\\"Next, I'll patch the config and update the related tests.\\\"\\n- \\\"I'm about to scaffold the CLI commands and helper functions.\\\"\\n- \\\"Ok cool, so I've wrapped my head around the repo. Now digging into the API routes.\\\"\\n- \\\"Config's looking tidy. Next up is patching helpers to keep things in sync.\\\"\\n- \\\"Finished poking at the DB gateway. I will now chase down error handling.\\\"\\n- \\\"Alright, build pipeline order is interesting. Checking how it reports failures.\\\"\\n- \\\"Spotted a clever caching util; now hunting where it gets used.\\\"\\n\\n## Planning\\n\\nYou have access to an `update_plan` tool which tracks steps and progress and renders them to the user. Using the tool helps demonstrate that you've understood the task and convey how you're approaching it. Plans can help to make complex, ambiguous, or multi-phase work clearer and more collaborative for the user. A good plan should break the task into meaningful, logically ordered steps that are easy to verify as you go.\\n\\nNote that plans are not for padding out simple work with filler steps or stating the obvious. The content of your plan should not involve doing anything that you aren't capable of doing (i.e. don't try to test things that you can't test). Do not use plans for simple or single-step queries that you can just do or answer immediately.\\n\\nDo not repeat the full contents of the plan after an `update_plan` call — the harness already displays it. Instead, summarize the change made and highlight any important context or next step.\\n\\nBefore running a command, consider whether or not you have completed the previous step, and make sure to mark it as completed before moving on to the next step. It may be the case that you complete all steps in your plan after a single pass of implementation. If this is the case, you can simply mark all the planned steps as completed. 
Sometimes, you may need to change plans in the middle of a task: call `update_plan` with the updated plan and make sure to provide an `explanation` of the rationale when doing so.\\n\\nUse a plan when:\\n\\n- The task is non-trivial and will require multiple actions over a long time horizon.\\n- There are logical phases or dependencies where sequencing matters.\\n- The work has ambiguity that benefits from outlining high-level goals.\\n- You want intermediate checkpoints for feedback and validation.\\n- When the user asked you to do more than one thing in a single prompt\\n- The user has asked you to use the plan tool (aka \\\"TODOs\\\")\\n- You generate additional steps while working, and plan to do them before yielding to the user\\n\\n### Examples\\n\\n**High-quality plans**\\n\\nExample 1:\\n\\n1. Add CLI entry with file args\\n2. Parse Markdown via CommonMark library\\n3. Apply semantic HTML template\\n4. Handle code blocks, images, links\\n5. Add error handling for invalid files\\n\\nExample 2:\\n\\n1. Define CSS variables for colors\\n2. Add toggle with localStorage state\\n3. Refactor components to use variables\\n4. Verify all views for readability\\n5. Add smooth theme-change transition\\n\\nExample 3:\\n\\n1. Set up Node.js + WebSocket server\\n2. Add join/leave broadcast events\\n3. Implement messaging with timestamps\\n4. Add usernames + mention highlighting\\n5. Persist messages in lightweight DB\\n6. Add typing indicators + unread count\\n\\n**Low-quality plans**\\n\\nExample 1:\\n\\n1. Create CLI tool\\n2. Add Markdown parser\\n3. Convert to HTML\\n\\nExample 2:\\n\\n1. Add dark mode toggle\\n2. Save preference\\n3. Make styles look good\\n\\nExample 3:\\n\\n1. Create single-file HTML game\\n2. Run quick sanity check\\n3. Summarize usage instructions\\n\\nIf you need to write a plan, only write high quality plans, not low quality ones.\\n\\n## Task execution\\n\\nYou are a coding agent. Please keep going until the query is completely resolved, before ending your turn and yielding back to the user. Only terminate your turn when you are sure that the problem is solved. Autonomously resolve the query to the best of your ability, using the tools available to you, before coming back to the user. Do NOT guess or make up an answer.\\n\\nYou MUST adhere to the following criteria when solving queries:\\n\\n- Working on the repo(s) in the current environment is allowed, even if they are proprietary.\\n- Analyzing code for vulnerabilities is allowed.\\n- Showing user code and tool call details is allowed.\\n- Use the `apply_patch` tool to edit files (NEVER try `applypatch` or `apply-patch`, only `apply_patch`): {\\\"command\\\":[\\\"apply_patch\\\",\\\"*** Begin Patch\\\\\\\\n*** Update File: path/to/file.py\\\\\\\\n@@ def example():\\\\\\\\n- pass\\\\\\\\n+ return 123\\\\\\\\n*** End Patch\\\"]}\\n\\nIf completing the user's task requires writing or modifying files, your code and final answer should follow these coding guidelines, though user instructions (i.e. AGENTS.md) may override these guidelines:\\n\\n- Fix the problem at the root cause rather than applying surface-level patches, when possible.\\n- Avoid unneeded complexity in your solution.\\n- Do not attempt to fix unrelated bugs or broken tests. It is not your responsibility to fix them. (You may mention them to the user in your final message though.)\\n- Update documentation as necessary.\\n- Keep changes consistent with the style of the existing codebase. 
Changes should be minimal and focused on the task.\\n- Use `git log` and `git blame` to search the history of the codebase if additional context is required.\\n- NEVER add copyright or license headers unless specifically requested.\\n- Do not waste tokens by re-reading files after calling `apply_patch` on them. The tool call will fail if it didn't work. The same goes for making folders, deleting folders, etc.\\n- Do not `git commit` your changes or create new git branches unless explicitly requested.\\n- Do not add inline comments within code unless explicitly requested.\\n- Do not use one-letter variable names unless explicitly requested.\\n- NEVER output inline citations like \\\"【F:README.md†L5-L14】\\\" in your outputs. The CLI is not able to render these so they will just be broken in the UI. Instead, if you output valid filepaths, users will be able to click on them to open the files in their editor.\\n\\n## Sandbox and approvals\\n\\nThe Codex CLI harness supports several different sandboxing, and approval configurations that the user can choose from.\\n\\nFilesystem sandboxing prevents you from editing files without user approval. The options are:\\n\\n- **read-only**: You can only read files.\\n- **workspace-write**: You can read files. You can write to files in your workspace folder, but not outside it.\\n- **danger-full-access**: No filesystem sandboxing.\\n\\nNetwork sandboxing prevents you from accessing network without approval. Options are\\n\\n- **restricted**\\n- **enabled**\\n\\nApprovals are your mechanism to get user consent to perform more privileged actions. Although they introduce friction to the user because your work is paused until the user responds, you should leverage them to accomplish your important work. Do not let these settings or the sandbox deter you from attempting to accomplish the user's task. Approval options are\\n\\n- **untrusted**: The harness will escalate most commands for user approval, apart from a limited allowlist of safe \\\"read\\\" commands.\\n- **on-failure**: The harness will allow all commands to run in the sandbox (if enabled), and failures will be escalated to the user for approval to run again without the sandbox.\\n- **on-request**: Commands will be run in the sandbox by default, and you can specify in your tool call if you want to escalate a command to run without sandboxing. (Note that this mode is not always available. If it is, you'll see parameters for it in the `shell` command description.)\\n- **never**: This is a non-interactive mode where you may NEVER ask the user for approval to run commands. Instead, you must always persist and work around constraints to solve the task for the user. You MUST do your utmost best to finish the task and validate your work before yielding. If this mode is pared with `danger-full-access`, take advantage of it to deliver the best outcome for the user. Further, in this mode, your default testing philosophy is overridden: Even if you don't see local patterns for testing, you may add tests and scripts to validate your work. Just remove them before yielding.\\n\\nWhen you are running with approvals `on-request`, and sandboxing enabled, here are scenarios where you'll need to request approval:\\n\\n- You need to run a command that writes to a directory that requires it (e.g. running tests that write to /tmp)\\n- You need to run a GUI app (e.g., open/xdg-open/osascript) to open browsers or files.\\n- You are running sandboxed and need to run a command that requires network access (e.g. 
installing packages)\\n- If you run a command that is important to solving the user's query, but it fails because of sandboxing, rerun the command with approval.\\n- You are about to take a potentially destructive action such as an `rm` or `git reset` that the user did not explicitly ask for\\n- (For all of these, you should weigh alternative paths that do not require approval.)\\n\\nNote that when sandboxing is set to read-only, you'll need to request approval for any command that isn't a read.\\n\\nYou will be told what filesystem sandboxing, network sandboxing, and approval mode are active in a developer or user message. If you are not told about this, assume that you are running with workspace-write, network sandboxing ON, and approval on-failure.\\n\\n## Validating your work\\n\\nIf the codebase has tests or the ability to build or run, consider using them to verify that your work is complete.\\n\\nWhen testing, your philosophy should be to start as specific as possible to the code you changed so that you can catch issues efficiently, then make your way to broader tests as you build confidence. If there's no test for the code you changed, and if the adjacent patterns in the codebases show that there's a logical place for you to add a test, you may do so. However, do not add tests to codebases with no tests.\\n\\nSimilarly, once you're confident in correctness, you can suggest or use formatting commands to ensure that your code is well formatted. If there are issues you can iterate up to 3 times to get formatting right, but if you still can't manage it's better to save the user time and present them a correct solution where you call out the formatting in your final message. If the codebase does not have a formatter configured, do not add one.\\n\\nFor all of testing, running, building, and formatting, do not attempt to fix unrelated bugs. It is not your responsibility to fix them. (You may mention them to the user in your final message though.)\\n\\nBe mindful of whether to run validation commands proactively. In the absence of behavioral guidance:\\n\\n- When running in non-interactive approval modes like **never** or **on-failure**, proactively run tests, lint and do whatever you need to ensure you've completed the task.\\n- When working in interactive approval modes like **untrusted**, or **on-request**, hold off on running tests or lint commands until the user is ready for you to finalize your output, because these commands take time to run and slow down iteration. Instead suggest what you want to do next, and let the user confirm first.\\n- When working on test-related tasks, such as adding tests, fixing tests, or reproducing a bug to verify behavior, you may proactively run tests regardless of approval mode. Use your judgement to decide whether this is a test-related task.\\n\\n## Ambition vs. precision\\n\\nFor tasks that have no prior context (i.e. the user is starting something brand new), you should feel free to be ambitious and demonstrate creativity with your implementation.\\n\\nIf you're operating in an existing codebase, you should make sure you do exactly what the user asks with surgical precision. Treat the surrounding codebase with respect, and don't overstep (i.e. changing filenames or variables unnecessarily). You should balance being sufficiently ambitious and proactive when completing tasks of this nature.\\n\\nYou should use judicious initiative to decide on the right level of detail and complexity to deliver based on the user's needs. 
This means showing good judgment that you're capable of doing the right extras without gold-plating. This might be demonstrated by high-value, creative touches when scope of the task is vague; while being surgical and targeted when scope is tightly specified.\\n\\n## Sharing progress updates\\n\\nFor especially longer tasks that you work on (i.e. requiring many tool calls, or a plan with multiple steps), you should provide progress updates back to the user at reasonable intervals. These updates should be structured as a concise sentence or two (no more than 8-10 words long) recapping progress so far in plain language: this update demonstrates your understanding of what needs to be done, progress so far (i.e. files explores, subtasks complete), and where you're going next.\\n\\nBefore doing large chunks of work that may incur latency as experienced by the user (i.e. writing a new file), you should send a concise message to the user with an update indicating what you're about to do to ensure they know what you're spending time on. Don't start editing or writing large files before informing the user what you are doing and why.\\n\\nThe messages you send before tool calls should describe what is immediately about to be done next in very concise language. If there was previous work done, this preamble message should also include a note about the work done so far to bring the user along.\\n\\n## Presenting your work and final message\\n\\nYour final message should read naturally, like an update from a concise teammate. For casual conversation, brainstorming tasks, or quick questions from the user, respond in a friendly, conversational tone. You should ask questions, suggest ideas, and adapt to the user's style. If you've finished a large amount of work, when describing what you've done to the user, you should follow the final answer formatting guidelines to communicate substantive changes. You don't need to add structured formatting for one-word answers, greetings, or purely conversational exchanges.\\n\\nYou can skip heavy formatting for single, simple actions or confirmations. In these cases, respond in plain sentences with any relevant next step or quick option. Reserve multi-section structured responses for results that need grouping or explanation.\\n\\nThe user is working on the same computer as you, and has access to your work. As such there's no need to show the full contents of large files you have already written unless the user explicitly asks for them. Similarly, if you've created or modified files using `apply_patch`, there's no need to tell users to \\\"save the file\\\" or \\\"copy the code into a file\\\"—just reference the file path.\\n\\nIf there's something that you think you could help with as a logical next step, concisely ask the user if they want you to do so. Good examples of this are running tests, committing changes, or building out the next logical component. If there's something that you couldn't do (even with approval) but that the user might want to do (such as verifying changes by running the app), include those instructions succinctly.\\n\\nBrevity is very important as a default. You should be very concise (i.e. no more than 10 lines), but can relax this requirement for tasks where additional detail and comprehensiveness is important for the user's understanding.\\n\\n### Final answer structure and style guidelines\\n\\nYou are producing plain text that will later be styled by the CLI. Follow these rules exactly. 
Formatting should make results easy to scan, but not feel mechanical. Use judgment to decide how much structure adds value.\\n\\n**Section Headers**\\n\\n- Use only when they improve clarity — they are not mandatory for every answer.\\n- Choose descriptive names that fit the content\\n- Keep headers short (1–3 words) and in `**Title Case**`. Always start headers with `**` and end with `**`\\n- Leave no blank line before the first bullet under a header.\\n- Section headers should only be used where they genuinely improve scanability; avoid fragmenting the answer.\\n\\n**Bullets**\\n\\n- Use `-` followed by a space for every bullet.\\n- Merge related points when possible; avoid a bullet for every trivial detail.\\n- Keep bullets to one line unless breaking for clarity is unavoidable.\\n- Group into short lists (4–6 bullets) ordered by importance.\\n- Use consistent keyword phrasing and formatting across sections.\\n\\n**Monospace**\\n\\n- Wrap all commands, file paths, env vars, and code identifiers in backticks (`` `...` ``).\\n- Apply to inline examples and to bullet keywords if the keyword itself is a literal file/command.\\n- Never mix monospace and bold markers; choose one based on whether it's a keyword (`**`) or inline code/path (`` ` ``).\\n\\n**File References**\\nWhen referencing files in your response, make sure to include the relevant start line and always follow the below rules:\\n * Use inline code to make file paths clickable.\\n * Each reference should have a stand alone path. Even if it's the same file.\\n * Accepted: absolute, workspace‑relative, a/ or b/ diff prefixes, or bare filename/suffix.\\n * Line/column (1‑based, optional): :line[:column] or #Lline[Ccolumn] (column defaults to 1).\\n * Do not use URIs like file://, vscode://, or https://.\\n * Do not provide range of lines\\n * Examples: src/app.ts, src/app.ts:42, b/server/index.js#L10, C:\\\\\\\\repo\\\\\\\\project\\\\\\\\main.rs:12:5\\n\\n**Structure**\\n\\n- Place related bullets together; don't mix unrelated concepts in the same section.\\n- Order sections from general → specific → supporting info.\\n- For subsections (e.g., \\\"Binaries\\\" under \\\"Rust Workspace\\\"), introduce with a bolded keyword bullet, then list items under it.\\n- Match structure to complexity:\\n - Multi-part or detailed results → use clear headers and grouped bullets.\\n - Simple results → minimal headers, possibly just a short list or paragraph.\\n\\n**Tone**\\n\\n- Keep the voice collaborative and natural, like a coding partner handing off work.\\n- Be concise and factual — no filler or conversational commentary and avoid unnecessary repetition\\n- Use present tense and active voice (e.g., \\\"Runs tests\\\" not \\\"This will run tests\\\").\\n- Keep descriptions self-contained; don't refer to \\\"above\\\" or \\\"below\\\".\\n- Use parallel structure in lists for consistency.\\n\\n**Don't**\\n\\n- Don't use literal words \\\"bold\\\" or \\\"monospace\\\" in the content.\\n- Don't nest bullets or create deep hierarchies.\\n- Don't output ANSI escape codes directly — the CLI renderer applies them.\\n- Don't cram unrelated keywords into a single bullet; split for clarity.\\n- Don't let keyword lists run long — wrap or reformat for scanability.\\n\\nGenerally, ensure your final answers adapt their shape and depth to the request. For example, answers to code explanations should have a precise, structured explanation with code references that answer the question directly. 
For tasks with a simple implementation, lead with the outcome and supplement only with what's needed for clarity. Larger changes can be presented as a logical walkthrough of your approach, grouping related steps, explaining rationale where it adds value, and highlighting next actions to accelerate the user. Your answers should provide the right level of detail while being easily scannable.\\n\\nFor casual greetings, acknowledgements, or other one-off conversational messages that are not delivering substantive information or structured results, respond naturally without section headers or bullet formatting.\\n\\n# Tool Guidelines\\n\\n## Shell commands\\n\\nWhen using the shell, you must adhere to the following guidelines:\\n\\n- When searching for text or files, prefer using `rg` or `rg --files` respectively because `rg` is much faster than alternatives like `grep`. (If the `rg` command is not found, then use alternatives.)\\n- Read files in chunks with a max chunk size of 250 lines. Do not use python scripts to attempt to output larger chunks of a file. Command line output will be truncated after 10 kilobytes or 256 lines of output, regardless of the command used.\\n\\n## `update_plan`\\n\\nA tool named `update_plan` is available to you. You can use it to keep an up‑to‑date, step‑by‑step plan for the task.\\n\\nTo create a new plan, call `update_plan` with a short list of 1‑sentence steps (no more than 5-7 words each) with a `status` for each step (`pending`, `in_progress`, or `completed`).\\n\\nWhen steps have been completed, use `update_plan` to mark each finished step as `completed` and the next step you are working on as `in_progress`. There should always be exactly one `in_progress` step until everything is done. You can mark multiple items as complete in a single `update_plan` call.\\n\\nIf all steps are complete, ensure you call `update_plan` to mark all steps as `completed`.\\n\\n## `apply_patch`\\n\\nUse the `apply_patch` shell command to edit files.\\nYour patch language is a stripped‑down, file‑oriented diff format designed to be easy to parse and safe to apply. You can think of it as a high‑level envelope:\\n\\n*** Begin Patch\\n[ one or more file sections ]\\n*** End Patch\\n\\nWithin that envelope, you get a sequence of file operations.\\nYou MUST include a header to specify the action you are taking.\\nEach operation starts with one of three headers:\\n\\n*** Add File: <path> - create a new file. Every following line is a + line (the initial contents).\\n*** Delete File: <path> - remove an existing file. Nothing follows.\\n*** Update File: <path> - patch an existing file in place (optionally with a rename).\\n\\nMay be immediately followed by *** Move to: <new path> if you want to rename the file.\\nThen one or more \\\"hunks\\\", each introduced by @@ (optionally followed by a hunk header).\\nWithin a hunk each line starts with:\\n\\nFor instructions on [context_before] and [context_after]:\\n- By default, show 3 lines of code immediately above and 3 lines immediately below each change. If a change is within 3 lines of a previous change, do NOT duplicate the first change's [context_after] lines in the second change's [context_before] lines.\\n- If 3 lines of context is insufficient to uniquely identify the snippet of code within the file, use the @@ operator to indicate the class or function to which the snippet belongs. 
For instance, we might have:\\n@@ class BaseClass\\n[3 lines of pre-context]\\n- [old_code]\\n+ [new_code]\\n[3 lines of post-context]\\n\\n- If a code block is repeated so many times in a class or function such that even a single `@@` statement and 3 lines of context cannot uniquely identify the snippet of code, you can use multiple `@@` statements to jump to the right context. For instance:\\n\\n@@ class BaseClass\\n@@ \\t def method():\\n[3 lines of pre-context]\\n- [old_code]\\n+ [new_code]\\n[3 lines of post-context]\\n\\nThe full grammar definition is below:\\nPatch := Begin { FileOp } End\\nBegin := \\\"*** Begin Patch\\\" NEWLINE\\nEnd := \\\"*** End Patch\\\" NEWLINE\\nFileOp := AddFile | DeleteFile | UpdateFile\\nAddFile := \\\"*** Add File: \\\" path NEWLINE { \\\"+\\\" line NEWLINE }\\nDeleteFile := \\\"*** Delete File: \\\" path NEWLINE\\nUpdateFile := \\\"*** Update File: \\\" path NEWLINE [ MoveTo ] { Hunk }\\nMoveTo := \\\"*** Move to: \\\" newPath NEWLINE\\nHunk := \\\"@@\\\" [ header ] NEWLINE { HunkLine } [ \\\"*** End of File\\\" NEWLINE ]\\nHunkLine := (\\\" \\\" | \\\"-\\\" | \\\"+\\\") text NEWLINE\\n\\nA full patch can combine several operations:\\n\\n*** Begin Patch\\n*** Add File: hello.txt\\n+Hello world\\n*** Update File: src/app.py\\n*** Move to: src/main.py\\n@@ def greet():\\n-print(\\\"Hi\\\")\\n+print(\\\"Hello, world!\\\")\\n*** Delete File: obsolete.txt\\n*** End Patch\\n\\nIt is important to remember:\\n\\n- You must include a header with your intended action (Add/Delete/Update)\\n- You must prefix new lines with `+` even when creating a new file\\n- File references can only be relative, NEVER ABSOLUTE.\\n\\nYou can invoke apply_patch like:\\n\\n```\\nshell {\\\"command\\\":[\\\"apply_patch\\\",\\\"*** Begin Patch\\\\\\\\n*** Add File: hello.txt\\\\\\\\n+Hello, world!\\\\\\\\n*** End Patch\\\\\\\\n\\\"]}\\n```\\n\"","/**\n * Claude to OpenAI Request Transformer\n *\n * Converts Anthropic Claude Messages API requests to OpenAI Responses API format.\n * Handles message structure, system prompts, streaming, and other parameters.\n */\n\nimport { randomUUID } from 'node:crypto';\nimport { CodexAuth } from '../auth/CodexAuth';\nimport type { RequestTransformer, RouterConfig, TransformedRequest } from '../types';\n// @ts-expect-error - Vite raw import not recognized by TypeScript\nimport codexInstructions from './codex.md?raw';\n\nexport class ClaudeToOpenAITransformer implements RequestTransformer {\n private config: RouterConfig;\n private codexAuth?: CodexAuth;\n private codexInstructions: string;\n\n constructor(config: RouterConfig, codexAuth?: CodexAuth) {\n this.config = config;\n this.codexAuth = codexAuth;\n // Load Codex CLI system prompt\n this.codexInstructions = this.loadCodexInstructions();\n }\n\n /**\n * Load the complete Codex CLI system prompt from codex.md\n */\n private loadCodexInstructions(): string {\n return codexInstructions;\n }\n\n async transform(_url: string, requestBody: string): Promise<TransformedRequest> {\n try {\n const claudeRequest = JSON.parse(requestBody);\n\n // Log original Claude request body\n this.config.logger?.debug('[ClaudeToOpenAI] ===== ORIGINAL CLAUDE REQUEST =====');\n this.config.logger?.debug('[ClaudeToOpenAI] Original body', {\n body: JSON.stringify(claudeRequest, null, 2),\n });\n\n // Generate conversation/session IDs for Responses API\n const conversationId = randomUUID();\n const sessionId = conversationId; // Use same UUID for both\n\n // Determine reasoning effort:\n // 1. 
Use session-level override if provided\n // 2. Fall back to model-based detection (Haiku = minimal, others = medium)\n const sessionReasoningEffort = (this.config as any).sessionReasoningEffort;\n const isHaikuModel = claudeRequest.model && claudeRequest.model.toLowerCase().includes('haiku');\n const reasoningEffort = sessionReasoningEffort || (isHaikuModel ? 'minimal' : 'medium');\n\n this.config.logger?.debug('[ClaudeToOpenAI] Model detection and reasoning effort', {\n originalModel: claudeRequest.model,\n isHaikuModel,\n sessionReasoningEffort: sessionReasoningEffort || 'none',\n finalReasoningEffort: reasoningEffort,\n source: sessionReasoningEffort ? 'session override' : 'model-based',\n });\n\n // Transform to Responses API format\n const responsesRequest: any = {\n model: this.config.toModel || 'gpt-5',\n stream: true, // Responses API requires stream to be true\n store: false, // Required by Responses API\n tool_choice: 'auto',\n parallel_tool_calls: false,\n reasoning: {\n effort: reasoningEffort, // Use minimal effort for Haiku models\n summary: 'auto',\n },\n include: ['reasoning.encrypted_content'],\n prompt_cache_key: conversationId, // Use conversation ID as cache key\n };\n\n // Use Codex CLI system prompt as instructions (required for ChatGPT Responses API)\n // Replace Claude-specific model information with ChatGPT model information\n responsesRequest.instructions = this.adaptInstructionsForChatGPT(this.codexInstructions);\n\n // Convert messages to input array format, prepending Claude system messages to first user message\n const input: any[] = [];\n\n // Extract Claude system messages and prepend to first user message\n let claudeSystemPrompt = '';\n if (claudeRequest.system) {\n const systemMessages = this.extractSystemMessages(claudeRequest.system);\n // Add defensive check to ensure systemMessages is an array\n if (systemMessages && Array.isArray(systemMessages) && systemMessages.length > 0) {\n claudeSystemPrompt = systemMessages.map((msg: any) => msg.content).join('\\n\\n');\n }\n }\n\n // Filter out Claude Code-specific instructions from user messages\n claudeSystemPrompt = this.removeClaudeCodeInstructions(claudeSystemPrompt);\n\n if (claudeSystemPrompt) {\n input.push({\n type: 'message',\n role: 'user',\n content: [\n {\n type: 'input_text',\n text: claudeSystemPrompt,\n },\n ],\n });\n }\n\n if (claudeRequest.messages && Array.isArray(claudeRequest.messages)) {\n for (const msg of claudeRequest.messages) {\n const converted = this.convertMessageToInput(msg);\n if (Array.isArray(converted)) {\n // convertMessageToInput can return multiple messages (for tool results)\n input.push(...converted);\n } else if (converted) {\n input.push(converted);\n }\n }\n }\n responsesRequest.input = input;\n\n // Handle tools/function calling\n if (claudeRequest.tools && Array.isArray(claudeRequest.tools)) {\n this.config.logger?.debug('[ClaudeToOpenAI] Original Claude tools', {\n tools: JSON.stringify(claudeRequest.tools, null, 2),\n });\n const convertedTools = this.convertTools(claudeRequest.tools);\n this.config.logger?.debug('[ClaudeToOpenAI] Converted tools', {\n tools: JSON.stringify(convertedTools, null, 2),\n });\n\n // Only add tools if there are valid ones after conversion\n if (convertedTools.length > 0) {\n responsesRequest.tools = convertedTools;\n this.config.logger?.debug('[ClaudeToOpenAI] Added tools to responsesRequest', {\n toolCount: convertedTools.length,\n });\n // Verify tools were actually set\n this.config.logger?.debug('[ClaudeToOpenAI] Verify 
responsesRequest.tools exists', {\n hasTools: !!responsesRequest.tools,\n toolsLength: responsesRequest.tools?.length,\n keys: Object.keys(responsesRequest),\n });\n } else {\n this.config.logger?.warn('[ClaudeToOpenAI] No valid tools after conversion, omitting tools field');\n }\n } else {\n this.config.logger?.debug('[ClaudeToOpenAI] No tools in Claude request', {\n hasTools: !!claudeRequest.tools,\n isArray: Array.isArray(claudeRequest.tools),\n });\n }\n\n // Responses API endpoint\n const targetUrl = this.config.toEndpoint || 'https://chatgpt.com/backend-api/codex/responses';\n\n // Build headers for Responses API\n const headers: Record<string, string> = {\n version: '0.46.0',\n 'openai-beta': 'responses=experimental',\n conversation_id: conversationId,\n session_id: sessionId,\n accept: 'text/event-stream',\n 'content-type': 'application/json',\n 'user-agent': 'codex_cli_rs/0.46.0 (Mac OS 15.6.0; arm64) iTerm.app/3.6.2',\n originator: 'codex_cli_rs',\n };\n\n // Try CodexAuth first (async OAuth token), fallback to API key\n if (this.codexAuth) {\n const accessToken = await this.codexAuth.getAccessToken();\n this.config.logger?.debug('[ClaudeToOpenAI] Raw access token', {\n token: accessToken?.substring(0, 30) + '...',\n });\n if (accessToken) {\n // Check if token already has \"Bearer \" prefix\n if (accessToken.startsWith('Bearer ')) {\n headers.authorization = accessToken; // Use as-is (lowercase for consistency)\n this.config.logger?.debug('[ClaudeToOpenAI] Token already has Bearer prefix, using as-is');\n } else {\n headers.authorization = `Bearer ${accessToken}`;\n this.config.logger?.debug('[ClaudeToOpenAI] Added Bearer prefix to token');\n }\n }\n // Add account ID if available (from codex auth)\n const accountId = await this.codexAuth.getAccountId();\n if (accountId) {\n headers['chatgpt-account-id'] = accountId;\n }\n } else if (this.config.toApiKey) {\n headers.authorization = `Bearer ${this.config.toApiKey}`;\n }\n\n // Debug logging\n this.config.logger?.debug('[ClaudeToOpenAI] ===== REQUEST DETAILS =====');\n this.config.logger?.debug('[ClaudeToOpenAI] Target URL', { targetUrl });\n this.config.logger?.debug('[ClaudeToOpenAI] Headers', {\n headers: {\n ...headers,\n authorization: headers.authorization ? 
`${headers.authorization.substring(0, 30)}...` : undefined,\n },\n });\n this.config.logger?.debug('[ClaudeToOpenAI] Body', {\n body: JSON.stringify(responsesRequest, null, 2),\n });\n this.config.logger?.debug('[ClaudeToOpenAI] ===============================');\n\n // Log final transformed body\n this.config.logger?.debug('[ClaudeToOpenAI] ===== FINAL TRANSFORMED REQUEST =====');\n this.config.logger?.debug('[ClaudeToOpenAI] Pre-final check - responsesRequest.tools', {\n hasTools: !!responsesRequest.tools,\n toolsLength: responsesRequest.tools?.length,\n keys: Object.keys(responsesRequest),\n });\n this.config.logger?.debug('[ClaudeToOpenAI] Final body', {\n body: JSON.stringify(responsesRequest, null, 2),\n });\n this.config.logger?.debug('[ClaudeToOpenAI] ===== END FINAL TRANSFORMED REQUEST =====');\n\n return {\n url: targetUrl,\n body: JSON.stringify(responsesRequest),\n headers,\n };\n } catch (error) {\n throw new Error(`Failed to transform Claude request to OpenAI: ${error}`);\n }\n }\n\n /**\n * Adapt Codex instructions for ChatGPT by replacing Claude-specific model information\n */\n private adaptInstructionsForChatGPT(instructions: string): string {\n let adapted = instructions;\n\n // Replace Claude model references with ChatGPT model references\n adapted = adapted.replace(\n /You are powered by the model named Sonnet 4\\.5\\. The exact model ID is claude-sonnet-4-5-\\d+\\./g,\n 'You are powered by ChatGPT (GPT-5 reasoning model).',\n );\n\n // Replace knowledge cutoff date\n adapted = adapted.replace(\n /Assistant knowledge cutoff is January 2025/g,\n 'Assistant knowledge cutoff is October 2023',\n );\n\n // Replace any other Claude-specific references\n adapted = adapted.replace(/\\bClaude\\b/g, 'ChatGPT');\n adapted = adapted.replace(/\\bAnthropic\\b/g, 'OpenAI');\n\n return adapted;\n }\n\n /**\n * Remove Claude Code-specific instructions from system messages\n * These should not be sent to ChatGPT as user messages\n */\n private removeClaudeCodeInstructions(text: string): string {\n // Remove Claude Code introduction and instructions\n const claudeCodePatterns = [\n /You are Claude Code, Anthropic's official CLI for Claude\\.[\\s\\S]*?claude_code_docs_map\\.md/,\n /You are Claude Code[\\s\\S]*?using Claude Code\\n/,\n ];\n\n let filtered = text;\n for (const pattern of claudeCodePatterns) {\n filtered = filtered.replace(pattern, '');\n }\n\n // Clean up excessive whitespace\n filtered = filtered.replace(/\\n{3,}/g, '\\n\\n').trim();\n\n return filtered;\n }\n\n /**\n * Extract and convert system messages from Claude format\n */\n private extractSystemMessages(system: any): any[] {\n const messages: any[] = [];\n\n if (typeof system === 'string') {\n messages.push({ role: 'system', content: system });\n } else if (Array.isArray(system)) {\n for (const item of system) {\n if (typeof item === 'string') {\n messages.push({ role: 'system', content: item });\n } else if (item.type === 'text' && item.text) {\n messages.push({ role: 'system', content: item.text });\n }\n }\n } else if (system.type === 'text' && system.text) {\n messages.push({ role: 'system', content: system.text });\n }\n\n return messages;\n }\n\n /**\n * Convert a single Claude message to Responses API input format\n * Returns a single message or array of messages (for tool results)\n */\n private convertMessageToInput(msg: any): any | any[] | null {\n if (!msg.role || !msg.content) return null;\n\n const textType = msg.role === 'assistant' ? 
'output_text' : 'input_text';\n\n // Handle content array - check for tool results, tool uses, and images\n if (Array.isArray(msg.content)) {\n const toolResults: any[] = [];\n const toolCalls: any[] = [];\n const textContent: any[] = [];\n\n for (const block of msg.content) {\n if (block.type === 'tool_result') {\n // Convert to function_call_output\n const resultText = typeof block.content === 'string' ? block.content : JSON.stringify(block.content);\n toolResults.push({\n type: 'function_call_output',\n call_id: block.tool_use_id,\n output: resultText,\n });\n } else if (block.type === 'text') {\n textContent.push({\n type: textType,\n text: block.text || '',\n });\n } else if (block.type === 'image') {\n // Convert Claude image format to OpenAI Codex format\n if (block.source && block.source.type === 'base64' && block.source.data) {\n const mediaType = block.source.media_type || 'image/jpeg';\n const imageUrl = `data:${mediaType};base64,${block.source.data}`;\n textContent.push({\n type: 'input_image',\n image_url: imageUrl,\n });\n this.config.logger?.debug('[ClaudeToOpenAI] Converted image block', {\n mediaType,\n dataLength: block.source.data.length,\n });\n } else {\n this.config.logger?.warn('[ClaudeToOpenAI] Unsupported image format', {\n source: block.source,\n });\n }\n } else if (block.type === 'tool_use') {\n // Convert tool_use to function_call (for assistant messages in conversation history)\n toolCalls.push({\n type: 'function_call',\n call_id: block.id,\n name: block.name,\n arguments: JSON.stringify(block.input || {}),\n });\n }\n }\n\n // Build result array based on what we found\n const result: any[] = [];\n\n // Add text message if present\n if (textContent.length > 0) {\n result.push({\n type: 'message',\n role: msg.role,\n content: textContent,\n });\n }\n\n // Add function calls (from assistant's tool_use blocks)\n if (toolCalls.length > 0) {\n result.push(...toolCalls);\n }\n\n // Add function call outputs (from user's tool_result blocks)\n if (toolResults.length > 0) {\n result.push(...toolResults);\n }\n\n // Return array if we have multiple items, single item if just one, null if none\n if (result.length > 1) {\n return result;\n } else if (result.length === 1) {\n return result[0];\n }\n\n return null;\n } else if (typeof msg.content === 'string') {\n // Convert string content to content array format\n return {\n type: 'message',\n role: msg.role,\n content: [\n {\n type: textType,\n text: msg.content,\n },\n ],\n };\n }\n\n return null;\n }\n\n /**\n * Convert Claude tools to ChatGPT Responses API format\n *\n * The Responses API uses a flat structure with type at the top level:\n * { type: \"function\", name: \"...\", description: \"...\", parameters: {...} }\n */\n private convertTools(tools: any[]): any[] {\n // Add defensive check for undefined/null tools\n if (!tools || !Array.isArray(tools)) {\n return [];\n }\n\n return tools\n .filter((tool) => {\n // Validate that tool has required fields\n if (!tool || typeof tool !== 'object') {\n return false;\n }\n if (!tool.name) {\n return false;\n }\n return true;\n })\n .map((tool) => {\n // ChatGPT Responses API expects flat structure with type as a sibling field\n return {\n type: 'function',\n name: tool.name,\n description: tool.description || '',\n parameters: tool.input_schema || tool.parameters || {},\n };\n });\n }\n}\n","/**\n * OpenAI to Claude Response Transformer\n *\n * Converts OpenAI Chat Completions API streaming responses (SSE format)\n * to Anthropic Claude Messages API streaming format.\n 
*\n * OpenAI SSE events: data: {\"choices\":[{\"delta\":{\"content\":\"text\"}}]}\n * Claude SSE events: event: content_block_delta\\ndata: {\"delta\":{\"type\":\"text\",\"text\":\"text\"}}\n */\n\nimport { ulid } from 'ulidx';\nimport type { ResponseTransformer } from '../types';\n\ninterface Logger {\n info: (msg: string, data?: any) => void;\n error: (msg: string, error?: Error) => void;\n debug: (msg: string, data?: any) => void;\n warn: (msg: string, data?: any) => void;\n}\n\ninterface ParsedOpenAIStream {\n textSegments: string[];\n thinkingSegments: string[];\n toolCalls: Map<number, { id: string; name: string; argumentChunks: string[] }>;\n model: string;\n inputTokens: number;\n outputTokens: number;\n cachedTokens?: number;\n reasoningTokens?: number;\n reasoningEffort?: string;\n stopReason?: string | null;\n errorMessage?: string;\n}\n\nexport class OpenAIToClaudeTransformer implements ResponseTransformer {\n private logger?: Logger;\n\n constructor(logger?: Logger) {\n this.logger = logger;\n }\n\n transform(responseBody: string): string {\n try {\n // Handle empty or error responses\n if (!responseBody || responseBody.trim() === '') {\n // Return a valid empty Claude response to prevent parsing errors\n return this.createEmptyClaudeResponse();\n }\n\n // For non-streaming responses, return as-is or convert to Claude format\n if (!responseBody.includes('data:')) {\n return this.convertNonStreamingResponse(responseBody);\n }\n\n // Parse SSE stream from OpenAI and convert to Claude format\n const result = this.convertStreamingResponse(responseBody);\n return result;\n } catch (error) {\n this.logger?.error('[OpenAIToClaude] ERROR in transform', error as Error);\n // Return a valid empty Claude response instead of potentially malformed original\n return this.createEmptyClaudeResponse();\n }\n }\n\n /**\n * Create a valid empty Claude streaming response to prevent parsing errors\n */\n private createEmptyClaudeResponse(): string {\n const messageId = `msg_${ulid()}`;\n const events: string[] = [];\n\n // message_start\n events.push('event: message_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_start',\n message: {\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [],\n model: 'gpt-5',\n stop_reason: null,\n stop_sequence: null,\n usage: { input_tokens: 0, output_tokens: 0 },\n },\n })}`,\n );\n events.push('');\n\n // message_stop (no content)\n events.push('event: message_stop');\n events.push('data: {\"type\":\"message_stop\"}');\n events.push('');\n\n return events.join('\\n');\n }\n\n private convertStreamingResponse(sseText: string): string {\n const parsed = this.parseOpenAIStream(sseText);\n const claudeStream = this.createClaudeStreamFromParsed(parsed);\n const fallbackMessage = parsed.errorMessage || 'Empty streaming response from provider';\n return this.ensureValidClaudeStream(claudeStream, fallbackMessage);\n }\n\n private parseOpenAIStream(sseText: string): ParsedOpenAIStream {\n const parsed: ParsedOpenAIStream = {\n textSegments: [],\n thinkingSegments: [],\n toolCalls: new Map<number, { id: string; name: string; argumentChunks: string[] }>(),\n model: 'gpt-5',\n inputTokens: 0,\n outputTokens: 0,\n cachedTokens: 0,\n reasoningTokens: 0,\n reasoningEffort: undefined,\n stopReason: undefined,\n errorMessage: undefined,\n };\n\n const lines = sseText.split('\\n');\n let currentEvent = '';\n const isResponsesApi =\n /event:\\s*response\\./i.test(sseText) ||\n /\"type\"\\s*:\\s*\"response\\./i.test(sseText) ||\n 
/\"response\"\\s*:\\s*\\{/i.test(sseText);\n\n for (const rawLine of lines) {\n const line = rawLine.trim();\n if (!line) continue;\n\n if (line.startsWith('event:')) {\n currentEvent = line.slice(6).trim();\n continue;\n }\n\n if (!line.startsWith('data:')) continue;\n const dataContent = line.slice(5).trim();\n if (!dataContent || dataContent === '[DONE]') continue;\n\n let data: any;\n try {\n data = JSON.parse(dataContent);\n } catch {\n continue;\n }\n\n if (data?.error) {\n parsed.errorMessage =\n data.error?.message ||\n data.error?.error ||\n (typeof data.error === 'string' ? data.error : 'Unexpected API error');\n break;\n }\n\n if (isResponsesApi || currentEvent.startsWith('response.')) {\n this.handleResponsesEvent(currentEvent, data, parsed);\n } else {\n this.handleChatCompletionChunk(data, parsed);\n }\n }\n\n return parsed;\n }\n\n private handleResponsesEvent(eventName: string, data: any, parsed: ParsedOpenAIStream): void {\n const eventType = typeof data?.type === 'string' ? data.type : eventName;\n\n if (data?.model && typeof data.model === 'string') {\n parsed.model = data.model;\n }\n\n if (data?.usage) {\n parsed.inputTokens = data.usage.input_tokens ?? data.usage.prompt_tokens ?? parsed.inputTokens;\n parsed.outputTokens = data.usage.output_tokens ?? data.usage.completion_tokens ?? parsed.outputTokens;\n }\n\n switch (eventType) {\n case 'response.created':\n if (data?.response?.model) {\n parsed.model = data.response.model;\n }\n break;\n case 'response.reasoning.delta':\n case 'response.reasoning_summary_text.delta':\n case 'response.function_call_arguments.delta':\n case 'response.function_call_arguments.done':\n case 'response.in_progress':\n case 'response.output_item.added':\n case 'response.output_item.done':\n case 'response.content_part.added':\n case 'response.content_part.done':\n case 'response.reasoning_summary_part.added':\n case 'response.reasoning_summary_part.done':\n case 'response.reasoning_summary_text.done':\n // Ignore streaming events - we get the full content in response.completed\n break;\n case 'response.output_text.delta':\n case 'response.delta':\n //this.collectTextFromNode(data?.delta, parsed.textSegments);\n if (data?.delta?.tool_calls) {\n //this.collectToolCalls(data.delta.tool_calls, parsed.toolCalls);\n }\n break;\n case 'response.output_text.done':\n //this.collectTextFromNode(data?.output_text, parsed.textSegments);\n break;\n case 'response.completed':\n if (data?.response?.model) {\n parsed.model = data.response.model;\n }\n if (data?.response?.usage) {\n parsed.inputTokens =\n data.response.usage.input_tokens ?? data.response.usage.prompt_tokens ?? parsed.inputTokens;\n parsed.outputTokens =\n data.response.usage.output_tokens ?? data.response.usage.completion_tokens ?? 
parsed.outputTokens;\n\n // Capture detailed token usage\n if (data.response.usage.input_tokens_details?.cached_tokens) {\n parsed.cachedTokens = data.response.usage.input_tokens_details.cached_tokens;\n }\n if (data.response.usage.output_tokens_details?.reasoning_tokens) {\n parsed.reasoningTokens = data.response.usage.output_tokens_details.reasoning_tokens;\n }\n }\n\n // Capture reasoning effort for pricing calculation\n if (data?.response?.reasoning?.effort) {\n parsed.reasoningEffort = data.response.reasoning.effort;\n }\n if (Array.isArray(data?.response?.tool_calls)) {\n this.collectToolCalls(data.response.tool_calls, parsed.toolCalls);\n }\n\n // Handle output array from Responses API\n if (Array.isArray(data?.response?.output)) {\n for (const output of data.response.output) {\n if (output?.type === 'reasoning') {\n // Extract thinking from reasoning.summary\n if (Array.isArray(output?.summary)) {\n for (const summaryItem of output.summary) {\n if (summaryItem?.type === 'summary_text' && summaryItem?.text) {\n parsed.thinkingSegments.push(summaryItem.text);\n }\n }\n }\n } else if (output?.type === 'message') {\n // Extract text from message.content\n if (Array.isArray(output?.content)) {\n for (const contentItem of output.content) {\n // The content type could be 'text' or 'output_text'\n if ((contentItem?.type === 'output_text' || contentItem?.type === 'text') && contentItem?.text) {\n parsed.textSegments.push(contentItem.text);\n }\n }\n }\n } else if (output?.type === 'function_call') {\n // Extract tool calls from function_call output\n const toolCallData = {\n index: parsed.toolCalls.size,\n id: output.id || `tool_${ulid()}`,\n function: {\n name: output.name,\n arguments: output.arguments,\n },\n };\n this.collectToolCalls([toolCallData], parsed.toolCalls);\n }\n }\n }\n\n // Fallback to old output_text field for backward compatibility\n if (data?.response?.output_text) {\n this.collectTextFromNode(data.response.output_text, parsed.textSegments);\n }\n parsed.stopReason = this.mapResponseStatusToStopReason(data?.response?.status);\n break;\n case 'response.error':\n parsed.errorMessage = data?.error?.message || data?.message || 'Unexpected API error';\n break;\n default:\n if (data?.delta) {\n this.collectTextFromNode(data.delta, parsed.textSegments);\n if (data.delta.tool_calls) {\n this.collectToolCalls(data.delta.tool_calls, parsed.toolCalls);\n }\n }\n break;\n }\n }\n\n private handleChatCompletionChunk(chunk: any, parsed: ParsedOpenAIStream): void {\n if (!chunk) return;\n\n if (chunk.model && typeof chunk.model === 'string') {\n parsed.model = chunk.model;\n }\n\n if (chunk.usage) {\n parsed.inputTokens = chunk.usage.prompt_tokens ?? parsed.inputTokens;\n parsed.outputTokens = chunk.usage.completion_tokens ?? 
parsed.outputTokens;\n }\n\n if (!Array.isArray(chunk.choices)) return;\n\n for (const choice of chunk.choices) {\n if (choice?.delta) {\n this.collectTextFromNode(choice.delta, parsed.textSegments);\n if (choice.delta.tool_calls) {\n this.collectToolCalls(choice.delta.tool_calls, parsed.toolCalls);\n }\n }\n\n if (choice?.message?.content) {\n this.collectTextFromNode(choice.message.content, parsed.textSegments);\n }\n\n if (choice?.finish_reason) {\n parsed.stopReason = this.mapFinishReason(choice.finish_reason);\n }\n }\n }\n\n private collectTextFromNode(node: any, collector: string[]): void {\n if (node == null) return;\n\n if (typeof node === 'string') {\n if (node.length > 0) collector.push(node);\n return;\n }\n\n if (Array.isArray(node)) {\n for (const item of node) {\n this.collectTextFromNode(item, collector);\n }\n return;\n }\n\n if (typeof node !== 'object') {\n return;\n }\n\n if (typeof node.text === 'string') {\n collector.push(node.text);\n }\n\n if (typeof node.output_text === 'string') {\n collector.push(node.output_text);\n }\n\n if (typeof node.value === 'string') {\n collector.push(node.value);\n }\n\n if (typeof node.delta === 'string') {\n collector.push(node.delta);\n } else if (node.delta) {\n this.collectTextFromNode(node.delta, collector);\n }\n\n if (node.token && typeof node.token.text === 'string') {\n collector.push(node.token.text);\n }\n\n const nestedKeys = ['content', 'output', 'output_text', 'message', 'choices', 'segments'];\n for (const key of nestedKeys) {\n if (node[key] !== undefined) {\n if (key === 'choices' && Array.isArray(node[key])) {\n for (const choice of node[key]) {\n if (choice?.message?.content) {\n this.collectTextFromNode(choice.message.content, collector);\n }\n if (choice?.delta) {\n this.collectTextFromNode(choice.delta, collector);\n }\n }\n } else {\n this.collectTextFromNode(node[key], collector);\n }\n }\n }\n }\n\n private collectToolCalls(\n rawToolCalls: any,\n toolCallMap: Map<number, { id: string; name: string; argumentChunks: string[] }>,\n ): void {\n if (!rawToolCalls) return;\n\n const callsArray = Array.isArray(rawToolCalls) ? rawToolCalls : [rawToolCalls];\n\n for (const call of callsArray) {\n if (!call) continue;\n\n const index = typeof call.index === 'number' ? 
call.index : toolCallMap.size;\n const existing = toolCallMap.get(index) || {\n id: '',\n name: '',\n argumentChunks: [],\n };\n\n if (typeof call.id === 'string' && !existing.id) {\n existing.id = call.id;\n }\n\n const functionName = call.function?.name;\n if (typeof functionName === 'string' && functionName.length > 0) {\n existing.name = functionName;\n }\n\n const functionArgs = call.function?.arguments;\n if (typeof functionArgs === 'string' && functionArgs.length > 0) {\n existing.argumentChunks.push(functionArgs);\n }\n\n toolCallMap.set(index, existing);\n }\n }\n\n private createClaudeStreamFromParsed(parsed: ParsedOpenAIStream): string {\n if (parsed.errorMessage) {\n return this.createClaudeErrorStream(parsed.errorMessage);\n }\n\n const thinkingSegments = this.mergeAndChunkSegments(parsed.thinkingSegments);\n const textSegments = this.mergeAndChunkSegments(parsed.textSegments);\n\n const toolCalls = Array.from(parsed.toolCalls.entries())\n .sort((a, b) => a[0] - b[0])\n .map(([index, value]) => ({\n index,\n id: value.id,\n name: value.name,\n arguments: value.argumentChunks.join(''),\n }))\n .filter((call) => call.name);\n\n if (thinkingSegments.length === 0 && textSegments.length === 0 && toolCalls.length === 0) {\n return this.createClaudeErrorStream('Empty streaming response from provider');\n }\n\n const messageId = `msg_${ulid()}`;\n const model = parsed.model || 'gpt-5';\n const events: string[] = [];\n\n events.push('event: message_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_start',\n message: {\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [],\n model,\n stop_reason: null,\n stop_sequence: null,\n usage: { input_tokens: parsed.inputTokens ?? 0, output_tokens: 0 },\n },\n })}`,\n );\n events.push('');\n\n let blockIndex = 0;\n\n if (thinkingSegments.length > 0) {\n events.push('event: content_block_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_start',\n index: blockIndex,\n content_block: { type: 'thinking', thinking: '', _chatgpt_reasoning: true },\n })}`,\n );\n events.push('');\n\n for (const chunk of thinkingSegments) {\n if (!chunk) continue;\n events.push('event: content_block_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_delta',\n index: blockIndex,\n delta: { type: 'thinking_delta', thinking: chunk },\n })}`,\n );\n events.push('');\n }\n\n events.push('event: content_block_stop');\n events.push(`data: ${JSON.stringify({ type: 'content_block_stop', index: blockIndex })}`);\n events.push('');\n blockIndex += 1;\n }\n\n if (textSegments.length > 0) {\n events.push('event: content_block_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_start',\n index: blockIndex,\n content_block: { type: 'text', text: '' },\n })}`,\n );\n events.push('');\n\n for (const chunk of textSegments) {\n if (!chunk) continue;\n events.push('event: content_block_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_delta',\n index: blockIndex,\n delta: { type: 'text_delta', text: chunk },\n })}`,\n );\n events.push('');\n }\n\n events.push('event: content_block_stop');\n events.push(`data: ${JSON.stringify({ type: 'content_block_stop', index: blockIndex })}`);\n events.push('');\n blockIndex += 1;\n }\n\n for (const toolCall of toolCalls) {\n const toolIndex = blockIndex + toolCall.index;\n const toolId = toolCall.id || `tool_${ulid()}`;\n\n events.push('event: content_block_start');\n events.push(\n `data: ${JSON.stringify({\n type: 
'content_block_start',\n index: toolIndex,\n content_block: {\n type: 'tool_use',\n id: toolId,\n name: toolCall.name,\n input: {},\n },\n })}`,\n );\n events.push('');\n\n if (toolCall.arguments) {\n events.push('event: content_block_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_delta',\n index: toolIndex,\n delta: { type: 'input_json_delta', partial_json: toolCall.arguments },\n })}`,\n );\n events.push('');\n }\n\n events.push('event: content_block_stop');\n events.push(`data: ${JSON.stringify({ type: 'content_block_stop', index: toolIndex })}`);\n events.push('');\n }\n\n const stopReason = parsed.stopReason || 'end_turn';\n const outputTokens = parsed.outputTokens ?? 0;\n\n // Build detailed usage object with OpenAI Responses API metadata\n const usageDetails: any = { output_tokens: outputTokens };\n\n // Add detailed token usage if available\n if (parsed.cachedTokens || parsed.reasoningTokens || parsed.reasoningEffort) {\n usageDetails.metadata = {};\n\n if (parsed.cachedTokens) {\n usageDetails.metadata.cached_tokens = parsed.cachedTokens;\n }\n if (parsed.reasoningTokens) {\n usageDetails.metadata.reasoning_tokens = parsed.reasoningTokens;\n }\n if (parsed.reasoningEffort) {\n usageDetails.metadata.reasoning_effort = parsed.reasoningEffort;\n }\n }\n\n events.push('event: message_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_delta',\n delta: { stop_reason: stopReason, stop_sequence: null },\n usage: usageDetails,\n })}`,\n );\n events.push('');\n\n events.push('event: message_stop');\n events.push('data: {\"type\":\"message_stop\"}');\n events.push('');\n\n const result = events.join('\\n');\n return result;\n }\n\n private mergeAndChunkSegments(segments: string[], chunkSize = 2000): string[] {\n if (!segments || segments.length === 0) return [];\n const combined = segments.join('');\n if (!combined) return [];\n\n const result: string[] = [];\n for (let i = 0; i < combined.length; i += chunkSize) {\n result.push(combined.slice(i, i + chunkSize));\n }\n return result;\n }\n\n private mapResponseStatusToStopReason(status?: string | null): string {\n if (!status) return 'end_turn';\n const mapping: Record<string, string> = {\n completed: 'end_turn',\n completed_with_error: 'error',\n completed_with_streaming_error: 'error',\n cancelled: 'error',\n errored: 'error',\n };\n return mapping[status] || 'end_turn';\n }\n\n /**\n * Convert non-streaming OpenAI response to Claude format\n */\n private convertNonStreamingResponse(responseBody: string): string {\n try {\n const openAIResponse = JSON.parse(responseBody);\n\n if (openAIResponse?.error) {\n const errorMessage =\n openAIResponse.error?.message ||\n openAIResponse.error?.error ||\n (typeof openAIResponse.error === 'string' ? 
openAIResponse.error : 'Unexpected API error');\n return this.createClaudeErrorResponse(errorMessage);\n }\n\n const choice = openAIResponse.choices?.[0];\n\n if (!choice) {\n return responseBody;\n }\n\n const messageId = `msg_${ulid()}`;\n\n // Build Claude response format\n const claudeResponse = {\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [\n {\n type: 'text',\n text: choice.message?.content || '',\n },\n ],\n model: openAIResponse.model || 'gpt-4-turbo',\n stop_reason: this.mapFinishReason(choice.finish_reason),\n stop_sequence: null,\n usage: {\n input_tokens: openAIResponse.usage?.prompt_tokens || 0,\n output_tokens: openAIResponse.usage?.completion_tokens || 0,\n },\n };\n\n return JSON.stringify(claudeResponse);\n } catch (error) {\n const fallbackMessage =\n typeof responseBody === 'string' && responseBody.trim() ? responseBody.trim() : 'Unexpected API error';\n return this.createClaudeErrorResponse(fallbackMessage);\n }\n }\n\n /**\n * Map OpenAI finish_reason to Claude stop_reason\n */\n private mapFinishReason(finishReason: string | undefined): string | null {\n if (!finishReason) return null;\n\n const mapping: Record<string, string> = {\n stop: 'end_turn',\n length: 'max_tokens',\n function_call: 'tool_use',\n tool_calls: 'tool_use',\n content_filter: 'stop_sequence',\n };\n\n return mapping[finishReason] || 'end_turn';\n }\n\n private createClaudeErrorResponse(message: string): string {\n const messageId = `msg_${ulid()}`;\n const text = message || 'Unexpected API error';\n\n return JSON.stringify({\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [\n {\n type: 'text',\n text,\n },\n ],\n model: 'gpt-5',\n stop_reason: 'error',\n stop_sequence: null,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n });\n }\n\n private createClaudeErrorStream(message: string): string {\n const messageId = `msg_${ulid()}`;\n const text = message || 'Unexpected API error';\n const events: string[] = [];\n\n events.push('event: message_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_start',\n message: {\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [],\n model: 'gpt-5',\n stop_reason: null,\n stop_sequence: null,\n usage: { input_tokens: 0, output_tokens: 0 },\n },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_start',\n index: 0,\n content_block: { type: 'text', text: '' },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_delta',\n index: 0,\n delta: { type: 'text_delta', text },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_stop');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_stop',\n index: 0,\n })}`,\n );\n events.push('');\n\n events.push('event: message_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_delta',\n delta: { stop_reason: 'error', stop_sequence: null },\n usage: { output_tokens: 0 },\n })}`,\n );\n events.push('');\n\n events.push('event: message_stop');\n events.push('data: {\"type\":\"message_stop\"}');\n events.push('');\n\n return events.join('\\n');\n }\n\n private ensureValidClaudeStream(stream: string, errorMessage: string): string {\n if (!stream || !stream.includes('event: message_start')) {\n return this.createClaudeErrorStream(errorMessage);\n }\n return stream;\n }\n}\n","/**\n * LLM Router\n *\n * Main orchestrator 
for routing LLM requests between providers.\n * Currently supports: Claude (Anthropic) → OpenAI GPT\n *\n * Usage:\n * const router = new LlmRouter({ toProvider: 'openai', toModel: 'gpt-5-turbo', toApiKey: '...' });\n * if (router.shouldRoute(url, body)) {\n * const { url, body, headers } = router.transformRequest(url, body);\n * // Make request to transformed URL with transformed body\n * const response = await fetch(url, { body, headers });\n * // Transform response back to Claude format\n * const claudeResponse = router.transformResponse(responseBody);\n * }\n */\n\nimport { SessionSettingsManager } from '../../utils/sessionSettings';\nimport { CodexAuth } from './auth/CodexAuth';\nimport { ClaudeToOpenAITransformer } from './transformers/claudeToOpenAI';\nimport { OpenAIToClaudeTransformer } from './transformers/openAIToClaude';\nimport type {\n RequestTransformer,\n ResponseTransformer,\n RouterConfig,\n RoutingContext,\n TransformedRequest,\n} from './types';\n\nexport class LlmRouter {\n private config: RouterConfig;\n private requestTransformer: RequestTransformer | null = null;\n private responseTransformer: ResponseTransformer | null = null;\n private routingContextMap = new Map<string, RoutingContext>();\n private codexAuth?: CodexAuth;\n private hasSessionSettings = false; // True when session has a provider configured\n\n // Session settings caching\n private lastSessionCheckTimestamp = 0;\n private readonly SESSION_CHECK_INTERVAL_MS = 1000; // Check every 1 second\n private cachedSessionSettings: any = null;\n\n // URL patterns for detecting source providers\n private readonly CLAUDE_PATTERNS = [\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i,\n /\\/publishers\\/anthropic\\/models/i,\n ];\n\n constructor(config: RouterConfig) {\n this.config = config;\n this.applySessionSettings();\n }\n\n /**\n * Apply session-specific settings if sessionId is provided\n * This runs synchronously during construction by deferring transformer initialization\n */\n private applySessionSettings(): void {\n if (this.config.sessionId) {\n // Load session settings asynchronously, but we need to handle this\n // We'll initialize transformers after loading settings\n this.loadSessionSettingsAsync();\n } else {\n // No session ID, initialize immediately\n this.initializeRouter();\n }\n }\n\n /**\n * Load session settings asynchronously and then initialize router\n */\n private async loadSessionSettingsAsync(): Promise<void> {\n await this.checkAndReloadSessionSettings();\n // Initialize router after loading session settings\n this.initializeRouter();\n }\n\n /**\n * Check if session settings need to be reloaded based on time interval\n * This is called periodically to support dynamic session switching\n */\n private async checkAndReloadSessionSettings(): Promise<void> {\n if (!this.config.sessionId) {\n return;\n }\n\n const now = Date.now();\n\n // Check if enough time has passed since last check\n if (now - this.lastSessionCheckTimestamp < this.SESSION_CHECK_INTERVAL_MS) {\n return;\n }\n\n try {\n const sessionSettings = await SessionSettingsManager.getSessionSettings(this.config.sessionId);\n this.lastSessionCheckTimestamp = now;\n\n // Check if settings have changed\n const settingsChanged = JSON.stringify(sessionSettings) !== JSON.stringify(this.cachedSessionSettings);\n\n if (settingsChanged) {\n this.cachedSessionSettings = sessionSettings;\n\n // Check if session has a provider configured (not just if session exists)\n if (sessionSettings && sessionSettings.provider) {\n // Map 
provider name (e.g., \"chatgpt\" → \"openai\")\n const mappedProvider = this.mapProviderName(sessionSettings.provider);\n\n // Override config with session settings\n this.config.toProvider = mappedProvider;\n this.config.toModel = sessionSettings.model;\n\n // Store reasoning effort in config for transformer to use\n (this.config as any).sessionReasoningEffort = sessionSettings.reasoningEffort;\n\n // Mark that we have session settings with provider (for shouldRoute logic)\n this.hasSessionSettings = true;\n\n // Re-initialize transformers with new settings\n this.initializeTransformers();\n } else {\n this.hasSessionSettings = false;\n }\n }\n } catch (error) {\n this.config.logger?.error('[Router] Failed to check session settings', error as Error);\n }\n }\n\n /**\n * Initialize CodexAuth and transformers\n */\n private initializeRouter(): void {\n // Initialize CodexAuth if no API key is provided\n if (!this.config.toApiKey) {\n this.codexAuth = new CodexAuth(this.config.logger);\n }\n\n this.initializeTransformers();\n }\n\n /**\n * Map provider names to canonical format\n * e.g., \"chatgpt\" → \"openai\", \"gpt\" → \"openai\"\n */\n private mapProviderName(provider: string): string {\n const normalizedProvider = provider.toLowerCase().trim();\n\n // Map various ChatGPT/GPT names to \"openai\"\n if (normalizedProvider === 'chatgpt' || normalizedProvider === 'gpt' || normalizedProvider === 'openai') {\n return 'openai';\n }\n\n // Add more mappings as needed\n // e.g., \"gemini\" → \"google\", \"claude\" → \"anthropic\"\n\n return normalizedProvider;\n }\n\n /**\n * Initialize transformers based on target provider\n */\n private initializeTransformers(): void {\n const targetProvider = this.config.toProvider?.toLowerCase();\n\n if (targetProvider === 'openai') {\n // Claude → OpenAI\n this.requestTransformer = new ClaudeToOpenAITransformer(this.config, this.codexAuth);\n this.responseTransformer = new OpenAIToClaudeTransformer(this.config.logger);\n }\n // Future: Add support for other provider combinations\n // else if (targetProvider === 'google') { ... 
}\n }\n\n /**\n * Check if a request should be routed\n *\n * @param url Request URL\n * @param requestBody Request body (JSON string)\n * @returns True if routing should occur\n */\n shouldRoute(url: string, requestBody: string): boolean {\n // Check for session settings updates before routing decision\n this.checkAndReloadSessionSettings().catch((err) => {\n this.config.logger?.error('[Router] Failed to check session settings in shouldRoute', err);\n });\n\n // Must have transformers configured\n if (!this.requestTransformer || !this.responseTransformer) {\n return false;\n }\n\n // Verify we have a valid request body\n if (!requestBody) {\n return false;\n }\n\n try {\n // Ensure it's valid JSON\n JSON.parse(requestBody);\n\n // Only route if session has a provider configured\n if (this.hasSessionSettings) {\n const isClaudeCall = this.CLAUDE_PATTERNS.some((pattern) => pattern.test(url));\n\n if (isClaudeCall) {\n return true;\n }\n }\n\n // No session provider configured - pass through without routing\n return false;\n } catch (err) {\n this.config.logger?.error('[Router] JSON parse error in shouldRoute', err as Error);\n return false;\n }\n }\n\n /**\n * Transform request from source provider to target provider\n *\n * @param url Original request URL\n * @param requestBody Original request body\n * @returns Transformed request with new URL, body, and headers\n */\n async transformRequest(url: string, requestBody: string): Promise<TransformedRequest> {\n if (!this.requestTransformer) {\n throw new Error('Request transformer not initialized');\n }\n\n const transformed = await this.requestTransformer.transform(url, requestBody);\n\n // Store routing context for this request (keyed by original URL + timestamp to handle concurrent requests)\n const contextKey = `${url}_${Date.now()}`;\n this.routingContextMap.set(contextKey, {\n originalUrl: url,\n targetUrl: transformed.url,\n fromProvider: 'anthropic',\n toProvider: this.config.toProvider,\n wasRouted: true,\n });\n\n return transformed;\n }\n\n /**\n * Transform response from target provider back to source provider format\n *\n * @param responseBody Response body from target provider\n * @returns Transformed response in source provider format\n */\n transformResponse(responseBody: string): string {\n if (!this.responseTransformer) {\n throw new Error('Response transformer not initialized');\n }\n\n try {\n return this.responseTransformer.transform(responseBody);\n } catch (error) {\n this.config.logger?.error('[Router] Response transformation failed', error as Error);\n // Return a valid empty Claude response instead of throwing\n // This prevents malformed responses from breaking Claude Code\n return this.createFallbackClaudeResponse();\n }\n }\n\n /**\n * Clean request body for pass-through to Anthropic\n * Removes ChatGPT thinking blocks (marked with _chatgpt_reasoning: true)\n * while preserving native Anthropic thinking blocks\n *\n * @param requestBody Original request body\n * @returns Cleaned request body\n */\n cleanRequestForPassthrough(requestBody: string): string {\n try {\n const requestData = JSON.parse(requestBody);\n\n // Check if request has messages array (Anthropic format)\n if (!requestData.messages || !Array.isArray(requestData.messages)) {\n return requestBody;\n }\n\n let hasThinkingBlocks = false;\n\n // Remove ChatGPT thinking blocks (marked with _chatgpt_reasoning: true)\n // Preserve native Anthropic thinking blocks\n const cleanedMessages = requestData.messages\n .map((msg: any) => {\n if (!msg.content || 
!Array.isArray(msg.content)) {\n return msg;\n }\n\n // Filter out only ChatGPT thinking blocks (preserve Anthropic thinking blocks)\n const cleanedContent = msg.content.filter((block: any) => {\n if (block.type === 'thinking' && block._chatgpt_reasoning === true) {\n hasThinkingBlocks = true;\n return false; // Remove only ChatGPT thinking blocks\n }\n return true; // Keep all other blocks including Anthropic thinking\n });\n\n // Return message with cleaned content (skip empty content arrays)\n return cleanedContent.length > 0\n ? {\n ...msg,\n content: cleanedContent,\n }\n : null; // Mark for removal if no content left\n })\n .filter(Boolean); // Remove null messages\n\n if (hasThinkingBlocks) {\n // Update request body with cleaned messages\n requestData.messages = cleanedMessages;\n const cleanedBody = JSON.stringify(requestData);\n\n return cleanedBody;\n }\n\n return requestBody;\n } catch (err) {\n // If parsing fails, just pass through the original body\n this.config.logger?.warn('[Router] Failed to clean request for passthrough', {\n error: err instanceof Error ? err.message : String(err),\n });\n return requestBody;\n }\n }\n\n /**\n * Create a valid empty Claude response as fallback when transformation fails\n */\n private createFallbackClaudeResponse(): string {\n const messageId = `msg_error_${Date.now()}`;\n const events: string[] = [];\n\n events.push('event: message_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'message_start',\n message: {\n id: messageId,\n type: 'message',\n role: 'assistant',\n content: [],\n model: 'gpt-5',\n stop_reason: null,\n stop_sequence: null,\n usage: { input_tokens: 0, output_tokens: 0 },\n },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_start');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_start',\n index: 0,\n content_block: { type: 'text', text: '' },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_delta');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_delta',\n index: 0,\n delta: {\n type: 'text_delta',\n text: 'Error: Response transformation failed. 
Please try again.',\n },\n })}`,\n );\n events.push('');\n\n events.push('event: content_block_stop');\n events.push(\n `data: ${JSON.stringify({\n type: 'content_block_stop',\n index: 0,\n })}`,\n );\n events.push('');\n\n events.push('event: message_stop');\n events.push('data: {\"type\":\"message_stop\"}');\n events.push('');\n\n return events.join('\\n');\n }\n\n /**\n * Check if a request was routed (useful for logging/debugging)\n *\n * @param url URL to check\n * @returns True if this URL was routed\n */\n wasRouted(url: string): boolean {\n for (const context of this.routingContextMap.values()) {\n if (context.targetUrl === url) {\n return true;\n }\n }\n return false;\n }\n\n /**\n * Get routing context for a request\n *\n * @param url URL to look up\n * @returns Routing context if found\n */\n getRoutingContext(url: string): RoutingContext | undefined {\n for (const context of this.routingContextMap.values()) {\n if (context.targetUrl === url || context.originalUrl === url) {\n return context;\n }\n }\n return undefined;\n }\n\n /**\n * Clear routing context (cleanup)\n */\n clearRoutingContext(url: string): void {\n for (const [key, context] of this.routingContextMap.entries()) {\n if (context.targetUrl === url || context.originalUrl === url) {\n this.routingContextMap.delete(key);\n }\n }\n }\n\n /**\n * Get the target provider name\n */\n getTargetProvider(): string {\n return this.config.toProvider;\n }\n\n /**\n * Get the target model name\n */\n getTargetModel(): string | undefined {\n return this.config.toModel;\n }\n}\n\nexport * from './types';\n","/**\n * Network logging hook - captures LLM HTTP responses\n *\n * STANDALONE HOOK - No external dependencies\n * This file must be self-contained as it's loaded via --import flag\n *\n * Simple approach:\n * - Patches fetch via LlmInterceptor\n * - Logs directly to file stream (avoids stdout pollution)\n * - Sends LLM calls to remote immediately (fire-and-forget)\n * - No batching, no cleanup, no complex error handling\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../config/subenv';\nimport { LlmInterceptor, LlmProvider } from '../llms';\nimport { FileLogger } from '../llms/loggers/file';\nimport { LlmRouter } from '../llms/router';\nimport { AgentHttpService } from '../services/AgentHttpService';\n\nclass NetworkLog {\n private logger: FileLogger;\n private agentHttpService: AgentHttpService | null = null;\n private interceptor: LlmInterceptor;\n private llmProvider: LlmProvider;\n private maxBodyBytes: number;\n private sessionId: string;\n private router?: LlmRouter;\n\n constructor() {\n const parsedMax = SubEnvManager.netlogMaxBody;\n this.maxBodyBytes = parsedMax || 8192;\n this.sessionId = SubEnvManager.sessionId || '';\n\n this.llmProvider = new LlmProvider();\n\n // Use system temp directory for automatic garbage collection\n const logPath =\n SubEnvManager.netlogFile || path.join(os.tmpdir(), 'agiflow-agents', 'network', `network-${process.pid}.log`);\n this.logger = new FileLogger(logPath);\n\n // Initialize router with session-based configuration\n // Router will read provider/model from ~/.agiflow/sessions.json\n if (this.sessionId) {\n try {\n this.logger.info('[NetworkLog] Creating LlmRouter instance for session', {\n sessionId: this.sessionId,\n });\n\n this.router = new LlmRouter({\n toProvider: 'openai', // Placeholder, will be overridden by session settings\n sessionId: this.sessionId,\n logger: this.logger,\n 
});\n\n this.logger.info('[NetworkLog] ✓ LLM Router initialized successfully', {\n sessionId: this.sessionId,\n routerExists: !!this.router,\n });\n } catch (error) {\n this.logger.error('[NetworkLog] ✗ Failed to initialize LLM Router', error as Error);\n }\n } else {\n this.logger.info('[NetworkLog] No session ID, skipping router initialization');\n }\n\n let apiUrl = SubEnvManager.serverUrl || '';\n if (apiUrl.startsWith('wss://')) apiUrl = 'https://' + apiUrl.slice(6);\n else if (apiUrl.startsWith('ws://')) apiUrl = 'http://' + apiUrl.slice(5);\n const apiKey = SubEnvManager.apiKey || '';\n const organizationId = SubEnvManager.organizationId || '';\n\n const enableRemote = SubEnvManager.isNetlogRemoteEnabled && !!(this.sessionId && apiUrl);\n\n if (enableRemote && apiUrl) {\n try {\n this.agentHttpService = new AgentHttpService({\n apiUrl,\n organizationId,\n apiKey,\n logger: this.logger as any,\n });\n } catch (error) {\n if (FileLogger.shouldDebug()) {\n this.logger.error('AgentHttpService initialization failed', error as Error);\n }\n }\n }\n\n // Log interceptor initialization\n this.logger.info('[NetworkLog] Initializing LlmInterceptor', {\n hasRouter: !!this.router,\n hasAgentHttpService: !!this.agentHttpService,\n hasSessionId: !!this.sessionId,\n enableDebug: SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug,\n });\n\n this.interceptor = LlmInterceptor.getInstance({\n maxBodyBytes: this.maxBodyBytes,\n enableDebug: SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug,\n llmProvider: this.llmProvider,\n idGenerator: ulid,\n agentHttpService: this.agentHttpService || undefined,\n sessionId: this.sessionId,\n router: this.router,\n logger: this.logger,\n logFilePath: this.logger.getLogPath(),\n });\n\n this.logger.info('[NetworkLog] ✓ LlmInterceptor initialized', {\n interceptorExists: !!this.interceptor,\n });\n }\n\n public start(): void {\n this.interceptor.start();\n }\n}\n\n// Initialize and start network logging (guard ensures idempotence)\nconst networkLog = new 
NetworkLog();\nnetworkLog.start();\n"],"names":["FileLogger","logPath","logDir","path","fs","error","message","data","level","logEntry","SubEnvManager","CostCalculator","modelName","usage","modelPricing","costData","inputTokens","outputTokens","cachedInputTokens","cacheCreationInputTokens","inputRate","outputRate","cacheReadRate","cacheCreationRate","inputCost","outputCost","cacheReadCost","cacheCreationCost","ClaudeParser","rawText","lines","contentBlocks","messageMetadata","blockMap","line","dataStr","block","parts","combinedUsage","tokenUsage","cost","len","part","_","key","GeminiParser","looksGzip","working","gun","json","response","fullText","finishReason","texts","nextSpeakers","reasoningSegments","t","modelVersion","responseId","usageMetadata","collectedTexts","jsonStr","extracted","l","candidates","cand","raw","trimmed","parsed","str","zlib.gunzipSync","text","control","sampleLen","i","code","input","output","total","LlmParser","url","parser","p","result","mapping","pattern","name","patterns","LlmProvider","provider","providerName","ClaudeEnhancer","requestBody","contextData","request","contextSystemMessage","GeminiEnhancer","contextInstruction","LlmEnhancer","enhancer","e","FetchInstrument","ctx","extra","resource","init","start","performance","effectiveResource","effectiveInit","originalUrl","method","requestHeaders","enhancedBody","routing","wasRouted","entryId","entryTime","startEntry","err","res","networkError","duration","responseHeaders","capture","responseBody","processedBody","bodyBytes","isEventStream","urlForProcessing","source","headers","value","body","obj","args","shouldRoute","cleaned","transformed","target","headerArray","contentType","requestAccept","isTextLike","arrayBuffer","buf","remaining","reader","chunks","totalBytes","done","capturedText","c","finalLogEntry","transformedStream","controller","encoder","stream1","stream2","capturePromise","HttpInstrument","enableHttps","_extra","buildUrl","options","protocolDefault","protocol","host","portPart","wrapRequest","original","callback","startHr","self","requestBodyBuffers","requestLen","requestBodySent","req","responseBuffers","responseBytes","capturedBytes","contentTypeHeader","chunk","ct","maxBody","bufferRaw","wasTruncated","buffer","contentEncoding","zlib","combined","u","originalWrite","originalEnd","encoding","cb","isValid","http","https","o","r","LlmInterceptor","config","ulid","entry","baseLogDir","os","logFilePath","errorLogFilePath","processedLogFilePath","common","hasMessages","hasPrompt","hasInput","hasContents","hasRequestContents","requestParts","requestData","userMessage","msg","userContent","startMetadata","messageId","completionMetadata","processedMeta","responseParts","updateData","CodexAuth","logger","authData","access_token","refresh_token","newTokens","content","token","payload","expiresAt","now","bufferMs","decoded","refreshToken","errorText","accessToken","tokens","codexInstructions","ClaudeToOpenAITransformer","codexAuth","_url","claudeRequest","conversationId","randomUUID","sessionId","sessionReasoningEffort","isHaikuModel","reasoningEffort","responsesRequest","claudeSystemPrompt","systemMessages","converted","convertedTools","targetUrl","accountId","instructions","adapted","claudeCodePatterns","filtered","system","messages","item","textType","toolResults","toolCalls","textContent","resultText","mediaType","imageUrl","tools","tool","OpenAIToClaudeTransformer","events","sseText","claudeStream","fallbackMessage","currentEvent","isResponsesApi","rawLine","dataContent","eventName","eventType","su
mmaryItem","contentItem","toolCallData","choice","node","collector","nestedKeys","rawToolCalls","toolCallMap","callsArray","call","index","existing","functionName","functionArgs","thinkingSegments","textSegments","a","b","model","blockIndex","toolCall","toolIndex","toolId","stopReason","usageDetails","segments","chunkSize","status","openAIResponse","errorMessage","claudeResponse","stream","LlmRouter","sessionSettings","SessionSettingsManager","mappedProvider","normalizedProvider","contextKey","hasThinkingBlocks","cleanedMessages","cleanedContent","context","NetworkLog","parsedMax","apiUrl","apiKey","organizationId","AgentHttpService","networkLog"],"mappings":"wfAmBO,MAAMA,CAAW,CACd,UACA,QAER,YAAYC,EAAiB,CAC3B,KAAK,QAAUA,EAGf,MAAMC,EAASC,EAAK,QAAQ,KAAK,OAAO,EACnCC,EAAG,WAAWF,CAAM,GACvBE,EAAG,UAAUF,EAAQ,CAAE,UAAW,GAAM,EAI1C,KAAK,UAAYE,EAAG,kBAAkB,KAAK,QAAS,CAAE,MAAO,IAAK,EAGlE,KAAK,UAAU,GAAG,QAAUC,GAAU,CAEpC,QAAQ,MAAM,2CAA2CA,EAAM,OAAO,EAAE,CAC1E,CAAC,CACH,CAKA,KAAKC,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,KAAKD,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,IAAIC,EAAeF,EAAiBC,EAAkC,CACpE,MAAME,EAAqB,CACzB,KAAM,IAAI,KAAA,EAAO,YAAA,EACjB,MAAAD,EACA,IAAKF,EACL,GAAGC,CAAA,EAGL,GAAI,CACF,KAAK,UAAU,MAAM,KAAK,UAAUE,CAAQ,EAAI;AAAA,CAAI,CACtD,OAASJ,EAAO,CAEd,QAAQ,MAAM,2CAA2CA,CAAK,EAAE,CAClE,CACF,CAKA,OAAc,CACR,KAAK,WAAa,CAAC,KAAK,UAAU,WACpC,KAAK,UAAU,IAAA,CAEnB,CAKA,YAAqB,CACnB,OAAO,KAAK,OACd,CAKA,OAAO,aAAuB,CAC5B,OAAOK,EAAAA,cAAc,aAAeA,EAAAA,cAAc,aACpD,CACF,4/8FCxFO,MAAMC,EAAe,CAI1B,OAAO,cAAcC,EAAmBC,EAAqC,CAC3E,MAAMC,EAAeC,GAASH,CAAkC,EAEhE,GAAI,CAACE,EACH,OAAO,KAGT,MAAME,EAAcH,EAAM,aAAe,EACnCI,EAAeJ,EAAM,cAAgB,EACrCK,EAAoBL,EAAM,mBAAqB,EAC/CM,EAA2BN,EAAM,0BAA4B,EAG7DO,EAAY,yBAA0BN,EAAeA,EAAa,qBAAuB,EACzFO,EAAa,0BAA2BP,EAAeA,EAAa,sBAAwB,EAC5FQ,EAAgB,gCAAiCR,EAAeA,EAAa,4BAA8B,EAC3GS,EACJ,oCAAqCT,EAAeA,EAAa,gCAAkC,EAG/FU,EAAYR,EAAcI,EAC1BK,EAAaR,EAAeI,EAC5BK,EAAgBR,EAAoBI,EACpCK,EAAoBR,EAA2BI,EAIrD,MAAO,CACL,UAHgBC,EAAYC,EAAaC,EAAgBC,EAIzD,UAAAH,EACA,WAAAC,EACA,cAAeP,EAAoB,EAAIQ,EAAgB,OACvD,kBAAmBP,EAA2B,EAAIQ,EAAoB,MAAA,CAE1E,CACF,CC7CO,MAAMC,CAAa,CAOxB,OAAO,uBACLC,EAGqC,CACrC,GAAI,CACF,MAAMC,EAAQD,EAAQ,MAAM;AAAA,CAAI,EAC1BE,EAAuB,CAAA,EAC7B,IAAIC,EAAuB,KACvBnB,EAAa,KACjB,MAAMoB,MAAe,IAErB,UAAWC,KAAQJ,EACjB,GAAII,EAAK,WAAW,QAAQ,EAAG,CAC7B,MAAMC,EAAUD,EAAK,UAAU,CAAC,EAAE,KAAA,EAClC,GAAIC,EACF,GAAI,CACF,MAAM5B,EAAO,KAAK,MAAM4B,CAAO,EAQ/B,GALI5B,EAAK,OAAS,iBAAmBA,EAAK,UACxCyB,EAAkBzB,EAAK,SAIrBA,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQ,CACZ,MAAO7B,EAAK,MACZ,KAAMA,EAAK,cAAc,KACzB,GAAIA,EAAK,cAAc,GACvB,KAAMA,EAAK,cAAc,KACzB,MAAOA,EAAK,cAAc,OAAS,CAAA,EACnC,KAAM,GACN,YAAa,EAAA,EAEf0B,EAAS,IAAI1B,EAAK,MAAO6B,CAAK,CAChC,CAGA,GAAI7B,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACjC6B,IACE7B,EAAK,MAAM,OAAS,aACtB6B,EAAM,MAAQ7B,EAAK,MAAM,KAChBA,EAAK,MAAM,OAAS,qBAC7B6B,EAAM,aAAe7B,EAAK,MAAM,cAGtC,CAGA,GAAIA,EAAK,OAAS,qBAAsB,CACtC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACrC,GAAI6B,EAAO,CAET,GAAIA,EAAM,YACR,GAAI,CACFA,EAAM,MAAQ,KAAK,MAAMA,EAAM,WAAW,CAC5C,MAAqB,CAGrB,CAEFL,EAAc,KAAKK,CAAK,CAC1B,CACF,CAGI7B,EAAK,OAAS,iBAAmBA,EAAK,QACxCM,EAAQN,EAAK,MAEjB,MAAQ,CAER,CAEJ,CAIF,MAAM8B,GAAwBN,GAAiB,CAAA,GAAI,IAAKK,GAClDA,EAAM,OAAS,OACV,CACL,KAAM,OACN,KAAMA,EAAM,IAAA,EAELA,EAAM,OAAS,WACjB,CACL,KAAM,kBACN,eAAgB,CACd,WAAYA,EAAM,GAClB,SAAUA,EAAM,KAChB,KAAMA,EAAM,MACZ,MAAO,MAAA,CACT,EAIG,CACL,KAAM,OACN,KAAM,+BAA+BA,EAAM,IAAI,EAAA,CAElD,EAGK9B,EAAU,CACd,GAAI0B,GAAiB,IAAMJ,
[... remainder of the deleted networkLog.js.map source map (encoded "mappings" data and closing JSON) omitted ...]
|