@agiflowai/agent-cli 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/{AgentHttpService-CKoRDtlb.js → AgentHttpService-CYYuD3Wj.js} +3 -3
- package/dist/AgentHttpService-CYYuD3Wj.js.map +1 -0
- package/dist/claudePostToolUse.js +4 -4
- package/dist/{cli-Zr10_hUF.js → cli-CUdEH93F.js} +61 -61
- package/dist/{cli-Zr10_hUF.js.map → cli-CUdEH93F.js.map} +1 -1
- package/dist/cli.js +1 -1
- package/dist/index.js +1 -1
- package/dist/networkLog.js +15 -11
- package/dist/networkLog.js.map +1 -1
- package/dist/package.json +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
- package/dist/AgentHttpService-CKoRDtlb.js.map +0 -1
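The bulk of this diff is the regenerated source map for networkLog.js, whose embedded sources describe the CLI's LLM network-logging pipeline: LlmProvider detects LLM endpoints by URL, LlmParser routes SSE bodies to the Claude or Gemini parser, and LlmInterceptor patches fetch and http/https to log and forward each call. A minimal sketch of how those pieces compose, using only identifiers that appear in the embedded sources; the import paths are the map's internal source layout, not published package exports, so treat them as assumptions:

```ts
// Sketch only: module paths below mirror the source map's internal layout
// (src/llms/...) and are not published entry points of @agiflowai/agent-cli.
import { LlmInterceptor } from './llms/interceptors';
import { LlmParser } from './llms/parsers';
import { LlmProvider } from './llms/providers';

const provider = new LlmProvider();
const url = 'https://api.anthropic.com/v1/messages';

// URL-based detection, as implemented in providers/index.ts.
if (provider.isLLMCall(url)) {
  console.log(provider.detectProvider(url)); // "anthropic"
}

// The interceptor patches global fetch and http/https, then runs each LLM
// response through responseProcessor (LlmParser is the default processor).
const interceptor = LlmInterceptor.getInstance({
  maxBodyBytes: 8192, // default used by the bundle
  enableHttpsPatching: true,
  responseProcessor: (u, body) => LlmParser.parseStreamingResponse(u, body),
});
interceptor.start();
```

In the bundled hook itself this wiring happens inside the NetworkLog class, driven entirely by SubEnvManager environment values and loaded via the --import flag.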
package/dist/networkLog.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"networkLog.js","sources":["../src/llms/loggers/file.ts","../src/llms/parsers/costCalculator.ts","../src/llms/parsers/claude.ts","../__vite-browser-external:node:zlib","../src/llms/parsers/gemini.ts","../src/llms/parsers/index.ts","../src/llms/providers/index.ts","../src/llms/interceptors/instruments/fetchInstrument.ts","../src/llms/interceptors/instruments/httpInstrument.ts","../src/llms/interceptors/index.ts","../src/hooks/networkLog.ts"],"sourcesContent":["// filepath: /Users/vuongngo/workspace/agiflow/apps/agent-cli/src/llms/loggers/file.ts\n/**\n * FileLogger - Direct file logging without stdout pollution\n *\n * This logger writes structured JSON logs directly to a file stream\n * to avoid interfering with stdout when used as an imported module\n */\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport { SubEnvManager } from '../../config/subenv';\n\nexport interface LogEntry {\n time: string;\n level: number;\n msg: string;\n [key: string]: any;\n}\n\nexport class FileLogger {\n private logStream: fs.WriteStream;\n private logPath: string;\n\n constructor(logPath: string) {\n this.logPath = logPath;\n\n // Ensure directory exists\n const logDir = path.dirname(this.logPath);\n if (!fs.existsSync(logDir)) {\n fs.mkdirSync(logDir, { recursive: true });\n }\n\n // Create write stream for direct file logging\n this.logStream = fs.createWriteStream(this.logPath, { flags: 'a' });\n\n // Handle stream errors gracefully\n this.logStream.on('error', (error) => {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Error writing to log file: ${error.message}`);\n });\n }\n\n /**\n * Log an info-level message\n */\n info(message: string, data?: Record<string, any>): void {\n this.log(30, message, data);\n }\n\n /**\n * Log a debug-level message\n */\n debug(message: string, data?: Record<string, any>): void {\n this.log(20, message, data);\n }\n\n /**\n * Log a warning-level message\n */\n warn(message: string, data?: Record<string, any>): void {\n this.log(40, message, data);\n }\n\n /**\n * Log an error-level message\n */\n error(message: string, data?: Record<string, any>): void {\n this.log(50, message, data);\n }\n\n /**\n * Log a message with specified level\n */\n log(level: number, message: string, data?: Record<string, any>): void {\n const logEntry: LogEntry = {\n time: new Date().toISOString(),\n level,\n msg: message,\n ...data,\n };\n\n try {\n this.logStream.write(JSON.stringify(logEntry) + '\\n');\n } catch (error) {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Failed to write log entry: ${error}`);\n }\n }\n\n /**\n * Close the log stream\n */\n close(): void {\n if (this.logStream && !this.logStream.destroyed) {\n this.logStream.end();\n }\n }\n\n /**\n * Get the current log file path\n */\n getLogPath(): string {\n return this.logPath;\n }\n\n /**\n * Check if debug logging should be enabled based on environment\n */\n static shouldDebug(): boolean {\n return SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug;\n }\n}\n","/**\n * Cost Calculator for LLM Models\n *\n * Calculates costs based on token usage using pricing data from cost.json\n */\n\nimport costData from '../../../cost.json';\n\nexport interface ModelCost {\n totalCost: number;\n inputCost: number;\n outputCost: number;\n cacheReadCost?: number;\n cacheCreationCost?: number;\n}\n\nexport interface TokenUsage {\n inputTokens?: number;\n outputTokens?: number;\n cachedInputTokens?: number;\n 
cacheCreationInputTokens?: number;\n}\n\nexport class CostCalculator {\n /**\n * Calculate cost for a given model and token usage\n */\n static calculateCost(modelName: string, usage: TokenUsage): ModelCost | null {\n const modelPricing = costData[modelName as keyof typeof costData];\n\n if (!modelPricing) {\n return null;\n }\n\n const inputTokens = usage.inputTokens || 0;\n const outputTokens = usage.outputTokens || 0;\n const cachedInputTokens = usage.cachedInputTokens || 0;\n const cacheCreationInputTokens = usage.cacheCreationInputTokens || 0;\n\n // Get rates from pricing data with type guards\n const inputRate = 'input_cost_per_token' in modelPricing ? modelPricing.input_cost_per_token : 0;\n const outputRate = 'output_cost_per_token' in modelPricing ? modelPricing.output_cost_per_token : 0;\n const cacheReadRate = 'cache_read_input_token_cost' in modelPricing ? modelPricing.cache_read_input_token_cost : 0;\n const cacheCreationRate =\n 'cache_creation_input_token_cost' in modelPricing ? modelPricing.cache_creation_input_token_cost : 0;\n\n // Calculate costs\n const inputCost = inputTokens * inputRate;\n const outputCost = outputTokens * outputRate;\n const cacheReadCost = cachedInputTokens * cacheReadRate;\n const cacheCreationCost = cacheCreationInputTokens * cacheCreationRate;\n\n const totalCost = inputCost + outputCost + cacheReadCost + cacheCreationCost;\n\n return {\n totalCost,\n inputCost,\n outputCost,\n cacheReadCost: cachedInputTokens > 0 ? cacheReadCost : undefined,\n cacheCreationCost: cacheCreationInputTokens > 0 ? cacheCreationCost : undefined,\n };\n }\n}\n","/**\n * Claude API Streaming Response Parser\n *\n * Parses Claude API streaming responses (Server-Sent Events format)\n * into structured messages following the blazegent message format.\n *\n * Handles:\n * - Text content blocks\n * - Tool use content blocks\n * - Token usage information\n * - Message metadata\n * - Cost calculation based on token usage\n */\n\nimport type { MessagePart } from '../messages';\nimport { CostCalculator } from './costCalculator';\n\nexport class ClaudeParser {\n /**\n * Parses Claude API streaming response into structured message format\n *\n * @param rawText - Raw streaming response text in SSE format\n * @returns Parsed message object with metadata and usage information\n */\n static parseStreamingResponse(\n rawText: string,\n ):\n | { message: { id: string; role: 'assistant'; createdAt: string; parts: MessagePart[] }; metadata: any }\n | { error: string; rawText: string } {\n try {\n const lines = rawText.split('\\n');\n const contentBlocks: any[] = [];\n let messageMetadata: any = null;\n let usage: any = null;\n const blockMap = new Map<number, any>(); // Track content blocks by index\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n const dataStr = line.substring(6).trim();\n if (dataStr) {\n try {\n const data = JSON.parse(dataStr);\n\n // Extract message metadata\n if (data.type === 'message_start' && data.message) {\n messageMetadata = data.message;\n }\n\n // Handle content block start (text or tool_use)\n if (data.type === 'content_block_start') {\n const block = {\n index: data.index,\n type: data.content_block.type,\n id: data.content_block.id,\n name: data.content_block.name,\n input: data.content_block.input || {},\n text: '',\n partialJson: '',\n };\n blockMap.set(data.index, block);\n }\n\n // Handle content block deltas (text or JSON input)\n if (data.type === 'content_block_delta') {\n const block = blockMap.get(data.index);\n if (block) 
{\n if (data.delta.type === 'text_delta') {\n block.text += data.delta.text;\n } else if (data.delta.type === 'input_json_delta') {\n block.partialJson += data.delta.partial_json;\n }\n }\n }\n\n // Handle content block stop\n if (data.type === 'content_block_stop') {\n const block = blockMap.get(data.index);\n if (block) {\n // Try to parse accumulated JSON for tool inputs\n if (block.partialJson) {\n try {\n block.input = JSON.parse(block.partialJson);\n } catch (parseError) {\n // Pure telemetry - no mutations, no logging\n // Leave block.input unchanged\n }\n }\n contentBlocks.push(block);\n }\n }\n\n // Extract final usage information\n if (data.type === 'message_delta' && data.usage) {\n usage = data.usage;\n }\n } catch {\n // Ignore malformed JSON\n }\n }\n }\n }\n\n // Convert content blocks to blazegent parts format\n const parts: MessagePart[] = contentBlocks.map((block) => {\n if (block.type === 'text') {\n return {\n type: 'text' as const,\n text: block.text,\n };\n } else if (block.type === 'tool_use') {\n return {\n type: 'tool-invocation' as const,\n toolInvocation: {\n toolCallId: block.id,\n toolName: block.name,\n args: block.input,\n state: 'call' as const,\n },\n };\n }\n // For unknown block types, create a text part with error message\n return {\n type: 'text' as const,\n text: `Unknown content block type: ${block.type}`,\n };\n });\n\n // Create message in blazegent format\n const message = {\n id: messageMetadata?.id || ClaudeParser.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: parts.length > 0 ? parts : [{ type: 'text' as const, text: '' }],\n };\n\n // Combine usage information from both sources\n const combinedUsage = {\n ...(messageMetadata?.usage || {}),\n ...(usage || {}),\n };\n\n // Normalize token usage data\n const tokenUsage = {\n inputTokens: combinedUsage.input_tokens || combinedUsage.inputTokens,\n outputTokens: combinedUsage.output_tokens || combinedUsage.outputTokens,\n cachedInputTokens: combinedUsage.cache_read_input_tokens || combinedUsage.cachedInputTokens,\n cacheCreationInputTokens: combinedUsage.cache_creation_input_tokens || combinedUsage.cacheCreationInputTokens,\n totalTokens: (combinedUsage.input_tokens || 0) + (combinedUsage.output_tokens || 0),\n cacheCreation: combinedUsage.cache_creation,\n serviceTier: combinedUsage.service_tier,\n };\n\n // Calculate cost if model is available\n const modelName = messageMetadata?.model;\n const cost = modelName ? 
CostCalculator.calculateCost(modelName, tokenUsage) : null;\n\n // Add metadata with comprehensive usage information\n const result: any = {\n message,\n metadata: {\n model: modelName,\n usage: tokenUsage,\n cost,\n originalMessageId: messageMetadata?.id,\n stopReason: messageMetadata?.stop_reason,\n contentLength: parts.reduce((len, part) => {\n if (part.type === 'text') {\n return len + part.text.length;\n } else if (part.type === 'tool-invocation') {\n return len + JSON.stringify(part.toolInvocation.args || '').length;\n }\n return len + JSON.stringify(part).length;\n }, 0),\n streamingEvents: lines.filter((line) => line.startsWith('event:')).length,\n },\n };\n\n return result;\n } catch (error) {\n return {\n error: `Failed to parse streaming response: ${error}`,\n rawText,\n };\n }\n }\n\n /**\n * Simple ID generator for messages\n */\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\n// Export for backward compatibility\nexport const parseStreamingResponseToMessage = ClaudeParser.parseStreamingResponse;\n"," export default new Proxy({}, {\n get(_, key) {\n throw new Error(`Module \"node:zlib\" has been externalized for browser compatibility. Cannot access \"node:zlib.${key}\" in client code. See https://vite.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.`)\n }\n })","/**\n * Gemini API Streaming / Regular Response Parser\n */\n\nimport * as zlib from 'node:zlib';\n\nexport class GeminiParser {\n /** Entry point used by interceptor */\n static parseStreamingResponse(rawText: string): any {\n try {\n const isSse = rawText.includes('\\ndata:') || rawText.startsWith('data:');\n if (isSse) return this.parseSse(rawText);\n return this.parseRegular(rawText);\n } catch (error) {\n return { error: `Failed to parse Gemini response: ${error}`, rawText };\n }\n }\n\n /** Parse non-stream (may be gzip) */\n private static parseRegular(rawText: string) {\n const looksGzip = this.isProbablyGzip(rawText) || this.hasHighControlCharRatio(rawText);\n let working = rawText;\n if (looksGzip) {\n const gun = this.tryGunzip(rawText);\n if (gun.ok) working = gun.text;\n }\n try {\n const json = JSON.parse(working);\n const response = json.response || json; // generateContent returns { response: { candidates: [...] }}\n const { fullText, finishReason, texts, nextSpeakers, reasoningSegments } = this.extractCandidateText(\n response.candidates,\n );\n const usage = this.normalizeUsage(response.usageMetadata || json.usageMetadata || json);\n const parts =\n texts.length > 0\n ? texts.map((t) => ({ type: 'text', text: t }))\n : fullText\n ? [{ type: 'text', text: fullText }]\n : [];\n return {\n message: {\n id: response.responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts,\n },\n metadata: {\n model: response.modelVersion || json.modelVersion,\n usage,\n responseId: response.responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: 0,\n compressed: looksGzip || undefined,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? 
reasoningSegments : undefined,\n },\n };\n } catch {\n return {\n message: {\n id: this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: [{ type: 'text', text: working }],\n },\n metadata: { raw: true },\n };\n }\n }\n\n /** Parse SSE stream */\n private static parseSse(rawText: string) {\n const lines = rawText.split(/\\r?\\n/);\n let modelVersion: string | undefined;\n let responseId: string | undefined;\n let finishReason: string | undefined;\n let usageMetadata: any;\n const collectedTexts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n\n for (const line of lines) {\n if (!line.startsWith('data:')) continue;\n const jsonStr = line.slice(5).trim();\n if (!jsonStr) continue;\n try {\n const payload = JSON.parse(jsonStr);\n const response = payload.response || {};\n if (response.modelVersion) modelVersion = response.modelVersion;\n if (response.responseId) responseId = response.responseId;\n if (response.usageMetadata) usageMetadata = response.usageMetadata;\n const extracted = this.extractCandidateText(response.candidates);\n if (!finishReason && extracted.finishReason) finishReason = extracted.finishReason;\n if (extracted.texts.length) collectedTexts.push(...extracted.texts);\n if (extracted.nextSpeakers.length) nextSpeakers.push(...extracted.nextSpeakers);\n if (extracted.reasoningSegments.length) reasoningSegments.push(...extracted.reasoningSegments);\n } catch {\n // ignore chunk errors\n }\n }\n\n const fullText = collectedTexts.join('');\n const usage = this.normalizeUsage(usageMetadata);\n return {\n message: {\n id: responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: collectedTexts.length ? collectedTexts.map((t) => ({ type: 'text', text: t })) : [],\n },\n metadata: {\n model: modelVersion,\n usage,\n responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: lines.filter((l) => l.startsWith('data:')).length,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? 
reasoningSegments : undefined,\n },\n };\n }\n\n /** Extract text and reasoning from candidates array */\n private static extractCandidateText(candidates: any): {\n fullText: string;\n finishReason?: string;\n texts: string[];\n finishReasonFound?: string;\n nextSpeakers: string[];\n reasoningSegments: string[];\n } {\n const texts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n let finishReason: string | undefined;\n if (Array.isArray(candidates)) {\n for (const cand of candidates) {\n if (!finishReason && cand?.finishReason) finishReason = cand.finishReason;\n const parts = cand?.content?.parts || [];\n if (Array.isArray(parts)) {\n for (const part of parts) {\n if (part && typeof part === 'object') {\n if (part.thought === true) continue; // skip internal thoughts\n const raw = part.text;\n if (typeof raw === 'string' && raw.length) {\n // Detect embedded JSON reasoning blob\n const trimmed = raw.trim();\n if (trimmed.startsWith('{') && trimmed.endsWith('}')) {\n try {\n const parsed = JSON.parse(trimmed);\n if (parsed && typeof parsed === 'object') {\n if (typeof parsed.reasoning === 'string') {\n reasoningSegments.push(parsed.reasoning);\n texts.push(parsed.reasoning);\n }\n if (parsed.next_speaker && typeof parsed.next_speaker === 'string')\n nextSpeakers.push(parsed.next_speaker);\n continue; // already added reasoning as text\n }\n } catch {\n // fall through to treat as raw text\n }\n }\n texts.push(raw);\n }\n }\n }\n }\n }\n }\n return {\n fullText: texts.join(''),\n finishReason,\n texts,\n finishReasonFound: finishReason,\n nextSpeakers,\n reasoningSegments,\n };\n }\n\n private static tryGunzip(str: string): { ok: boolean; text: string } {\n try {\n const buf = Buffer.from(str, 'latin1');\n const out = zlib.gunzipSync(buf);\n return { ok: true, text: out.toString('utf8') };\n } catch {\n return { ok: false, text: str };\n }\n }\n\n private static hasHighControlCharRatio(text: string): boolean {\n if (!text) return false;\n let control = 0;\n const sampleLen = Math.min(text.length, 256);\n for (let i = 0; i < sampleLen; i++) {\n const code = text.charCodeAt(i);\n if (code < 32 && code !== 9 && code !== 10 && code !== 13) control++;\n }\n return control / sampleLen > 0.1;\n }\n\n private static isProbablyGzip(text: string): boolean {\n return text.length >= 2 && text.charCodeAt(0) === 0x1f && text.charCodeAt(1) === 0x8b;\n }\n\n private static normalizeUsage(usage: any) {\n if (!usage || typeof usage !== 'object') {\n return { inputTokens: undefined, outputTokens: undefined, totalTokens: undefined };\n }\n const input = usage.promptTokenCount ?? usage.prompt_tokens;\n const output = usage.candidatesTokenCount ?? usage.candidates_tokens;\n const total =\n usage.totalTokenCount ?? (typeof input === 'number' && typeof output === 'number' ? 
input + output : undefined);\n return { inputTokens: input, outputTokens: output, totalTokens: total };\n }\n\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\nexport const parseGeminiStreamingResponse = GeminiParser.parseStreamingResponse;\n","/**\n * LLM Parser Factory\n *\n * Routes streaming responses to the appropriate parser based on URL endpoint patterns.\n * Supports multiple LLM providers with extensible parser registry.\n */\n\nimport { ClaudeParser } from './claude';\nimport { GeminiParser } from './gemini';\n\n// Parser interface for consistency\ninterface StreamingParser {\n parseStreamingResponse(rawText: string): any;\n}\n\n// Parser registry mapping URL patterns to parser classes\ninterface ParserMapping {\n patterns: RegExp[];\n parser: StreamingParser;\n name: string;\n}\n\nexport class LlmParser {\n private static parsers: ParserMapping[] = [\n {\n name: 'Claude (Anthropic)',\n patterns: [\n /\\/publishers\\/anthropic\\/models/i,\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i, // Anthropic API endpoint\n ],\n parser: ClaudeParser,\n },\n {\n name: 'Google Gemini',\n patterns: [\n /generativelanguage\\.googleapis\\.com/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /streamGenerateContent/i,\n /generateContent/i,\n ],\n parser: GeminiParser,\n },\n // Future parsers can be added here:\n // {\n // name: 'OpenAI GPT',\n // patterns: [/openai\\.com/i, /api\\.openai\\.com/i, /\\/v1\\/chat\\/completions/i],\n // parser: OpenAIParser,\n // },\n ];\n\n /**\n * Parse streaming response by detecting the appropriate parser based on URL\n *\n * @param url - The API endpoint URL\n * @param rawText - Raw streaming response text\n * @returns Parsed message object or null if no parser matches\n */\n static parseStreamingResponse(url: string, rawText: string): any {\n const parser = LlmParser.getParserForUrl(url);\n\n if (!parser) {\n return {\n error: `No parser found for URL: ${url}`,\n rawText,\n supportedProviders: LlmParser.parsers.map((p) => p.name),\n };\n }\n\n try {\n const result = parser.parseStreamingResponse(rawText);\n\n // Add parser metadata to the result\n if (result && typeof result === 'object' && !result.error) {\n result.metadata = {\n ...result.metadata,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n\n return result;\n } catch (error) {\n return {\n error: `Parser failed: ${error}`,\n rawText,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n }\n\n /**\n * Get the appropriate parser for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser instance or null if no match\n */\n static getParserForUrl(url: string): StreamingParser | null {\n if (!url) return null;\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.parser;\n }\n }\n\n return null;\n }\n\n /**\n * Get the parser name for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser name or 'unknown'\n */\n static getParserNameForUrl(url: string): string {\n if (!url) return 'unknown';\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.name;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Check if streaming response parsing is supported for a URL\n *\n * @param url - The API endpoint URL\n * @returns True if a parser is available\n */\n static isSupported(url: string): boolean {\n return LlmParser.getParserForUrl(url) !== 
null;\n }\n\n /**\n * Get list of all supported LLM providers\n *\n * @returns Array of supported provider names\n */\n static getSupportedProviders(): string[] {\n return LlmParser.parsers.map((p) => p.name);\n }\n\n /**\n * Register a new parser for additional LLM providers\n *\n * @param name - Human-readable parser name\n * @param patterns - Array of regex patterns to match URLs\n * @param parser - Parser class implementing StreamingParser interface\n */\n static registerParser(name: string, patterns: RegExp[], parser: StreamingParser): void {\n LlmParser.parsers.push({\n name,\n patterns,\n parser,\n });\n }\n}\n","/**\n * LLM Provider Detection and Utility Class\n *\n * Handles identification of different LLM providers based on URL patterns\n * and provides utilities for working with LLM API calls.\n */\n\nexport class LlmProvider {\n private readonly LLM_PATTERNS = [\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /openai\\.com/i,\n /api\\.openai\\.com/i,\n /gemini\\.google/i,\n /generativelanguage\\.googleapis/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /bedrock.*\\.amazonaws/i,\n /cognitive\\.microsoft/i,\n /azure\\.openai/i,\n /cohere\\.ai/i,\n /huggingface\\.co/i,\n /replicate\\.com/i,\n /together\\.xyz/i,\n /mistral\\.ai/i,\n /groq\\.com/i,\n /perplexity\\.ai/i,\n /ollama/i,\n /localhost:\\d+\\/v1/i,\n /127\\.0\\.0\\.1:\\d+\\/v1/i,\n /\\/v1\\/chat\\/completions/i,\n /\\/v1\\/completions/i,\n /\\/v1\\/embeddings/i,\n /\\/v1\\/messages/i,\n /\\/publishers\\/anthropic\\/models/i,\n ];\n\n private readonly PROVIDER_MAP = [\n { pattern: /\\/publishers\\/anthropic\\/models/i, provider: 'anthropic' },\n { pattern: /anthropic\\.com|claude\\.ai/i, provider: 'anthropic' },\n { pattern: /openai\\.com/i, provider: 'openai' },\n { pattern: /gemini\\.google|generativelanguage\\.googleapis|cloudcode-pa\\.googleapis/i, provider: 'google' },\n { pattern: /bedrock.*\\.amazonaws/i, provider: 'aws-bedrock' },\n { pattern: /cognitive\\.microsoft|azure\\.openai/i, provider: 'azure' },\n { pattern: /cohere\\.ai/i, provider: 'cohere' },\n { pattern: /huggingface\\.co/i, provider: 'huggingface' },\n { pattern: /replicate\\.com/i, provider: 'replicate' },\n { pattern: /together\\.xyz/i, provider: 'together' },\n { pattern: /mistral\\.ai/i, provider: 'mistral' },\n { pattern: /groq\\.com/i, provider: 'groq' },\n { pattern: /perplexity\\.ai/i, provider: 'perplexity' },\n { pattern: /ollama|localhost:\\d+\\/v1|127\\.0\\.0\\.1:\\d+\\/v1/i, provider: 'local' },\n ];\n\n /**\n * Check if a URL is related to an LLM service\n * @param url The URL to check\n * @returns true if the URL is LLM-related, false otherwise\n */\n isLLMCall(url: string): boolean {\n if (!url) return false;\n return this.LLM_PATTERNS.some((pattern) => pattern.test(url));\n }\n\n /**\n * Detect which LLM provider a URL belongs to\n * @param url The URL to analyze\n * @returns The provider name or 'unknown' if not recognized\n */\n detectProvider(url: string): string {\n if (!url) return 'unknown';\n\n for (const { pattern, provider } of this.PROVIDER_MAP) {\n if (pattern.test(url)) {\n return provider;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Get all supported provider names\n * @returns Array of supported provider names\n */\n getSupportedProviders(): string[] {\n return Array.from(new Set(this.PROVIDER_MAP.map(({ provider }) => provider)));\n }\n\n /**\n * Check if a specific provider is supported\n * @param providerName The provider name to check\n * @returns true if the provider is supported, false otherwise\n */\n 
isProviderSupported(providerName: string): boolean {\n return this.getSupportedProviders().includes(providerName.toLowerCase());\n }\n\n /**\n * Get the pattern used for detecting a specific provider\n * @param providerName The provider name\n * @returns The regex pattern or null if provider not found\n */\n getProviderPattern(providerName: string): RegExp | null {\n const mapping = this.PROVIDER_MAP.find(({ provider }) => provider === providerName.toLowerCase());\n return mapping ? mapping.pattern : null;\n }\n}\n\nexport default LlmProvider;\n","import { performance } from 'node:perf_hooks';\n// Removed zlib usage to avoid synchronous decompression & event loop blocking\n// import * as zlib from 'node:zlib';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface FetchInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\nexport class FetchInstrument {\n private originalFetch: typeof fetch | null = null;\n // Maximum bytes to fully log / parse (helps protect memory usage)\n // Note: SSE streams are captured in full regardless of this limit\n private static readonly MAX_LOG_BYTES = 256 * 1024; // 256 KB\n private ctx: CommonContext;\n private extra: FetchInstrumentConfig;\n\n constructor(ctx: CommonContext, extra: FetchInstrumentConfig) {\n this.ctx = ctx;\n this.extra = extra;\n }\n\n patch(): void {\n if (typeof globalThis.fetch !== 'function' || this.originalFetch) return;\n\n this.originalFetch = globalThis.fetch.bind(globalThis);\n const ctx = this.ctx;\n const extra = this.extra;\n\n globalThis.fetch = (async (resource: any, init?: any) => {\n const start = performance.now();\n let url = '';\n try {\n url = typeof resource === 'string' ? resource : resource?.url || '';\n } catch {}\n const method = (init?.method || resource?.method || 'GET').toUpperCase();\n\n // --- Extract request body safely without consuming streams ---\n let requestBody = '';\n try {\n const body: any = init?.body ?? resource?.body;\n if (body) {\n if (typeof body === 'string') requestBody = body;\n else if (body instanceof URLSearchParams) requestBody = body.toString();\n else if (typeof FormData !== 'undefined' && body instanceof FormData) {\n const obj: Record<string, any> = {};\n for (const [k, v] of body.entries()) obj[k] = typeof v === 'string' ? 
v : '[File|Blob]';\n requestBody = JSON.stringify(obj);\n } else if (body instanceof Uint8Array || Buffer.isBuffer(body))\n requestBody = Buffer.from(body).toString('utf8');\n else if (typeof body === 'object') requestBody = JSON.stringify(body);\n }\n } catch {\n requestBody = '<request body error>';\n }\n\n const entryId = ctx.idGenerator();\n const entryTime = new Date().toISOString();\n\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: 0,\n };\n extra.sendHttpStart(startEntry).catch((err) => {\n console.error('[FetchInstrument] Failed to send HTTP start:', err);\n });\n }\n\n // --- Perform network request with outer error logging ---\n let res: Response;\n try {\n res = await this.originalFetch!(resource, init);\n } catch (networkError) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: (networkError as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n throw networkError; // preserve original rejection semantics\n }\n\n try {\n const duration = performance.now() - start;\n let responseBody = '';\n let bodyBytes = 0;\n let processedBody: any = null;\n let isEventStream = false;\n\n // --- Capture response body with safeguards ---\n try {\n const contentType = res.headers.get('content-type') || '';\n isEventStream = /text\\/event-stream/i.test(contentType);\n const isTextLike =\n /^(text\\/|application\\/(json|xml|javascript))/i.test(contentType) || /(json|xml|html)/i.test(contentType);\n\n if (!isEventStream) {\n // Fully buffer only non-streaming types (or when small enough)\n const clone = res.clone();\n const arrayBuffer = await clone.arrayBuffer();\n bodyBytes = arrayBuffer.byteLength;\n const max = FetchInstrument.MAX_LOG_BYTES;\n if (isTextLike) {\n if (arrayBuffer.byteLength <= max) {\n responseBody = Buffer.from(arrayBuffer).toString('utf8');\n } else {\n const buf = Buffer.from(arrayBuffer.slice(0, max));\n responseBody = `${buf.toString('utf8')}\\n...[truncated ${(arrayBuffer.byteLength - max).toLocaleString()} bytes]`;\n }\n } else {\n responseBody = `<${bodyBytes} bytes binary>`;\n }\n } else {\n // For event-stream, we need to tee the stream to capture data without consuming it\n const originalBody = res.body;\n if (originalBody) {\n const [stream1, stream2] = originalBody.tee();\n\n // Capture stream1 data for logging/processing\n const capturePromise = (async () => {\n try {\n const reader = stream1.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n // For SSE streams, capture everything without limit\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n chunks.push(value);\n totalBytes += value.byteLength;\n }\n\n // Convert captured chunks to string for processing\n const capturedBuffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));\n const capturedText = capturedBuffer.toString('utf8');\n responseBody = capturedText; // No truncation for SSE\n bodyBytes = totalBytes;\n\n // Process the captured streaming response for LLM calls\n if (ctx.llmProvider.isLLMCall(url) && responseBody) {\n 
try {\n processedBody = ctx.responseProcessor(url, responseBody);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? err.message : String(err)}` };\n }\n }\n\n // Update the log entry through context handler (which will trigger sendHttpUpdate)\n const finalLogEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(performance.now() - start),\n responseBody,\n processedBody,\n bodyBytes,\n };\n ctx.handleLogEntry(finalLogEntry);\n\n // Send the update if this is a valid message request\n if (\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(url, method, requestBody)\n ) {\n extra.sendHttpUpdate(finalLogEntry).catch(() => {});\n }\n\n return { responseBody, processedBody, bodyBytes };\n } catch (err) {\n console.error('Error capturing stream:', err);\n return { responseBody: '<stream capture error>', processedBody: null, bodyBytes: 0 };\n }\n })();\n\n // Store promise for later use if needed\n (res as any).__capturePromise = capturePromise;\n\n // Return response with stream2 (unconsumed)\n res = new Response(stream2, {\n status: res.status,\n statusText: res.statusText,\n headers: res.headers,\n });\n\n // Set initial values for streaming\n responseBody = '<streaming in progress>';\n processedBody = null;\n } else {\n responseBody = '<streaming response not buffered>';\n }\n }\n } catch {\n responseBody = '<response body error>';\n }\n\n // Process non-streaming responses\n if (\n !isEventStream &&\n ctx.llmProvider.isLLMCall(url) &&\n responseBody &&\n responseBody !== '<response body error>'\n ) {\n try {\n processedBody = ctx.responseProcessor(url, responseBody);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? 
err.message : String(err)}` };\n }\n }\n\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(duration),\n responseBody,\n processedBody,\n bodyBytes,\n };\n ctx.handleLogEntry(logEntry);\n\n // For non-streaming responses or when streaming capture fails, send update immediately\n // For streaming responses, the update will be sent after capture completes\n if (\n !isEventStream &&\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(url, method, requestBody)\n ) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n return res;\n } catch (error) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: (error as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n return res; // still return original response if available\n }\n }) as typeof fetch;\n }\n\n restore(): void {\n if (this.originalFetch) {\n globalThis.fetch = this.originalFetch;\n this.originalFetch = null;\n }\n }\n}\n","import { performance } from 'node:perf_hooks';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface HttpInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\nexport class HttpInstrument {\n private originals: any = null;\n constructor(\n private ctx: CommonContext,\n private enableHttps: boolean,\n private _extra: HttpInstrumentConfig = {\n isValidMessageRequest: () => false,\n sendHttpStart: async () => {},\n sendHttpUpdate: async () => {},\n },\n ) {}\n\n patch(): void {\n try {\n const http = require('http');\n const https = require('https');\n if (this.originals) return;\n\n this.originals = {\n httpRequest: http.request,\n httpsRequest: https.request,\n httpGet: http.get,\n httpsGet: https.get,\n };\n const self = this;\n const maxBody = this.ctx.maxBodyBytes;\n\n function buildUrl(options: any, protocolDefault: string): string {\n try {\n if (typeof options === 'string') return options;\n if (options instanceof URL) return options.toString();\n const protocol = options.protocol || protocolDefault;\n const host = options.hostname || options.host || 'localhost';\n const portPart = options.port ? `:${options.port}` : '';\n const path = options.path || options.pathname || '/';\n return `${protocol}//${host}${portPart}${path}`;\n } catch {\n return '';\n }\n }\n\n function wrapRequest(original: Function, protocol: 'http:' | 'https:') {\n return function wrappedRequest(options: any, callback?: any) {\n const url = buildUrl(options, protocol);\n const method = (typeof options === 'object' && options.method ? 
options.method : 'GET').toUpperCase();\n\n const startHr = performance.now();\n const entryId = self.ctx.idGenerator();\n const entryTime = new Date().toISOString();\n const requestBodyBuffers: Buffer[] = [];\n let requestLen = 0;\n let requestBodySent = false;\n\n const req = original(options, (res: any) => {\n const responseBuffers: Buffer[] = [];\n let responseBytes = 0;\n let capturedBytes = 0; // track captured (pushed) bytes to avoid O(n^2)\n const contentTypeHeader = () => String(res.headers['content-type'] || '');\n res.on('data', (chunk: Buffer) => {\n responseBytes += chunk.length;\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n\n // For SSE streams, capture everything without truncation\n if (isEventStream) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n // For non-SSE, apply the maxBody limit\n if (capturedBytes >= maxBody) return; // already at limit\n const remaining = maxBody - capturedBytes;\n if (chunk.length <= remaining) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n responseBuffers.push(chunk.subarray(0, remaining));\n capturedBytes += remaining;\n }\n }\n });\n res.on('end', () => {\n const duration = performance.now() - startHr;\n let responseBody = '';\n try {\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n const bufferRaw = Buffer.concat(responseBuffers);\n const wasTruncated = responseBytes > bufferRaw.length || bufferRaw.length === maxBody;\n let buffer = bufferRaw;\n\n // Handle decompression for non-SSE responses\n if (!isEventStream) {\n const contentEncoding = String(res.headers['content-encoding'] || '').toLowerCase();\n // Only attempt decompression if we captured the full body (not truncated) & encoding present\n const canDecompress = !wasTruncated && buffer.length && /(gzip|br|deflate)/.test(contentEncoding);\n if (canDecompress) {\n try {\n const zlib = require('node:zlib');\n if (contentEncoding.includes('gzip')) buffer = zlib.gunzipSync(bufferRaw);\n else if (contentEncoding.includes('br')) buffer = zlib.brotliDecompressSync(bufferRaw);\n else if (contentEncoding.includes('deflate')) buffer = zlib.inflateSync(bufferRaw);\n } catch {\n // swallow decompression errors; fall back to raw (likely already decoded or truncated)\n }\n }\n }\n\n // Convert buffer to string for text-based responses (including SSE)\n const isText = /text\\//i.test(ct) || /application\\/(json|xml|javascript)/i.test(ct);\n if (isText || isEventStream) {\n responseBody = buffer.toString('utf8');\n // Don't add truncation message for SSE streams (we capture full SSE)\n if (wasTruncated && !isEventStream) responseBody += '\\n...[truncated]';\n } else if (buffer.length) {\n responseBody = `<${buffer.length} bytes${wasTruncated ? 
' (truncated)' : ''} binary>`;\n }\n } catch {\n responseBody = '<response body error>';\n }\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n let processedBody: any = null;\n if (self.ctx.llmProvider.isLLMCall(url)) {\n try {\n // Always use the responseProcessor for LLM calls to get proper parsing\n processedBody = self.ctx.responseProcessor(url, responseBody);\n } catch (e) {\n processedBody = {\n parts: [{ type: 'text', text: responseBody || '<empty response>' }],\n metadata: { error: String(e) },\n };\n }\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n status: res.statusCode,\n ok: res.statusCode >= 200 && res.statusCode < 300,\n durationMs: Math.round(duration),\n responseBody,\n processedBody,\n bodyBytes: responseBytes,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update when response is complete\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n res.on('error', (err: Error) => {\n const duration = performance.now() - startHr;\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: err.message,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update for error case\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n if (callback) callback(res);\n });\n const originalWrite = req.write;\n const originalEnd = req.end;\n req.write = function (chunk: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n } catch {}\n return originalWrite.call(this, chunk, encoding, cb);\n };\n req.end = function (chunk?: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n\n // Send HTTP start when request is ending (body is complete)\n if (!requestBodySent) {\n requestBodySent = true;\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n let requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n\n const isValid = self._extra.isValidMessageRequest(url, method, requestBody);\n\n if (self._extra.agentHttpService && self._extra.sessionId && isValid) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: 0,\n };\n self._extra.sendHttpStart(startEntry).catch(() => {});\n }\n } catch {}\n }\n } catch {}\n return originalEnd.call(this, chunk, encoding, cb);\n };\n return req;\n };\n }\n\n http.request = wrapRequest(this.originals.httpRequest, 'http:');\n http.get = function wrappedGet(o: any, cb?: any) {\n const r = (http.request as any)(o, cb);\n r.end();\n return r;\n };\n if (this.enableHttps !== false) {\n https.request = wrapRequest(this.originals.httpsRequest, 'https:');\n https.get = function wrappedGet(o: any, cb?: any) {\n const r = (https.request as any)(o, cb);\n r.end();\n return r;\n };\n }\n } catch (e) {\n // Silent instrumentation - no logging\n }\n }\n\n restore(): void {\n if (this.originals) {\n try {\n const http = require('http');\n const https = require('https');\n http.request = this.originals.httpRequest;\n http.get = this.originals.httpGet;\n https.request = this.originals.httpsRequest;\n https.get = this.originals.httpsGet;\n } catch {}\n this.originals = null;\n }\n }\n}\n","/**\n * LLM Network Interceptor Class\n *\n * Handles network interception and logging for LLM-related HTTP requests.\n * Patches global fetch and optionally http/https modules to capture request/response data.\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../../config/subenv';\nimport type { AgentHttpService } from '../../services/AgentHttpService';\nimport { FileLogger } from '../loggers/file';\nimport type { MessagePart } from '../messages';\nimport { LlmParser } from '../parsers';\nimport { LlmProvider } from '../providers';\nimport { FetchInstrument } from './instruments/fetchInstrument';\nimport { HttpInstrument } from './instruments/httpInstrument';\nimport type { CommonContext, NetworkLogEntry } from './instruments/types';\n\nexport type { NetworkLogEntry } from './instruments/types';\n\nexport interface LlmInterceptorConfig {\n maxBodyBytes?: number;\n enableHttpsPatching?: boolean;\n enableDebug?: boolean;\n logFilter?: (entry: NetworkLogEntry) => boolean;\n responseProcessor?: (url: string, responseBody: string) => any;\n llmProvider?: LlmProvider;\n idGenerator?: () => string;\n agentHttpService?: AgentHttpService;\n sessionId?: string;\n logFilePath?: string; // optional path to write newline-delimited JSON log entries\n processedLogFilePath?: string; // optional path to write processed body (pretty JSON)\n}\n\nexport class LlmInterceptor {\n private static instance: LlmInterceptor | null = null;\n private isPatched = false;\n 
private config: Required<\n Omit<LlmInterceptorConfig, 'llmProvider' | 'idGenerator' | 'agentHttpService' | 'sessionId'>\n >;\n private llmProvider: LlmProvider;\n private idGenerator: () => string;\n private agentHttpService?: AgentHttpService;\n private sessionId?: string;\n private activeRequests = new Map<string, string>();\n private rawLogger?: FileLogger; // logger for original request/response\n private errorLogger?: FileLogger; // logger for original request/response\n private processedLogger?: FileLogger; // logger for url + processed response\n\n // Instruments\n private fetchInstrument?: FetchInstrument;\n private httpInstrument?: HttpInstrument;\n\n constructor(config: LlmInterceptorConfig = {}) {\n // Use provided LlmProvider or create a new instance\n this.llmProvider = config.llmProvider || new LlmProvider();\n\n // Use provided idGenerator or default to ulid\n this.idGenerator = config.idGenerator || ulid;\n\n // Store AgentHttpService and sessionId for promise lifecycle logging\n this.agentHttpService = config.agentHttpService;\n this.sessionId = config.sessionId;\n\n this.config = {\n maxBodyBytes: config.maxBodyBytes ?? 8192,\n enableHttpsPatching: config.enableHttpsPatching ?? true, // default to true so https requests are intercepted\n enableDebug: config.enableDebug ?? false,\n logFilter:\n config.logFilter ??\n ((entry) =>\n !!(\n entry.url &&\n !entry.url.includes('statsig') &&\n !entry.url.includes('registry.npmjs.org') &&\n !entry.url.startsWith('data:')\n )),\n // Default processor now parses streaming SSE via LlmParser\n responseProcessor:\n config.responseProcessor ??\n ((url: string, responseBody: string) => {\n try {\n return LlmParser.parseStreamingResponse(url, responseBody);\n } catch {\n return null;\n }\n }),\n } as typeof this.config;\n\n // Use system temp directory for automatic garbage collection\n const baseLogDir = path.join(os.tmpdir(), 'agiflow-agents', 'llm', `session-${process.pid}`);\n const logFilePath = config.logFilePath || path.join(baseLogDir, 'network.log');\n const errorLogFilePath = path.join(baseLogDir, 'error.log'); // Remove config.errorLogFilePath which doesn't exist\n const processedLogFilePath = config.processedLogFilePath || path.join(baseLogDir, 'processed.jsonl');\n\n // Initialize loggers\n try {\n this.rawLogger = new FileLogger(logFilePath);\n } catch {}\n try {\n this.errorLogger = new FileLogger(errorLogFilePath);\n } catch {}\n try {\n this.processedLogger = new FileLogger(processedLogFilePath);\n } catch {}\n }\n\n /**\n * Get singleton instance of LlmInterceptor\n */\n static getInstance(config?: LlmInterceptorConfig): LlmInterceptor {\n if (!LlmInterceptor.instance) {\n LlmInterceptor.instance = new LlmInterceptor(config);\n }\n return LlmInterceptor.instance;\n }\n\n /**\n * Start network interception\n */\n start(): void {\n if (this.isPatched) {\n return;\n }\n\n const common: CommonContext = {\n idGenerator: this.idGenerator,\n maxBodyBytes: this.config.maxBodyBytes,\n enableDebug: this.config.enableDebug,\n logFilter: this.config.logFilter,\n responseProcessor: this.config.responseProcessor,\n llmProvider: this.llmProvider,\n handleLogEntry: (e) => this.handleLogEntry(e),\n };\n\n this.fetchInstrument = new FetchInstrument(common, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n 
});\n this.fetchInstrument.patch();\n\n this.httpInstrument = new HttpInstrument(common, this.config.enableHttpsPatching, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n });\n this.httpInstrument.patch();\n\n this.isPatched = true;\n }\n\n /**\n * Stop network interception and restore original functions\n */\n stop(): void {\n if (!this.isPatched) {\n return;\n }\n this.fetchInstrument?.restore();\n this.httpInstrument?.restore();\n this.isPatched = false;\n }\n\n /**\n * Check if interceptor is currently active\n */\n isActive(): boolean {\n return this.isPatched;\n }\n\n /**\n * Update configuration\n */\n updateConfig(config: Partial<LlmInterceptorConfig>): void {\n this.config = { ...this.config, ...config };\n }\n\n /**\n * Handle a log entry - filter and pass to log handler if configured\n */\n private handleLogEntry(entry: NetworkLogEntry): void {\n // Raw logger writes the full network entry\n if (this.rawLogger) {\n this.rawLogger.info('network_entry', { networkLog: entry });\n }\n // Processed logger writes subset if processed body present\n if (this.processedLogger && this.config.logFilter(entry) && entry.processedBody) {\n this.processedLogger.info('processed_entry', {\n id: entry.id,\n url: entry.url,\n method: entry.method,\n status: entry.status,\n durationMs: entry.durationMs,\n timestamp: entry.time,\n processedBody: entry.processedBody,\n });\n }\n // Silent instrumentation - no debug logging\n }\n\n /**\n * Check if this is a valid message HTTP request that should be logged as an agent message\n */\n private isValidMessageRequest(url: string, method: string, requestBody: string): boolean {\n if (this.rawLogger) {\n this.rawLogger.info('isValidMessageRequest checking', {\n url,\n method,\n hasBody: !!requestBody,\n bodyLength: requestBody?.length,\n });\n }\n\n // Only log LLM calls\n if (!this.llmProvider.isLLMCall(url)) {\n if (this.rawLogger) {\n this.rawLogger.info('Not an LLM URL, skipping', { url });\n }\n if (this.rawLogger && this.config.enableDebug) {\n this.rawLogger.debug('Request validation failed: Not an LLM URL', {\n url,\n method,\n });\n }\n return false;\n }\n\n // For Google/Gemini APIs, only track SSE streaming requests\n if (url.includes('cloudcode-pa.googleapis.com') || url.includes('generativelanguage.googleapis.com')) {\n // Only track streamGenerateContent (SSE), skip countTokens, generateContent, loadCodeAssist, etc.\n if (url.includes('streamGenerateContent')) {\n return true;\n } else {\n return false;\n }\n }\n\n // Only log POST requests (actual message requests)\n if (method !== 'POST') {\n return false;\n }\n\n // Check if the request body contains actual message content\n if (!requestBody) {\n return false;\n }\n\n try {\n const body = JSON.parse(requestBody);\n\n // Look for common LLM message patterns\n const hasMessages = body.messages && Array.isArray(body.messages) && body.messages.length > 0;\n const hasPrompt = body.prompt && typeof body.prompt === 'string';\n const hasInput = body.input && typeof body.input === 'string';\n const hasContents = body.contents && Array.isArray(body.contents) && body.contents.length > 0; // Gemini format\n\n // Gemini specific: check for request.contents\n const hasRequestContents =\n body.request?.contents && Array.isArray(body.request.contents) && 
body.request.contents.length > 0;\n\n // Must have actual content to be considered a valid message request\n const isValid = hasMessages || hasPrompt || hasInput || hasContents || hasRequestContents;\n\n return isValid;\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request body in isValidMessageRequest', {\n url,\n method,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n // If we can't parse the JSON, it's not a valid message request\n return false;\n }\n }\n\n /**\n * Send HTTP start event using startMessage with request content\n */\n private async sendHttpStart(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) {\n return;\n }\n\n // Log start request\n if (this.rawLogger) {\n this.rawLogger.info('http_start', { entry });\n }\n\n // Extract the request content to include as parts\n let requestParts: Array<{ type: string; text?: string }> = [];\n if (entry.requestBody) {\n try {\n const requestData = JSON.parse(entry.requestBody);\n\n // Extract user input from common LLM API formats\n if (requestData.messages && Array.isArray(requestData.messages)) {\n // OpenAI/Anthropic format - find the last user message\n const userMessage = requestData.messages.filter((msg: any) => msg.role === 'user').pop();\n if (userMessage?.content) {\n if (typeof userMessage.content === 'string') {\n requestParts.push({ type: 'text', text: userMessage.content });\n } else if (Array.isArray(userMessage.content)) {\n // Handle content array format\n requestParts = userMessage.content.map((part: any) => ({\n type: part.type || 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } else if (requestData.prompt) {\n // Legacy prompt format\n requestParts.push({ type: 'text', text: requestData.prompt });\n } else if (requestData.input) {\n // Some APIs use 'input'\n requestParts.push({ type: 'text', text: requestData.input });\n } else if (requestData.contents && Array.isArray(requestData.contents)) {\n // Gemini format - extract the last user content\n const userContent = requestData.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n } else if (requestData.request?.contents && Array.isArray(requestData.request.contents)) {\n // Gemini format with nested request - extract the last user content\n const userContent = requestData.request.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request data in sendHttpStart', {\n url: entry.url,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n // If we can't parse the request, just include a placeholder\n requestParts.push({ type: 'text', text: 'LLM request initiated...' 
});\n }\n }\n\n // Extract only necessary metadata for frontend\n const startMetadata = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n request_started: true,\n };\n\n try {\n if (this.rawLogger) {\n this.rawLogger.info('Calling agentHttpService.startMessage', {\n sessionId: this.sessionId,\n entryId: entry.id,\n });\n }\n\n const response = await this.agentHttpService.startMessage(this.sessionId, {\n messageType: 'output',\n metadata: startMetadata,\n });\n\n // Store the message ID for later update\n this.activeRequests.set(entry.id, response.messageId);\n\n if (this.rawLogger) {\n this.rawLogger.info('startMessage successful', {\n entryId: entry.id,\n messageId: response.messageId,\n });\n }\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP start message', {\n url: entry.url,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n }\n }\n\n /**\n * Send HTTP completion event using updateMessage\n */\n private async sendHttpUpdate(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) return;\n\n // Log update request\n if (this.rawLogger) {\n this.rawLogger.info('http_update', { entry });\n }\n\n const messageId = this.activeRequests.get(entry.id);\n if (!messageId) {\n return;\n }\n\n // Extract only necessary metadata for frontend: model, llm_provider, token usage\n const completionMetadata: Record<string, any> = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n status: entry.status,\n duration: entry.durationMs,\n };\n\n // Add model and token usage if available from processed response\n if (entry.processedBody?.metadata) {\n const processedMeta = entry.processedBody.metadata;\n if (processedMeta.model) completionMetadata.model = processedMeta.model;\n if (processedMeta.usage) completionMetadata.usage = processedMeta.usage;\n if (processedMeta.total_tokens) completionMetadata.total_tokens = processedMeta.total_tokens;\n if (processedMeta.prompt_tokens) completionMetadata.prompt_tokens = processedMeta.prompt_tokens;\n if (processedMeta.completion_tokens) completionMetadata.completion_tokens = processedMeta.completion_tokens;\n if (processedMeta.cost) completionMetadata.cost = processedMeta.cost;\n }\n\n // Use already-parsed processedBody to extract response content as parts\n let responseParts: MessagePart[] = [];\n if (entry.processedBody) {\n const parsed = entry.processedBody;\n\n if (parsed?.message?.parts && Array.isArray(parsed.message.parts)) {\n // Accept parts array even if empty (valid for token counting, etc.)\n responseParts = parsed.message.parts;\n } else if (parsed?.parts && Array.isArray(parsed.parts)) {\n // httpInstrument.ts error handler creates parts directly without message wrapper\n responseParts = parsed.parts;\n } else if (parsed?.error) {\n responseParts.push({ type: 'text' as const, text: `Parser error: ${parsed.error}` });\n } else {\n // No parts is valid for some API calls (countTokens, loadCodeAssist, etc.)\n responseParts = [];\n }\n } else if (entry.responseBody) {\n // Fallback: create a simple text part for unsupported providers\n responseParts.push({ type: 'text' as const, text: 'LLM response completed' });\n }\n\n try {\n const updateData: any = {\n parts: responseParts,\n metadata: completionMetadata,\n mId: entry.processedBody?.message?.id,\n };\n\n // Only include raw data if explicitly enabled\n if (SubEnvManager.isSaveRawEnabled) {\n updateData.raw = JSON.stringify(entry);\n }\n\n await 
this.agentHttpService.updateMessage(this.sessionId, messageId, updateData);\n\n // Clean up the active request tracking\n this.activeRequests.delete(entry.id);\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP update message', {\n url: entry.url,\n messageId,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n // Clean up even on error\n this.activeRequests.delete(entry.id);\n }\n }\n}\n\nexport default LlmInterceptor;\n","/**\n * Network logging hook - captures LLM HTTP responses\n *\n * STANDALONE HOOK - No external dependencies\n * This file must be self-contained as it's loaded via --import flag\n *\n * Simple approach:\n * - Patches fetch via LlmInterceptor\n * - Logs directly to file stream (avoids stdout pollution)\n * - Sends LLM calls to remote immediately (fire-and-forget)\n * - No batching, no cleanup, no complex error handling\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../config/subenv';\nimport { LlmInterceptor, LlmProvider } from '../llms';\nimport { FileLogger } from '../llms/loggers/file';\nimport { AgentHttpService } from '../services/AgentHttpService';\n\nclass NetworkLog {\n private logger: FileLogger;\n private agentHttpService: AgentHttpService | null = null;\n private interceptor: LlmInterceptor;\n private llmProvider: LlmProvider;\n private maxBodyBytes: number;\n private sessionId: string;\n\n constructor() {\n const parsedMax = SubEnvManager.netlogMaxBody;\n this.maxBodyBytes = parsedMax || 8192;\n this.sessionId = SubEnvManager.sessionId || '';\n\n this.llmProvider = new LlmProvider();\n\n // Use system temp directory for automatic garbage collection\n const logPath =\n SubEnvManager.netlogFile || path.join(os.tmpdir(), 'agiflow-agents', 'network', `network-${process.pid}.log`);\n this.logger = new FileLogger(logPath);\n\n let apiUrl = SubEnvManager.serverUrl || '';\n if (apiUrl.startsWith('wss://')) apiUrl = 'https://' + apiUrl.slice(6);\n else if (apiUrl.startsWith('ws://')) apiUrl = 'http://' + apiUrl.slice(5);\n const apiKey = SubEnvManager.apiKey || '';\n const organizationId = SubEnvManager.organizationId || '';\n\n const enableRemote = SubEnvManager.isNetlogRemoteEnabled && !!(this.sessionId && apiUrl);\n\n if (enableRemote && apiUrl) {\n try {\n this.agentHttpService = new AgentHttpService({\n apiUrl,\n organizationId,\n apiKey,\n logger: this.logger as any,\n });\n } catch (error) {\n if (FileLogger.shouldDebug()) {\n this.logger.error('AgentHttpService initialization failed', error as Error);\n }\n }\n }\n\n this.interceptor = LlmInterceptor.getInstance({\n maxBodyBytes: this.maxBodyBytes,\n enableDebug: SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug,\n llmProvider: this.llmProvider,\n idGenerator: ulid,\n agentHttpService: this.agentHttpService || undefined,\n sessionId: this.sessionId,\n });\n }\n\n public start(): void {\n this.interceptor.start();\n }\n}\n\n// Initialize and start network logging (guard ensures idempotence)\nconst networkLog = new 
NetworkLog();\nnetworkLog.start();\n"],"names":["FileLogger","logPath","logDir","path","fs","error","message","data","level","logEntry","SubEnvManager","CostCalculator","modelName","usage","modelPricing","costData","inputTokens","outputTokens","cachedInputTokens","cacheCreationInputTokens","inputRate","outputRate","cacheReadRate","cacheCreationRate","inputCost","outputCost","cacheReadCost","cacheCreationCost","ClaudeParser","rawText","lines","contentBlocks","messageMetadata","blockMap","line","dataStr","block","parts","combinedUsage","tokenUsage","cost","len","part","_","key","GeminiParser","looksGzip","working","gun","json","response","fullText","finishReason","texts","nextSpeakers","reasoningSegments","t","modelVersion","responseId","usageMetadata","collectedTexts","jsonStr","extracted","l","candidates","cand","raw","trimmed","parsed","str","zlib.gunzipSync","text","control","sampleLen","i","code","input","output","total","LlmParser","url","parser","p","result","mapping","pattern","name","patterns","LlmProvider","provider","providerName","FetchInstrument","ctx","extra","resource","init","start","performance","method","requestBody","body","obj","k","v","entryId","entryTime","startEntry","err","res","networkError","duration","responseBody","bodyBytes","processedBody","isEventStream","contentType","isTextLike","originalBody","stream1","stream2","capturePromise","reader","chunks","totalBytes","done","value","c","finalLogEntry","arrayBuffer","max","HttpInstrument","enableHttps","_extra","buildUrl","options","protocolDefault","protocol","host","portPart","wrapRequest","original","callback","startHr","self","requestBodyBuffers","requestLen","requestBodySent","req","responseBuffers","responseBytes","capturedBytes","contentTypeHeader","chunk","ct","maxBody","remaining","bufferRaw","wasTruncated","buffer","contentEncoding","zlib","combined","e","u","originalWrite","originalEnd","encoding","cb","buf","isValid","http","https","o","r","LlmInterceptor","config","ulid","entry","baseLogDir","os","logFilePath","errorLogFilePath","processedLogFilePath","common","hasMessages","hasPrompt","hasInput","hasContents","hasRequestContents","requestParts","requestData","userMessage","msg","userContent","startMetadata","messageId","completionMetadata","processedMeta","responseParts","updateData","NetworkLog","parsedMax","apiUrl","apiKey","organizationId","AgentHttpService","networkLog"],"mappings":"8dAmBO,MAAMA,CAAW,CACd,UACA,QAER,YAAYC,EAAiB,CAC3B,KAAK,QAAUA,EAGf,MAAMC,EAASC,EAAK,QAAQ,KAAK,OAAO,EACnCC,EAAG,WAAWF,CAAM,GACvBE,EAAG,UAAUF,EAAQ,CAAE,UAAW,GAAM,EAI1C,KAAK,UAAYE,EAAG,kBAAkB,KAAK,QAAS,CAAE,MAAO,IAAK,EAGlE,KAAK,UAAU,GAAG,QAAUC,GAAU,CAEpC,QAAQ,MAAM,2CAA2CA,EAAM,OAAO,EAAE,CAC1E,CAAC,CACH,CAKA,KAAKC,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,KAAKD,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,IAAIC,EAAeF,EAAiBC,EAAkC,CACpE,MAAME,EAAqB,CACzB,KAAM,IAAI,KAAA,EAAO,YAAA,EACjB,MAAAD,EACA,IAAKF,EACL,GAAGC,CAAA,EAGL,GAAI,CACF,KAAK,UAAU,MAAM,KAAK,UAAUE,CAAQ,EAAI;AAAA,CAAI,CACtD,OAASJ,EAAO,CAEd,QAAQ,MAAM,2CAA2CA,CAAK,EAAE,CAClE,CACF,CAKA,OAAc,CACR,KAAK,WAAa,CAAC,KAAK,UAAU,WACpC,KAAK,UAAU,IAAA,CAEnB,CAKA,YAAqB,CACnB,OAAO,KAAK,OACd,CAKA,OAAO,aAAuB,CAC5B,OAAOK,EAAAA,cAAc,aAAeA,EAAAA,cAAc,aACpD,CACF,837FCxFO,MAAMC,EAAe,CAI1B,OAAO,cAAcC,EAAmBC,EAAqC,CAC3E,MAAMC,EAAeC,GAASH,CAAkC,EAEhE,GAAI,CAACE,EACH,OAAO,KAGT,MAAME,EAAcH,EAAM,aAAe,EACnCI,EAAeJ,EAAM,cAAgB,EACrCK,EAAoBL,EAAM,mBA
AqB,EAC/CM,EAA2BN,EAAM,0BAA4B,EAG7DO,EAAY,yBAA0BN,EAAeA,EAAa,qBAAuB,EACzFO,EAAa,0BAA2BP,EAAeA,EAAa,sBAAwB,EAC5FQ,EAAgB,gCAAiCR,EAAeA,EAAa,4BAA8B,EAC3GS,EACJ,oCAAqCT,EAAeA,EAAa,gCAAkC,EAG/FU,EAAYR,EAAcI,EAC1BK,EAAaR,EAAeI,EAC5BK,EAAgBR,EAAoBI,EACpCK,EAAoBR,EAA2BI,EAIrD,MAAO,CACL,UAHgBC,EAAYC,EAAaC,EAAgBC,EAIzD,UAAAH,EACA,WAAAC,EACA,cAAeP,EAAoB,EAAIQ,EAAgB,OACvD,kBAAmBP,EAA2B,EAAIQ,EAAoB,MAAA,CAE1E,CACF,CC7CO,MAAMC,CAAa,CAOxB,OAAO,uBACLC,EAGqC,CACrC,GAAI,CACF,MAAMC,EAAQD,EAAQ,MAAM;AAAA,CAAI,EAC1BE,EAAuB,CAAA,EAC7B,IAAIC,EAAuB,KACvBnB,EAAa,KACjB,MAAMoB,MAAe,IAErB,UAAWC,KAAQJ,EACjB,GAAII,EAAK,WAAW,QAAQ,EAAG,CAC7B,MAAMC,EAAUD,EAAK,UAAU,CAAC,EAAE,KAAA,EAClC,GAAIC,EACF,GAAI,CACF,MAAM5B,EAAO,KAAK,MAAM4B,CAAO,EAQ/B,GALI5B,EAAK,OAAS,iBAAmBA,EAAK,UACxCyB,EAAkBzB,EAAK,SAIrBA,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQ,CACZ,MAAO7B,EAAK,MACZ,KAAMA,EAAK,cAAc,KACzB,GAAIA,EAAK,cAAc,GACvB,KAAMA,EAAK,cAAc,KACzB,MAAOA,EAAK,cAAc,OAAS,CAAA,EACnC,KAAM,GACN,YAAa,EAAA,EAEf0B,EAAS,IAAI1B,EAAK,MAAO6B,CAAK,CAChC,CAGA,GAAI7B,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACjC6B,IACE7B,EAAK,MAAM,OAAS,aACtB6B,EAAM,MAAQ7B,EAAK,MAAM,KAChBA,EAAK,MAAM,OAAS,qBAC7B6B,EAAM,aAAe7B,EAAK,MAAM,cAGtC,CAGA,GAAIA,EAAK,OAAS,qBAAsB,CACtC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACrC,GAAI6B,EAAO,CAET,GAAIA,EAAM,YACR,GAAI,CACFA,EAAM,MAAQ,KAAK,MAAMA,EAAM,WAAW,CAC5C,MAAqB,CAGrB,CAEFL,EAAc,KAAKK,CAAK,CAC1B,CACF,CAGI7B,EAAK,OAAS,iBAAmBA,EAAK,QACxCM,EAAQN,EAAK,MAEjB,MAAQ,CAER,CAEJ,CAIF,MAAM8B,EAAuBN,EAAc,IAAKK,GAC1CA,EAAM,OAAS,OACV,CACL,KAAM,OACN,KAAMA,EAAM,IAAA,EAELA,EAAM,OAAS,WACjB,CACL,KAAM,kBACN,eAAgB,CACd,WAAYA,EAAM,GAClB,SAAUA,EAAM,KAChB,KAAMA,EAAM,MACZ,MAAO,MAAA,CACT,EAIG,CACL,KAAM,OACN,KAAM,+BAA+BA,EAAM,IAAI,EAAA,CAElD,EAGK9B,EAAU,CACd,GAAI0B,GAAiB,IAAMJ,EAAa,WAAA,EACxC,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAOS,EAAM,OAAS,EAAIA,EAAQ,CAAC,CAAE,KAAM,OAAiB,KAAM,EAAA,CAAI,CAAA,EAIlEC,EAAgB,CACpB,GAAIN,GAAiB,OAAS,CAAA,EAC9B,GAAInB,GAAS,CAAA,CAAC,EAIV0B,EAAa,CACjB,YAAaD,EAAc,cAAgBA,EAAc,YACzD,aAAcA,EAAc,eAAiBA,EAAc,aAC3D,kBAAmBA,EAAc,yBAA2BA,EAAc,kBAC1E,yBAA0BA,EAAc,6BAA+BA,EAAc,yBACrF,aAAcA,EAAc,cAAgB,IAAMA,EAAc,eAAiB,GACjF,cAAeA,EAAc,eAC7B,YAAaA,EAAc,YAAA,EAIvB1B,EAAYoB,GAAiB,MAC7BQ,EAAO5B,EAAYD,GAAe,cAAcC,EAAW2B,CAAU,EAAI,KAuB/E,MApBoB,CAClB,QAAAjC,EACA,SAAU,CACR,MAAOM,EACP,MAAO2B,EACP,KAAAC,EACA,kBAAmBR,GAAiB,GACpC,WAAYA,GAAiB,YAC7B,cAAeK,EAAM,OAAO,CAACI,EAAKC,IAC5BA,EAAK,OAAS,OACTD,EAAMC,EAAK,KAAK,OACdA,EAAK,OAAS,kBAChBD,EAAM,KAAK,UAAUC,EAAK,eAAe,MAAQ,EAAE,EAAE,OAEvDD,EAAM,KAAK,UAAUC,CAAI,EAAE,OACjC,CAAC,EACJ,gBAAiBZ,EAAM,OAAQI,GAASA,EAAK,WAAW,QAAQ,CAAC,EAAE,MAAA,CACrE,CAIJ,OAAS7B,EAAO,CACd,MAAO,CACL,MAAO,uCAAuCA,CAAK,GACnD,QAAAwB,CAAA,CAEJ,CACF,CAKA,OAAe,YAAqB,CAClC,OAAO,KAAK,IAAA,EAAM,SAAS,EAAE,EAAI,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,MAAM,EAAG,EAAE,CACzE,CACF,CAG+CD,EAAa,uBCnM3C,IAAI,MAAM,CAAA,EAAI,CAC3B,IAAIe,EAAGC,EAAK,CACV,MAAM,IAAI,MAAM,gGAAgGA,CAAG,oIAAoI,CACzP,CACJ,CAAG,ECEI,MAAMC,CAAa,CAExB,OAAO,uBAAuBhB,EAAsB,CAClD,GAAI,CAEF,OADcA,EAAQ,SAAS;AAAA,MAAS,GAAKA,EAAQ,WAAW,OAAO,EACrD,KAAK,SAASA,CAAO,EAChC,KAAK,aAAaA,CAAO,CAClC,OAASxB,EAAO,CACd,MAAO,CAAE,MAAO,oCAAoCA,CAAK,GAAI,QAAAwB,CAAA,CAC/D,CACF,CAGA,OAAe,aAAaA,EAAiB,CAC3C,MAAMiB,EAAY,KAAK,eAAejB,CAAO,GAAK,KAAK,wBAAwBA,CAAO,EACtF,IAAIkB,EAAUlB,EACd,GAAIiB,EAAW,CACb,MAAME,EAAM,KAAK,UAAUnB,CAAO,EAC9BmB,EAAI,KAAID,EAAUC,EAAI,KAC5B,CACA,GAAI,CACF,MAAMC,EAAO,KAAK,MAAMF,CAAO,EACzBG,EAAWD,EAAK,UAAYA,EAC5B,CAAE,SAAAE,EAAU,aAAAC,EAAc,MAAAC,EAAO,aAAAC,EAAc,kBAAAC,CAAA,EAAsB,KAAK,qBAC9EL,EAAS,UAAA,EAELrC,EAAQ,KAAK,eAAeqC,EAAS,eAAiBD,EAAK,eAAiBA,CAAI,EAChFZ,EACJgB,EAAM,OAAS,EACXA,EAAM,IAAKG,IAAO
,CAAE,KAAM,OAAQ,KAAMA,GAAI,EAC5CL,EACE,CAAC,CAAE,KAAM,OAAQ,KAAMA,CAAA,CAAU,EACjC,CAAA,EACR,MAAO,CACL,QAAS,CACP,GAAID,EAAS,YAAc,KAAK,WAAA,EAChC,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAAb,CAAA,EAEF,SAAU,CACR,MAAOa,EAAS,cAAgBD,EAAK,aACrC,MAAApC,EACA,WAAYqC,EAAS,WACrB,aAAAE,EACA,cAAeD,EAAS,OACxB,gBAAiB,EACjB,WAAYL,GAAa,OACzB,aAAcQ,EAAa,OAASA,EAAe,OACnD,kBAAmBC,EAAkB,OAASA,EAAoB,MAAA,CACpE,CAEJ,MAAQ,CACN,MAAO,CACL,QAAS,CACP,GAAI,KAAK,WAAA,EACT,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAO,CAAC,CAAE,KAAM,OAAQ,KAAMR,EAAS,CAAA,EAEzC,SAAU,CAAE,IAAK,EAAA,CAAK,CAE1B,CACF,CAGA,OAAe,SAASlB,EAAiB,CACvC,MAAMC,EAAQD,EAAQ,MAAM,OAAO,EACnC,IAAI4B,EACAC,EACAN,EACAO,EACJ,MAAMC,EAA2B,CAAA,EAC3BN,EAAyB,CAAA,EACzBC,EAA8B,CAAA,EAEpC,UAAWrB,KAAQJ,EAAO,CACxB,GAAI,CAACI,EAAK,WAAW,OAAO,EAAG,SAC/B,MAAM2B,EAAU3B,EAAK,MAAM,CAAC,EAAE,KAAA,EAC9B,GAAK2B,EACL,GAAI,CAEF,MAAMX,EADU,KAAK,MAAMW,CAAO,EACT,UAAY,CAAA,EACjCX,EAAS,eAAcO,EAAeP,EAAS,cAC/CA,EAAS,aAAYQ,EAAaR,EAAS,YAC3CA,EAAS,gBAAeS,EAAgBT,EAAS,eACrD,MAAMY,EAAY,KAAK,qBAAqBZ,EAAS,UAAU,EAC3D,CAACE,GAAgBU,EAAU,iBAA6BA,EAAU,cAClEA,EAAU,MAAM,UAAuB,KAAK,GAAGA,EAAU,KAAK,EAC9DA,EAAU,aAAa,UAAqB,KAAK,GAAGA,EAAU,YAAY,EAC1EA,EAAU,kBAAkB,UAA0B,KAAK,GAAGA,EAAU,iBAAiB,CAC/F,MAAQ,CAER,CACF,CAEA,MAAMX,EAAWS,EAAe,KAAK,EAAE,EACjC/C,EAAQ,KAAK,eAAe8C,CAAa,EAC/C,MAAO,CACL,QAAS,CACP,GAAID,GAAc,KAAK,WAAA,EACvB,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAOE,EAAe,OAASA,EAAe,IAAKJ,IAAO,CAAE,KAAM,OAAQ,KAAMA,CAAA,EAAI,EAAI,CAAA,CAAC,EAE3F,SAAU,CACR,MAAOC,EACP,MAAA5C,EACA,WAAA6C,EACA,aAAAN,EACA,cAAeD,EAAS,OACxB,gBAAiBrB,EAAM,OAAQiC,GAAMA,EAAE,WAAW,OAAO,CAAC,EAAE,OAC5D,aAAcT,EAAa,OAASA,EAAe,OACnD,kBAAmBC,EAAkB,OAASA,EAAoB,MAAA,CACpE,CAEJ,CAGA,OAAe,qBAAqBS,EAOlC,CACA,MAAMX,EAAkB,CAAA,EAClBC,EAAyB,CAAA,EACzBC,EAA8B,CAAA,EACpC,IAAIH,EACJ,GAAI,MAAM,QAAQY,CAAU,EAC1B,UAAWC,KAAQD,EAAY,CACzB,CAACZ,GAAgBa,GAAM,iBAA6BA,EAAK,cAC7D,MAAM5B,EAAQ4B,GAAM,SAAS,OAAS,CAAA,EACtC,GAAI,MAAM,QAAQ5B,CAAK,GACrB,UAAWK,KAAQL,EACjB,GAAIK,GAAQ,OAAOA,GAAS,SAAU,CACpC,GAAIA,EAAK,UAAY,GAAM,SAC3B,MAAMwB,EAAMxB,EAAK,KACjB,GAAI,OAAOwB,GAAQ,UAAYA,EAAI,OAAQ,CAEzC,MAAMC,EAAUD,EAAI,KAAA,EACpB,GAAIC,EAAQ,WAAW,GAAG,GAAKA,EAAQ,SAAS,GAAG,EACjD,GAAI,CACF,MAAMC,EAAS,KAAK,MAAMD,CAAO,EACjC,GAAIC,GAAU,OAAOA,GAAW,SAAU,CACpC,OAAOA,EAAO,WAAc,WAC9Bb,EAAkB,KAAKa,EAAO,SAAS,EACvCf,EAAM,KAAKe,EAAO,SAAS,GAEzBA,EAAO,cAAgB,OAAOA,EAAO,cAAiB,UACxDd,EAAa,KAAKc,EAAO,YAAY,EACvC,QACF,CACF,MAAQ,CAER,CAEFf,EAAM,KAAKa,CAAG,CAChB,CACF,EAGN,CAEF,MAAO,CACL,SAAUb,EAAM,KAAK,EAAE,EACvB,aAAAD,EACA,MAAAC,EACA,kBAAmBD,EACnB,aAAAE,EACA,kBAAAC,CAAA,CAEJ,CAEA,OAAe,UAAUc,EAA4C,CACnE,GAAI,CAGF,MAAO,CAAE,GAAI,GAAM,KADPC,SADA,OAAO,KAAKD,EAAK,QAAQ,CACN,EACF,SAAS,MAAM,CAAA,CAC9C,MAAQ,CACN,MAAO,CAAE,GAAI,GAAO,KAAMA,CAAA,CAC5B,CACF,CAEA,OAAe,wBAAwBE,EAAuB,CAC5D,GAAI,CAACA,EAAM,MAAO,GAClB,IAAIC,EAAU,EACd,MAAMC,EAAY,KAAK,IAAIF,EAAK,OAAQ,GAAG,EAC3C,QAASG,EAAI,EAAGA,EAAID,EAAWC,IAAK,CAClC,MAAMC,EAAOJ,EAAK,WAAWG,CAAC,EAC1BC,EAAO,IAAMA,IAAS,GAAKA,IAAS,IAAMA,IAAS,IAAIH,GAC7D,CACA,OAAOA,EAAUC,EAAY,EAC/B,CAEA,OAAe,eAAeF,EAAuB,CACnD,OAAOA,EAAK,QAAU,GAAKA,EAAK,WAAW,CAAC,IAAM,IAAQA,EAAK,WAAW,CAAC,IAAM,GACnF,CAEA,OAAe,eAAe1D,EAAY,CACxC,GAAI,CAACA,GAAS,OAAOA,GAAU,SAC7B,MAAO,CAAE,YAAa,OAAW,aAAc,OAAW,YAAa,MAAA,EAEzE,MAAM+D,EAAQ/D,EAAM,kBAAoBA,EAAM,cACxCgE,EAAShE,EAAM,sBAAwBA,EAAM,kBAC7CiE,EACJjE,EAAM,kBAAoB,OAAO+D,GAAU,UAAY,OAAOC,GAAW,SAAWD,EAAQC,EAAS,QACvG,MAAO,CAAE,YAAaD,EAAO,aAAcC,EAAQ,YAAaC,CAAA,CAClE,CAEA,OAAe,YAAqB,CAClC,OAAO,KAAK,IAAA,EAAM,SAAS,EAAE,EAAI,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,MAAM,EAAG,EAAE,CACzE,CACF,CAE4CjC,EAAa,uBCzMlD,MAAMkC,CAAU,CACrB,OAAe,QAA2B,CACxC,CACE,KAAM,qBACN,SAAU,CACR,mCACA
,kBACA,cACA,iBAAA,EAEF,OAAQnD,CAAA,EAEV,CACE,KAAM,gBACN,SAAU,CACR,uCACA,iCACA,yBACA,kBAAA,EAEF,OAAQiB,CAAA,CACV,EAgBF,OAAO,uBAAuBmC,EAAanD,EAAsB,CAC/D,MAAMoD,EAASF,EAAU,gBAAgBC,CAAG,EAE5C,GAAI,CAACC,EACH,MAAO,CACL,MAAO,4BAA4BD,CAAG,GACtC,QAAAnD,EACA,mBAAoBkD,EAAU,QAAQ,IAAKG,GAAMA,EAAE,IAAI,CAAA,EAI3D,GAAI,CACF,MAAMC,EAASF,EAAO,uBAAuBpD,CAAO,EAGpD,OAAIsD,GAAU,OAAOA,GAAW,UAAY,CAACA,EAAO,QAClDA,EAAO,SAAW,CAChB,GAAGA,EAAO,SACV,OAAQJ,EAAU,oBAAoBC,CAAG,CAAA,GAItCG,CACT,OAAS9E,EAAO,CACd,MAAO,CACL,MAAO,kBAAkBA,CAAK,GAC9B,QAAAwB,EACA,OAAQkD,EAAU,oBAAoBC,CAAG,CAAA,CAE7C,CACF,CAQA,OAAO,gBAAgBA,EAAqC,CAC1D,GAAI,CAACA,EAAK,OAAO,KAEjB,UAAWI,KAAWL,EAAU,QAC9B,GAAIK,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,OAInB,OAAO,IACT,CAQA,OAAO,oBAAoBJ,EAAqB,CAC9C,GAAI,CAACA,EAAK,MAAO,UAEjB,UAAWI,KAAWL,EAAU,QAC9B,GAAIK,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,KAInB,MAAO,SACT,CAQA,OAAO,YAAYJ,EAAsB,CACvC,OAAOD,EAAU,gBAAgBC,CAAG,IAAM,IAC5C,CAOA,OAAO,uBAAkC,CACvC,OAAOD,EAAU,QAAQ,IAAKG,GAAMA,EAAE,IAAI,CAC5C,CASA,OAAO,eAAeI,EAAcC,EAAoBN,EAA+B,CACrFF,EAAU,QAAQ,KAAK,CACrB,KAAAO,EACA,SAAAC,EACA,OAAAN,CAAA,CACD,CACH,CACF,CCzJO,MAAMO,CAAY,CACN,aAAe,CAC9B,kBACA,cACA,eACA,oBACA,kBACA,kCACA,iCACA,wBACA,wBACA,iBACA,cACA,mBACA,kBACA,iBACA,eACA,aACA,kBACA,UACA,qBACA,wBACA,2BACA,qBACA,oBACA,kBACA,kCAAA,EAGe,aAAe,CAC9B,CAAE,QAAS,mCAAoC,SAAU,WAAA,EACzD,CAAE,QAAS,6BAA8B,SAAU,WAAA,EACnD,CAAE,QAAS,eAAgB,SAAU,QAAA,EACrC,CAAE,QAAS,0EAA2E,SAAU,QAAA,EAChG,CAAE,QAAS,wBAAyB,SAAU,aAAA,EAC9C,CAAE,QAAS,sCAAuC,SAAU,OAAA,EAC5D,CAAE,QAAS,cAAe,SAAU,QAAA,EACpC,CAAE,QAAS,mBAAoB,SAAU,aAAA,EACzC,CAAE,QAAS,kBAAmB,SAAU,WAAA,EACxC,CAAE,QAAS,iBAAkB,SAAU,UAAA,EACvC,CAAE,QAAS,eAAgB,SAAU,SAAA,EACrC,CAAE,QAAS,aAAc,SAAU,MAAA,EACnC,CAAE,QAAS,kBAAmB,SAAU,YAAA,EACxC,CAAE,QAAS,iDAAkD,SAAU,OAAA,CAAQ,EAQjF,UAAUR,EAAsB,CAC9B,OAAKA,EACE,KAAK,aAAa,KAAMK,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EAD3C,EAEnB,CAOA,eAAeA,EAAqB,CAClC,GAAI,CAACA,EAAK,MAAO,UAEjB,SAAW,CAAE,QAAAK,EAAS,SAAAI,CAAA,IAAc,KAAK,aACvC,GAAIJ,EAAQ,KAAKL,CAAG,EAClB,OAAOS,EAIX,MAAO,SACT,CAMA,uBAAkC,CAChC,OAAO,MAAM,KAAK,IAAI,IAAI,KAAK,aAAa,IAAI,CAAC,CAAE,SAAAA,KAAeA,CAAQ,CAAC,CAAC,CAC9E,CAOA,oBAAoBC,EAA+B,CACjD,OAAO,KAAK,sBAAA,EAAwB,SAASA,EAAa,aAAa,CACzE,CAOA,mBAAmBA,EAAqC,CACtD,MAAMN,EAAU,KAAK,aAAa,KAAK,CAAC,CAAE,SAAAK,CAAA,IAAeA,IAAaC,EAAa,aAAa,EAChG,OAAON,EAAUA,EAAQ,QAAU,IACrC,CACF,CC7FO,MAAMO,CAAgB,CACnB,cAAqC,KAG7C,OAAwB,cAAgB,IAAM,KACtC,IACA,MAER,YAAYC,EAAoBC,EAA8B,CAC5D,KAAK,IAAMD,EACX,KAAK,MAAQC,CACf,CAEA,OAAc,CACZ,GAAI,OAAO,WAAW,OAAU,YAAc,KAAK,cAAe,OAElE,KAAK,cAAgB,WAAW,MAAM,KAAK,UAAU,EACrD,MAAMD,EAAM,KAAK,IACXC,EAAQ,KAAK,MAEnB,WAAW,MAAS,MAAOC,EAAeC,IAAe,CACvD,MAAMC,EAAQC,EAAAA,YAAY,IAAA,EAC1B,IAAIjB,EAAM,GACV,GAAI,CACFA,EAAM,OAAOc,GAAa,SAAWA,EAAWA,GAAU,KAAO,EACnE,MAAQ,CAAC,CACT,MAAMI,GAAUH,GAAM,QAAUD,GAAU,QAAU,OAAO,YAAA,EAG3D,IAAIK,EAAc,GAClB,GAAI,CACF,MAAMC,EAAYL,GAAM,MAAQD,GAAU,KAC1C,GAAIM,EACF,GAAI,OAAOA,GAAS,SAAUD,EAAcC,UACnCA,aAAgB,gBAAiBD,EAAcC,EAAK,SAAA,UACpD,OAAO,SAAa,KAAeA,aAAgB,SAAU,CACpE,MAAMC,EAA2B,CAAA,EACjC,SAAW,CAACC,EAAGC,CAAC,IAAKH,EAAK,QAAA,EAAWC,EAAIC,CAAC,EAAI,OAAOC,GAAM,SAAWA,EAAI,cAC1EJ,EAAc,KAAK,UAAUE,CAAG,CAClC,MAAWD,aAAgB,YAAc,OAAO,SAASA,CAAI,EAC3DD,EAAc,OAAO,KAAKC,CAAI,EAAE,SAAS,MAAM,EACxC,OAAOA,GAAS,WAAUD,EAAc,KAAK,UAAUC,CAAI,EAExE,MAAQ,CACND,EAAc,sBAChB,CAEA,MAAMK,EAAUZ,EAAI,YAAA,EACda,EAAY,IAAI,KAAA,EAAO,YAAA,EAE7B,GAAIZ,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBb,EAAKkB,EAAQC,CAAW,EAAG,CACtG,MAAMO,EAA8B,CAClC,GAAIF,EACJ,KAAMC,EACN,UAAW,QACX,OAAAP,EACA,IAAAlB,EACA,YAAAmB,EACA,WAAY,CAAA,EAEdN,EAAM,cAAca,CAAU,EAAE,MAAOC,GAAQ,CAC7C,QAAQ,MAAM,+CAAgDA,CAAG,CACnE,CA
AC,CACH,CAGA,IAAIC,EACJ,GAAI,CACFA,EAAM,MAAM,KAAK,cAAed,EAAUC,CAAI,CAChD,OAASc,EAAc,CACrB,MAAMC,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EAC/BvF,EAA4B,CAChC,GAAI+F,EACJ,KAAMC,EACN,UAAW,QACX,OAAAP,EACA,IAAAlB,EACA,YAAAmB,EACA,WAAY,KAAK,MAAMW,CAAQ,EAC/B,MAAQD,EAAuB,OAAA,EAEjC,MAAAjB,EAAI,eAAenF,CAAQ,EACvBoF,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBb,EAAKkB,EAAQC,CAAW,GACnGN,EAAM,eAAepF,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAEzCoG,CACR,CAEA,GAAI,CACF,MAAMC,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EACrC,IAAIe,EAAe,GACfC,EAAY,EACZC,EAAqB,KACrBC,EAAgB,GAGpB,GAAI,CACF,MAAMC,EAAcP,EAAI,QAAQ,IAAI,cAAc,GAAK,GACvDM,EAAgB,sBAAsB,KAAKC,CAAW,EACtD,MAAMC,EACJ,gDAAgD,KAAKD,CAAW,GAAK,mBAAmB,KAAKA,CAAW,EAE1G,GAAKD,EAgBE,CAEL,MAAMG,EAAeT,EAAI,KACzB,GAAIS,EAAc,CAChB,KAAM,CAACC,EAASC,CAAO,EAAIF,EAAa,IAAA,EAGlCG,GAAkB,SAAY,CAClC,GAAI,CACF,MAAMC,EAASH,EAAQ,UAAA,EACjBI,EAAuB,CAAA,EAC7B,IAAIC,EAAa,EAGjB,OAAa,CACX,KAAM,CAAE,KAAAC,EAAM,MAAAC,CAAA,EAAU,MAAMJ,EAAO,KAAA,EACrC,GAAIG,EAAM,MAEVF,EAAO,KAAKG,CAAK,EACjBF,GAAcE,EAAM,UACtB,CASA,GAJAd,EAFuB,OAAO,OAAOW,EAAO,IAAKI,GAAM,OAAO,KAAKA,CAAC,CAAC,CAAC,EAClC,SAAS,MAAM,EAEnDd,EAAYW,EAGR/B,EAAI,YAAY,UAAUZ,CAAG,GAAK+B,EACpC,GAAI,CACFE,EAAgBrB,EAAI,kBAAkBZ,EAAK+B,CAAY,CACzD,OAASJ,EAAK,CACZM,EAAgB,CAAE,MAAO,kBAAkBN,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAC,EAAA,CAC7F,CAIF,MAAMoB,EAAiC,CACrC,GAAIvB,EACJ,KAAMC,EACN,UAAW,QACX,OAAAP,EACA,IAAAlB,EACA,YAAAmB,EACA,OAAQS,EAAI,OACZ,GAAIA,EAAI,GACR,WAAY,KAAK,MAAMX,EAAAA,YAAY,IAAA,EAAQD,CAAK,EAChD,aAAAe,EACA,cAAAE,EACA,UAAAD,CAAA,EAEF,OAAApB,EAAI,eAAemC,CAAa,EAI9BlC,EAAM,kBACNA,EAAM,WACNA,EAAM,sBAAsBb,EAAKkB,EAAQC,CAAW,GAEpDN,EAAM,eAAekC,CAAa,EAAE,MAAM,IAAM,CAAC,CAAC,EAG7C,CAAE,aAAAhB,EAAc,cAAAE,EAAe,UAAAD,CAAA,CACxC,OAASL,EAAK,CACZ,eAAQ,MAAM,0BAA2BA,CAAG,EACrC,CAAE,aAAc,yBAA0B,cAAe,KAAM,UAAW,CAAA,CACnF,CACF,GAAA,EAGCC,EAAY,iBAAmBY,EAGhCZ,EAAM,IAAI,SAASW,EAAS,CAC1B,OAAQX,EAAI,OACZ,WAAYA,EAAI,WAChB,QAASA,EAAI,OAAA,CACd,EAGDG,EAAe,0BACfE,EAAgB,IAClB,MACEF,EAAe,mCAEnB,KAtGoB,CAGlB,MAAMiB,EAAc,MADNpB,EAAI,MAAA,EACc,YAAA,EAChCI,EAAYgB,EAAY,WACxB,MAAMC,EAAMtC,EAAgB,cACxByB,EACEY,EAAY,YAAcC,EAC5BlB,EAAe,OAAO,KAAKiB,CAAW,EAAE,SAAS,MAAM,EAGvDjB,EAAe,GADH,OAAO,KAAKiB,EAAY,MAAM,EAAGC,CAAG,CAAC,EAC3B,SAAS,MAAM,CAAC;AAAA,iBAAoBD,EAAY,WAAaC,GAAK,eAAA,CAAgB,UAG1GlB,EAAe,IAAIC,CAAS,gBAEhC,CAuFF,MAAQ,CACND,EAAe,uBACjB,CAGA,GACE,CAACG,GACDtB,EAAI,YAAY,UAAUZ,CAAG,GAC7B+B,GACAA,IAAiB,wBAEjB,GAAI,CACFE,EAAgBrB,EAAI,kBAAkBZ,EAAK+B,CAAY,CACzD,OAASJ,EAAK,CACZM,EAAgB,CAAE,MAAO,kBAAkBN,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAC,EAAA,CAC7F,CAGF,MAAMlG,EAA4B,CAChC,GAAI+F,EACJ,KAAMC,EACN,UAAW,QACX,OAAAP,EACA,IAAAlB,EACA,YAAAmB,EACA,OAAQS,EAAI,OACZ,GAAIA,EAAI,GACR,WAAY,KAAK,MAAME,CAAQ,EAC/B,aAAAC,EACA,cAAAE,EACA,UAAAD,CAAA,EAEF,OAAApB,EAAI,eAAenF,CAAQ,EAKzB,CAACyG,GACDrB,EAAM,kBACNA,EAAM,WACNA,EAAM,sBAAsBb,EAAKkB,EAAQC,CAAW,GAEpDN,EAAM,eAAepF,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAExCmG,CACT,OAASvG,EAAO,CACd,MAAMyG,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EAC/BvF,EAA4B,CAChC,GAAI+F,EACJ,KAAMC,EACN,UAAW,QACX,OAAAP,EACA,IAAAlB,EACA,YAAAmB,EACA,WAAY,KAAK,MAAMW,CAAQ,EAC/B,MAAQzG,EAAgB,OAAA,EAE1B,OAAAuF,EAAI,eAAenF,CAAQ,EACvBoF,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBb,EAAKkB,EAAQC,CAAW,GACnGN,EAAM,eAAepF,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAExCmG,CACT,CACF,CACF,CAEA,SAAgB,CACV,KAAK,gBACP,WAAW,MAAQ,KAAK,cACxB,KAAK,cAAgB,KAEzB,CACF,CCvRO,MAAMsB,EAAe,CAE1B,YACUtC,EACAuC,EACAC,EAA+B,CACrC,sBAAuB,IAAM,GAC7B,cAAe,SAAY,CAAC,EAC5B,eAAgB,SAAY,CAAC,CAAA,EAE/B,CAPQ,KAAA,IAAAxC,EACA,KAAA,YAAAuC,EACA,KAAA,OAAAC,CAKP,CATK,UAAiB,KAWzB,OAAc,CACZ,GAAI,CAcF,IAASC,EAAT,SAAkBC,EAAcC,EAAiC,CAC/D,GAAI,CACF,GAAI,OAAOD,GAAY,SAAU,OAAOA,EACxC,GAAIA,aAAmB,IAAK,OAAOA,EAAQ,SAAA
,EAC3C,MAAME,EAAWF,EAAQ,UAAYC,EAC/BE,EAAOH,EAAQ,UAAYA,EAAQ,MAAQ,YAC3CI,EAAWJ,EAAQ,KAAO,IAAIA,EAAQ,IAAI,GAAK,GAC/CnI,EAAOmI,EAAQ,MAAQA,EAAQ,UAAY,IACjD,MAAO,GAAGE,CAAQ,KAAKC,CAAI,GAAGC,CAAQ,GAAGvI,CAAI,EAC/C,MAAQ,CACN,MAAO,EACT,CACF,EAESwI,EAAT,SAAqBC,EAAoBJ,EAA8B,CACrE,OAAO,SAAwBF,EAAcO,EAAgB,CAC3D,MAAM7D,EAAMqD,EAASC,EAASE,CAAQ,EAChCtC,GAAU,OAAOoC,GAAY,UAAYA,EAAQ,OAASA,EAAQ,OAAS,OAAO,YAAA,EAElFQ,EAAU7C,EAAAA,YAAY,IAAA,EACtBO,EAAUuC,EAAK,IAAI,YAAA,EACnBtC,EAAY,IAAI,KAAA,EAAO,YAAA,EACvBuC,EAA+B,CAAA,EACrC,IAAIC,EAAa,EACbC,EAAkB,GAEtB,MAAMC,EAAMP,EAASN,EAAU1B,GAAa,CAC1C,MAAMwC,EAA4B,CAAA,EAClC,IAAIC,EAAgB,EAChBC,EAAgB,EACpB,MAAMC,EAAoB,IAAM,OAAO3C,EAAI,QAAQ,cAAc,GAAK,EAAE,EACxEA,EAAI,GAAG,OAAS4C,GAAkB,CAChCH,GAAiBG,EAAM,OACvB,MAAMC,EAAKF,EAAA,EAIX,GAHsB,sBAAsB,KAAKE,CAAE,EAIjDL,EAAgB,KAAKI,CAAK,EAC1BF,GAAiBE,EAAM,WAClB,CAEL,GAAIF,GAAiBI,EAAS,OAC9B,MAAMC,EAAYD,EAAUJ,EACxBE,EAAM,QAAUG,GAClBP,EAAgB,KAAKI,CAAK,EAC1BF,GAAiBE,EAAM,SAEvBJ,EAAgB,KAAKI,EAAM,SAAS,EAAGG,CAAS,CAAC,EACjDL,GAAiBK,EAErB,CACF,CAAC,EACD/C,EAAI,GAAG,MAAO,IAAM,CAClB,MAAME,EAAWb,EAAAA,YAAY,IAAA,EAAQ6C,EACrC,IAAI/B,EAAe,GACnB,GAAI,CACF,MAAM0C,EAAKF,EAAA,EACLrC,EAAgB,sBAAsB,KAAKuC,CAAE,EAC7CG,EAAY,OAAO,OAAOR,CAAe,EACzCS,EAAeR,EAAgBO,EAAU,QAAUA,EAAU,SAAWF,EAC9E,IAAII,EAASF,EAGb,GAAI,CAAC1C,EAAe,CAClB,MAAM6C,EAAkB,OAAOnD,EAAI,QAAQ,kBAAkB,GAAK,EAAE,EAAE,YAAA,EAGtE,GADsB,CAACiD,GAAgBC,EAAO,QAAU,oBAAoB,KAAKC,CAAe,EAE9F,GAAI,CACF,MAAMC,EAAO,QAAQ,WAAW,EAC5BD,EAAgB,SAAS,MAAM,EAAGD,EAASE,EAAK,WAAWJ,CAAS,EAC/DG,EAAgB,SAAS,IAAI,EAAGD,EAASE,EAAK,qBAAqBJ,CAAS,EAC5EG,EAAgB,SAAS,SAAS,IAAGD,EAASE,EAAK,YAAYJ,CAAS,EACnF,MAAQ,CAER,CAEJ,CAGe,UAAU,KAAKH,CAAE,GAAK,sCAAsC,KAAKA,CAAE,GACpEvC,GACZH,EAAe+C,EAAO,SAAS,MAAM,EAEjCD,GAAgB,CAAC3C,IAAeH,GAAgB;AAAA,kBAC3C+C,EAAO,SAChB/C,EAAe,IAAI+C,EAAO,MAAM,SAASD,EAAe,eAAiB,EAAE,WAE/E,MAAQ,CACN9C,EAAe,uBACjB,CACA,IAAIZ,EAAc,GAClB,GAAI,CACF,MAAM8D,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzEvD,EAAc8D,EAAS,SAAS,MAAM,EAClCJ,IAAc1D,GAAe;AAAA,gBACnC,MAAQ,CACNA,EAAc,sBAChB,CACA,IAAIc,EAAqB,KACzB,GAAI8B,EAAK,IAAI,YAAY,UAAU/D,CAAG,EACpC,GAAI,CAEFiC,EAAgB8B,EAAK,IAAI,kBAAkB/D,EAAK+B,CAAY,CAC9D,OAASmD,EAAG,CACVjD,EAAgB,CACd,MAAO,CAAC,CAAE,KAAM,OAAQ,KAAMF,GAAgB,mBAAoB,EAClE,SAAU,CAAE,MAAO,OAAOmD,CAAC,CAAA,CAAE,CAEjC,CAEF,MAAMzJ,EAA4B,CAChC,GAAI+F,EACJ,KAAMC,EACN,UAAW+B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAAlB,EACA,YAAAmB,EACA,OAAQS,EAAI,WACZ,GAAIA,EAAI,YAAc,KAAOA,EAAI,WAAa,IAC9C,WAAY,KAAK,MAAME,CAAQ,EAC/B,aAAAC,EACA,cAAAE,EACA,UAAWoC,CAAA,EAEb,GAAI,CACF,MAAMc,EAAI,IAAI,IAAInF,CAAG,EACrBvE,EAAS,KAAO0J,EAAE,SAClB1J,EAAS,KAAO0J,EAAE,QACpB,MAAQ,CAAC,CACTpB,EAAK,IAAI,eAAetI,CAAQ,EAI9BsI,EAAK,OAAO,kBACZA,EAAK,OAAO,WACZA,EAAK,OAAO,sBAAsB/D,EAAKkB,EAAQC,CAAW,GAE1D4C,EAAK,OAAO,eAAetI,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,CAEvD,CAAC,EACDmG,EAAI,GAAG,QAAUD,GAAe,CAC9B,MAAMG,EAAWb,EAAAA,YAAY,IAAA,EAAQ6C,EACrC,IAAI3C,EAAc,GAClB,GAAI,CACF,MAAM8D,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzEvD,EAAc8D,EAAS,SAAS,MAAM,EAClCJ,IAAc1D,GAAe;AAAA,gBACnC,MAAQ,CACNA,EAAc,sBAChB,CACA,MAAM1F,EAA4B,CAChC,GAAI+F,EACJ,KAAMC,EACN,UAAW+B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAAlB,EACA,YAAAmB,EACA,WAAY,KAAK,MAAMW,CAAQ,EAC/B,MAAOH,EAAI,OAAA,EAEb,GAAI,CACF,MAAMwD,EAAI,IAAI,IAAInF,CAAG,EACrBvE,EAAS,KAAO0J,EAAE,SAClB1J,EAAS,KAAO0J,EAAE,QACpB,MAAQ,CAAC,CACTpB,EAAK,IAAI,eAAetI,CAAQ,EAI9BsI,EAAK,OAAO,kBACZA,EAAK,OAAO,WACZA,EAAK,OAAO,sBAAsB/D,EAAKkB,EAAQC,CAAW,GAE1D4C,EAAK,OAAO,eAAetI,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,CAEvD,CAAC,EACGoI,KAAmBjC,CAAG,CAC5B,CAAC,EACKwD,EAAgBjB,EAAI,MACpBkB,EAAclB,EAAI,IACxB,OAAAA,EAAI,MAAQ,SAAUK,EAAYc
,EAAgBC,EAAU,CAC1D,GAAI,CACF,GAAIf,GAASP,EAAaS,EAAS,CACjC,MAAMc,EAAM,OAAO,SAAShB,CAAK,EAAIA,EAAQ,OAAO,KAAKA,EAAOc,GAAY,MAAM,EAC5EX,EAAYD,EAAUT,EACxBuB,EAAI,QAAUb,GAChBX,EAAmB,KAAKwB,CAAG,EAC3BvB,GAAcuB,EAAI,SAElBxB,EAAmB,KAAKwB,EAAI,SAAS,EAAGb,CAAS,CAAC,EAClDV,GAAcU,EAElB,CACF,MAAQ,CAAC,CACT,OAAOS,EAAc,KAAK,KAAMZ,EAAOc,EAAUC,CAAE,CACrD,EACApB,EAAI,IAAM,SAAUK,EAAac,EAAgBC,EAAU,CACzD,GAAI,CACF,GAAIf,GAASP,EAAaS,EAAS,CACjC,MAAMc,EAAM,OAAO,SAAShB,CAAK,EAAIA,EAAQ,OAAO,KAAKA,EAAOc,GAAY,MAAM,EAC5EX,EAAYD,EAAUT,EACxBuB,EAAI,QAAUb,GAChBX,EAAmB,KAAKwB,CAAG,EAC3BvB,GAAcuB,EAAI,SAElBxB,EAAmB,KAAKwB,EAAI,SAAS,EAAGb,CAAS,CAAC,EAClDV,GAAcU,EAElB,CAGA,GAAI,CAACT,EAAiB,CACpBA,EAAkB,GAClB,GAAI,CACF,MAAMe,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzE,IAAIvD,EAAc8D,EAAS,SAAS,MAAM,EACtCJ,IAAc1D,GAAe;AAAA,iBAEjC,MAAMsE,EAAU1B,EAAK,OAAO,sBAAsB/D,EAAKkB,EAAQC,CAAW,EAE1E,GAAI4C,EAAK,OAAO,kBAAoBA,EAAK,OAAO,WAAa0B,EAAS,CACpE,MAAM/D,EAA8B,CAClC,GAAIF,EACJ,KAAMC,EACN,UAAW+B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAAlB,EACA,YAAAmB,EACA,WAAY,CAAA,EAEd4C,EAAK,OAAO,cAAcrC,CAAU,EAAE,MAAM,IAAM,CAAC,CAAC,CACtD,CACF,MAAQ,CAAC,CACX,CACF,MAAQ,CAAC,CACT,OAAO2D,EAAY,KAAK,KAAMb,EAAOc,EAAUC,CAAE,CACnD,EACOpB,CACT,CACF,EAjQA,MAAMuB,EAAO,QAAQ,MAAM,EACrBC,EAAQ,QAAQ,OAAO,EAC7B,GAAI,KAAK,UAAW,OAEpB,KAAK,UAAY,CACf,YAAaD,EAAK,QAClB,aAAcC,EAAM,QACpB,QAASD,EAAK,IACd,SAAUC,EAAM,GAAA,EAElB,MAAM5B,EAAO,KACPW,EAAU,KAAK,IAAI,aAwPzBgB,EAAK,QAAU/B,EAAY,KAAK,UAAU,YAAa,OAAO,EAC9D+B,EAAK,IAAM,SAAoBE,EAAQL,EAAU,CAC/C,MAAMM,EAAKH,EAAK,QAAgBE,EAAGL,CAAE,EACrC,OAAAM,EAAE,IAAA,EACKA,CACT,EACI,KAAK,cAAgB,KACvBF,EAAM,QAAUhC,EAAY,KAAK,UAAU,aAAc,QAAQ,EACjEgC,EAAM,IAAM,SAAoBC,EAAQL,EAAU,CAChD,MAAMM,EAAKF,EAAM,QAAgBC,EAAGL,CAAE,EACtC,OAAAM,EAAE,IAAA,EACKA,CACT,EAEJ,MAAY,CAEZ,CACF,CAEA,SAAgB,CACd,GAAI,KAAK,UAAW,CAClB,GAAI,CACF,MAAMH,EAAO,QAAQ,MAAM,EACrBC,EAAQ,QAAQ,OAAO,EAC7BD,EAAK,QAAU,KAAK,UAAU,YAC9BA,EAAK,IAAM,KAAK,UAAU,QAC1BC,EAAM,QAAU,KAAK,UAAU,aAC/BA,EAAM,IAAM,KAAK,UAAU,QAC7B,MAAQ,CAAC,CACT,KAAK,UAAY,IACnB,CACF,CACF,CCxRO,MAAMG,CAAe,CAC1B,OAAe,SAAkC,KACzC,UAAY,GACZ,OAGA,YACA,YACA,iBACA,UACA,mBAAqB,IACrB,UACA,YACA,gBAGA,gBACA,eAER,YAAYC,EAA+B,GAAI,CAE7C,KAAK,YAAcA,EAAO,aAAe,IAAIvF,EAG7C,KAAK,YAAcuF,EAAO,aAAeC,EAAAA,KAGzC,KAAK,iBAAmBD,EAAO,iBAC/B,KAAK,UAAYA,EAAO,UAExB,KAAK,OAAS,CACZ,aAAcA,EAAO,cAAgB,KACrC,oBAAqBA,EAAO,qBAAuB,GACnD,YAAaA,EAAO,aAAe,GACnC,UACEA,EAAO,YACLE,GACA,CAAC,EACCA,EAAM,KACN,CAACA,EAAM,IAAI,SAAS,SAAS,GAC7B,CAACA,EAAM,IAAI,SAAS,oBAAoB,GACxC,CAACA,EAAM,IAAI,WAAW,OAAO,IAGnC,kBACEF,EAAO,oBACN,CAAC/F,EAAa+B,IAAyB,CACtC,GAAI,CACF,OAAOhC,EAAU,uBAAuBC,EAAK+B,CAAY,CAC3D,MAAQ,CACN,OAAO,IACT,CACF,EAAA,EAIJ,MAAMmE,EAAa/K,EAAK,KAAKgL,EAAG,OAAA,EAAU,iBAAkB,MAAO,WAAW,QAAQ,GAAG,EAAE,EACrFC,EAAcL,EAAO,aAAe5K,EAAK,KAAK+K,EAAY,aAAa,EACvEG,EAAmBlL,EAAK,KAAK+K,EAAY,WAAW,EACpDI,EAAuBP,EAAO,sBAAwB5K,EAAK,KAAK+K,EAAY,iBAAiB,EAGnG,GAAI,CACF,KAAK,UAAY,IAAIlL,EAAWoL,CAAW,CAC7C,MAAQ,CAAC,CACT,GAAI,CACF,KAAK,YAAc,IAAIpL,EAAWqL,CAAgB,CACpD,MAAQ,CAAC,CACT,GAAI,CACF,KAAK,gBAAkB,IAAIrL,EAAWsL,CAAoB,CAC5D,MAAQ,CAAC,CACX,CAKA,OAAO,YAAYP,EAA+C,CAChE,OAAKD,EAAe,WAClBA,EAAe,SAAW,IAAIA,EAAeC,CAAM,GAE9CD,EAAe,QACxB,CAKA,OAAc,CACZ,GAAI,KAAK,UACP,OAGF,MAAMS,EAAwB,CAC5B,YAAa,KAAK,YAClB,aAAc,KAAK,OAAO,aAC1B,YAAa,KAAK,OAAO,YACzB,UAAW,KAAK,OAAO,UACvB,kBAAmB,KAAK,OAAO,kBAC/B,YAAa,KAAK,YAClB,eAAiB,GAAM,KAAK,eAAe,CAAC,CAAA,EAG9C,KAAK,gBAAkB,IAAI5F,EAAgB4F,EAAQ,CACjD,iBAAkB,KAAK,iBACvB,UAAW,KAAK,UAChB,sBAAuB,CAACvG,EAAKkB,EAAQE,IAAS,KAAK,sBAAsBpB,EAAKkB,EAAQE,CAAI,EAC1F,cAAgB6E,GAAU,KAAK,cAAcA,CAAK,EAClD,eAAiBA,GAAU,KAAK,eAAeA,CAAK,CAAA,CACrD,EACD,KAAK,gBAAgB,MAAA,EAErB
,KAAK,eAAiB,IAAI/C,GAAeqD,EAAQ,KAAK,OAAO,oBAAqB,CAChF,iBAAkB,KAAK,iBACvB,UAAW,KAAK,UAChB,sBAAuB,CAACvG,EAAKkB,EAAQE,IAAS,KAAK,sBAAsBpB,EAAKkB,EAAQE,CAAI,EAC1F,cAAgB6E,GAAU,KAAK,cAAcA,CAAK,EAClD,eAAiBA,GAAU,KAAK,eAAeA,CAAK,CAAA,CACrD,EACD,KAAK,eAAe,MAAA,EAEpB,KAAK,UAAY,EACnB,CAKA,MAAa,CACN,KAAK,YAGV,KAAK,iBAAiB,QAAA,EACtB,KAAK,gBAAgB,QAAA,EACrB,KAAK,UAAY,GACnB,CAKA,UAAoB,CAClB,OAAO,KAAK,SACd,CAKA,aAAaF,EAA6C,CACxD,KAAK,OAAS,CAAE,GAAG,KAAK,OAAQ,GAAGA,CAAA,CACrC,CAKQ,eAAeE,EAA8B,CAE/C,KAAK,WACP,KAAK,UAAU,KAAK,gBAAiB,CAAE,WAAYA,EAAO,EAGxD,KAAK,iBAAmB,KAAK,OAAO,UAAUA,CAAK,GAAKA,EAAM,eAChE,KAAK,gBAAgB,KAAK,kBAAmB,CAC3C,GAAIA,EAAM,GACV,IAAKA,EAAM,IACX,OAAQA,EAAM,OACd,OAAQA,EAAM,OACd,WAAYA,EAAM,WAClB,UAAWA,EAAM,KACjB,cAAeA,EAAM,aAAA,CACtB,CAGL,CAKQ,sBAAsBjG,EAAakB,EAAgBC,EAA8B,CAWvF,GAVI,KAAK,WACP,KAAK,UAAU,KAAK,iCAAkC,CACpD,IAAAnB,EACA,OAAAkB,EACA,QAAS,CAAC,CAACC,EACX,WAAYA,GAAa,MAAA,CAC1B,EAIC,CAAC,KAAK,YAAY,UAAUnB,CAAG,EACjC,OAAI,KAAK,WACP,KAAK,UAAU,KAAK,2BAA4B,CAAE,IAAAA,EAAK,EAErD,KAAK,WAAa,KAAK,OAAO,aAChC,KAAK,UAAU,MAAM,4CAA6C,CAChE,IAAAA,EACA,OAAAkB,CAAA,CACD,EAEI,GAIT,GAAIlB,EAAI,SAAS,6BAA6B,GAAKA,EAAI,SAAS,mCAAmC,EAEjG,MAAI,EAAAA,EAAI,SAAS,uBAAuB,EAa1C,GALIkB,IAAW,QAKX,CAACC,EACH,MAAO,GAGT,GAAI,CACF,MAAMC,EAAO,KAAK,MAAMD,CAAW,EAG7BqF,EAAcpF,EAAK,UAAY,MAAM,QAAQA,EAAK,QAAQ,GAAKA,EAAK,SAAS,OAAS,EACtFqF,EAAYrF,EAAK,QAAU,OAAOA,EAAK,QAAW,SAClDsF,EAAWtF,EAAK,OAAS,OAAOA,EAAK,OAAU,SAC/CuF,EAAcvF,EAAK,UAAY,MAAM,QAAQA,EAAK,QAAQ,GAAKA,EAAK,SAAS,OAAS,EAGtFwF,EACJxF,EAAK,SAAS,UAAY,MAAM,QAAQA,EAAK,QAAQ,QAAQ,GAAKA,EAAK,QAAQ,SAAS,OAAS,EAKnG,OAFgBoF,GAAeC,GAAaC,GAAYC,GAAeC,CAGzE,OAASjF,EAAK,CACZ,OAAI,KAAK,aACP,KAAK,YAAY,MAAM,wDAAyD,CAC9E,IAAA3B,EACA,OAAAkB,EACA,MAAOS,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAA,CACvD,EAGI,EACT,CACF,CAKA,MAAc,cAAcsE,EAAuC,CACjE,GAAI,CAAC,KAAK,kBAAoB,CAAC,KAAK,UAClC,OAIE,KAAK,WACP,KAAK,UAAU,KAAK,aAAc,CAAE,MAAAA,EAAO,EAI7C,IAAIY,EAAuD,CAAA,EAC3D,GAAIZ,EAAM,YACR,GAAI,CACF,MAAMa,EAAc,KAAK,MAAMb,EAAM,WAAW,EAGhD,GAAIa,EAAY,UAAY,MAAM,QAAQA,EAAY,QAAQ,EAAG,CAE/D,MAAMC,EAAcD,EAAY,SAAS,OAAQE,GAAaA,EAAI,OAAS,MAAM,EAAE,IAAA,EAC/ED,GAAa,UACX,OAAOA,EAAY,SAAY,SACjCF,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAME,EAAY,QAAS,EACpD,MAAM,QAAQA,EAAY,OAAO,IAE1CF,EAAeE,EAAY,QAAQ,IAAKrJ,IAAe,CACrD,KAAMA,EAAK,MAAQ,OACnB,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,GAGR,SAAWoJ,EAAY,OAErBD,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAMC,EAAY,OAAQ,UACnDA,EAAY,MAErBD,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAMC,EAAY,MAAO,UAClDA,EAAY,UAAY,MAAM,QAAQA,EAAY,QAAQ,EAAG,CAEtE,MAAMG,EAAcH,EAAY,SAAS,OAAQhE,GAAWA,EAAE,OAAS,MAAM,EAAE,IAAA,EAC3EmE,GAAa,OAAS,MAAM,QAAQA,EAAY,KAAK,IACvDJ,EAAeI,EAAY,MAAM,IAAKvJ,IAAe,CACnD,KAAM,OACN,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,EAEN,SAAWoJ,EAAY,SAAS,UAAY,MAAM,QAAQA,EAAY,QAAQ,QAAQ,EAAG,CAEvF,MAAMG,EAAcH,EAAY,QAAQ,SAAS,OAAQhE,GAAWA,EAAE,OAAS,MAAM,EAAE,IAAA,EACnFmE,GAAa,OAAS,MAAM,QAAQA,EAAY,KAAK,IACvDJ,EAAeI,EAAY,MAAM,IAAKvJ,IAAe,CACnD,KAAM,OACN,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,EAEN,CACF,OAASiE,EAAK,CACR,KAAK,aACP,KAAK,YAAY,MAAM,gDAAiD,CACtE,IAAKsE,EAAM,IACX,MAAOtE,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAA,CACvD,EAGHkF,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAM,2BAA4B,CACtE,CAIF,MAAMK,EAAgB,CACpB,aAAc,KAAK,YAAY,eAAejB,EAAM,GAAG,EACvD,gBAAiB,EAAA,EAGnB,GAAI,CACE,KAAK,WACP,KAAK,UAAU,KAAK,wCAAyC,CAC3D,UAAW,KAAK,UAChB,QAASA,EAAM,EAAA,CAChB,EAGH,MAAM/H,EAAW,MAAM,KAAK,iBAAiB,aAAa,KAAK,UAAW,CACxE,YAAa,SACb,SAAUgJ,CAAA,CACX,EAGD,KAAK,eAAe,IAAIjB,EAAM,GAAI/H,EAAS,SAAS,EAEhD,KAAK,WACP,KAAK,UAAU,KAAK,0BAA2B,CAC7C,QAAS+H,EAAM,GACf,UAAW/H,EAAS,SAAA,CACrB,CAEL,OAAS7C,EAAO,CACV,KAAK,aACP,KAAK,YAAY,MAAM,oCAAqC,CAC1D,IAAK4K,EAAM,IACX,UAAW,KAAK,UAChB,MAAO5K,
aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,CAAA,CAC7D,CAEL,CACF,CAKA,MAAc,eAAe4K,EAAuC,CAClE,GAAI,CAAC,KAAK,kBAAoB,CAAC,KAAK,UAAW,OAG3C,KAAK,WACP,KAAK,UAAU,KAAK,cAAe,CAAE,MAAAA,EAAO,EAG9C,MAAMkB,EAAY,KAAK,eAAe,IAAIlB,EAAM,EAAE,EAClD,GAAI,CAACkB,EACH,OAIF,MAAMC,EAA0C,CAC9C,aAAc,KAAK,YAAY,eAAenB,EAAM,GAAG,EACvD,OAAQA,EAAM,OACd,SAAUA,EAAM,UAAA,EAIlB,GAAIA,EAAM,eAAe,SAAU,CACjC,MAAMoB,EAAgBpB,EAAM,cAAc,SACtCoB,EAAc,QAAOD,EAAmB,MAAQC,EAAc,OAC9DA,EAAc,QAAOD,EAAmB,MAAQC,EAAc,OAC9DA,EAAc,eAAcD,EAAmB,aAAeC,EAAc,cAC5EA,EAAc,gBAAeD,EAAmB,cAAgBC,EAAc,eAC9EA,EAAc,oBAAmBD,EAAmB,kBAAoBC,EAAc,mBACtFA,EAAc,OAAMD,EAAmB,KAAOC,EAAc,KAClE,CAGA,IAAIC,EAA+B,CAAA,EACnC,GAAIrB,EAAM,cAAe,CACvB,MAAM7G,EAAS6G,EAAM,cAEjB7G,GAAQ,SAAS,OAAS,MAAM,QAAQA,EAAO,QAAQ,KAAK,EAE9DkI,EAAgBlI,EAAO,QAAQ,MACtBA,GAAQ,OAAS,MAAM,QAAQA,EAAO,KAAK,EAEpDkI,EAAgBlI,EAAO,MACdA,GAAQ,MACjBkI,EAAc,KAAK,CAAE,KAAM,OAAiB,KAAM,iBAAiBlI,EAAO,KAAK,GAAI,EAGnFkI,EAAgB,CAAA,CAEpB,MAAWrB,EAAM,cAEfqB,EAAc,KAAK,CAAE,KAAM,OAAiB,KAAM,yBAA0B,EAG9E,GAAI,CACF,MAAMC,EAAkB,CACtB,MAAOD,EACP,SAAUF,EACV,IAAKnB,EAAM,eAAe,SAAS,EAAA,EAIjCvK,EAAAA,cAAc,mBAChB6L,EAAW,IAAM,KAAK,UAAUtB,CAAK,GAGvC,MAAM,KAAK,iBAAiB,cAAc,KAAK,UAAWkB,EAAWI,CAAU,EAG/E,KAAK,eAAe,OAAOtB,EAAM,EAAE,CACrC,OAAS5K,EAAO,CACV,KAAK,aACP,KAAK,YAAY,MAAM,qCAAsC,CAC3D,IAAK4K,EAAM,IACX,UAAAkB,EACA,UAAW,KAAK,UAChB,MAAO9L,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,CAAA,CAC7D,EAGH,KAAK,eAAe,OAAO4K,EAAM,EAAE,CACrC,CACF,CACF,CC1cA,MAAMuB,EAAW,CACP,OACA,iBAA4C,KAC5C,YACA,YACA,aACA,UAER,aAAc,CACZ,MAAMC,EAAY/L,EAAAA,cAAc,cAChC,KAAK,aAAe+L,GAAa,KACjC,KAAK,UAAY/L,gBAAc,WAAa,GAE5C,KAAK,YAAc,IAAI8E,EAGvB,MAAMvF,EACJS,EAAAA,cAAc,YAAcP,EAAK,KAAKgL,EAAG,OAAA,EAAU,iBAAkB,UAAW,WAAW,QAAQ,GAAG,MAAM,EAC9G,KAAK,OAAS,IAAInL,EAAWC,CAAO,EAEpC,IAAIyM,EAAShM,EAAAA,cAAc,UACvBgM,EAAO,WAAW,QAAQ,IAAY,WAAaA,EAAO,MAAM,CAAC,EAC5DA,EAAO,WAAW,OAAO,MAAY,UAAYA,EAAO,MAAM,CAAC,GACxE,MAAMC,EAASjM,gBAAc,QAAU,GACjCkM,EAAiBlM,gBAAc,gBAAkB,GAIvD,GAFqBA,EAAAA,cAAc,uBAAyB,CAAC,EAAE,KAAK,WAAagM,IAE7DA,EAClB,GAAI,CACF,KAAK,iBAAmB,IAAIG,mBAAiB,CAC3C,OAAAH,EACA,eAAAE,EACA,OAAAD,EACA,OAAQ,KAAK,MAAA,CACd,CACH,OAAStM,EAAO,CACVL,EAAW,eACb,KAAK,OAAO,MAAM,yCAA0CK,CAAc,CAE9E,CAGF,KAAK,YAAcyK,EAAe,YAAY,CAC5C,aAAc,KAAK,aACnB,YAAapK,EAAAA,cAAc,aAAeA,EAAAA,cAAc,cACxD,YAAa,KAAK,YAClB,YAAasK,EAAAA,KACb,iBAAkB,KAAK,kBAAoB,OAC3C,UAAW,KAAK,SAAA,CACjB,CACH,CAEO,OAAc,CACnB,KAAK,YAAY,MAAA,CACnB,CACF,CAGA,MAAM8B,GAAa,IAAIN,GACvBM,GAAW,MAAA"}
|
|
1
|
+
{"version":3,"file":"networkLog.js","sources":["../src/llms/loggers/file.ts","../src/llms/parsers/costCalculator.ts","../src/llms/parsers/claude.ts","../__vite-browser-external:node:zlib","../src/llms/parsers/gemini.ts","../src/llms/parsers/index.ts","../src/llms/providers/index.ts","../src/llms/enhancers/claude.ts","../src/llms/enhancers/gemini.ts","../src/llms/enhancers/index.ts","../src/llms/interceptors/instruments/fetchInstrument.ts","../src/llms/interceptors/instruments/httpInstrument.ts","../src/llms/interceptors/index.ts","../src/hooks/networkLog.ts"],"sourcesContent":["// filepath: /Users/vuongngo/workspace/agiflow/apps/agent-cli/src/llms/loggers/file.ts\n/**\n * FileLogger - Direct file logging without stdout pollution\n *\n * This logger writes structured JSON logs directly to a file stream\n * to avoid interfering with stdout when used as an imported module\n */\n\nimport * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport { SubEnvManager } from '../../config/subenv';\n\nexport interface LogEntry {\n time: string;\n level: number;\n msg: string;\n [key: string]: any;\n}\n\nexport class FileLogger {\n private logStream: fs.WriteStream;\n private logPath: string;\n\n constructor(logPath: string) {\n this.logPath = logPath;\n\n // Ensure directory exists\n const logDir = path.dirname(this.logPath);\n if (!fs.existsSync(logDir)) {\n fs.mkdirSync(logDir, { recursive: true });\n }\n\n // Create write stream for direct file logging\n this.logStream = fs.createWriteStream(this.logPath, { flags: 'a' });\n\n // Handle stream errors gracefully\n this.logStream.on('error', (error) => {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Error writing to log file: ${error.message}`);\n });\n }\n\n /**\n * Log an info-level message\n */\n info(message: string, data?: Record<string, any>): void {\n this.log(30, message, data);\n }\n\n /**\n * Log a debug-level message\n */\n debug(message: string, data?: Record<string, any>): void {\n this.log(20, message, data);\n }\n\n /**\n * Log a warning-level message\n */\n warn(message: string, data?: Record<string, any>): void {\n this.log(40, message, data);\n }\n\n /**\n * Log an error-level message\n */\n error(message: string, data?: Record<string, any>): void {\n this.log(50, message, data);\n }\n\n /**\n * Log a message with specified level\n */\n log(level: number, message: string, data?: Record<string, any>): void {\n const logEntry: LogEntry = {\n time: new Date().toISOString(),\n level,\n msg: message,\n ...data,\n };\n\n try {\n this.logStream.write(JSON.stringify(logEntry) + '\\n');\n } catch (error) {\n // Only log to stderr to avoid stdout pollution\n console.error(`[FileLogger] Failed to write log entry: ${error}`);\n }\n }\n\n /**\n * Close the log stream\n */\n close(): void {\n if (this.logStream && !this.logStream.destroyed) {\n this.logStream.end();\n }\n }\n\n /**\n * Get the current log file path\n */\n getLogPath(): string {\n return this.logPath;\n }\n\n /**\n * Check if debug logging should be enabled based on environment\n */\n static shouldDebug(): boolean {\n return SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug;\n }\n}\n","/**\n * Cost Calculator for LLM Models\n *\n * Calculates costs based on token usage using pricing data from cost.json\n */\n\nimport costData from '../../../cost.json';\n\nexport interface ModelCost {\n totalCost: number;\n inputCost: number;\n outputCost: number;\n cacheReadCost?: number;\n cacheCreationCost?: number;\n}\n\nexport interface 
TokenUsage {\n inputTokens?: number;\n outputTokens?: number;\n cachedInputTokens?: number;\n cacheCreationInputTokens?: number;\n}\n\nexport class CostCalculator {\n /**\n * Calculate cost for a given model and token usage\n */\n static calculateCost(modelName: string, usage: TokenUsage): ModelCost | null {\n const modelPricing = costData[modelName as keyof typeof costData];\n\n if (!modelPricing) {\n return null;\n }\n\n const inputTokens = usage.inputTokens || 0;\n const outputTokens = usage.outputTokens || 0;\n const cachedInputTokens = usage.cachedInputTokens || 0;\n const cacheCreationInputTokens = usage.cacheCreationInputTokens || 0;\n\n // Get rates from pricing data with type guards\n const inputRate = 'input_cost_per_token' in modelPricing ? modelPricing.input_cost_per_token : 0;\n const outputRate = 'output_cost_per_token' in modelPricing ? modelPricing.output_cost_per_token : 0;\n const cacheReadRate = 'cache_read_input_token_cost' in modelPricing ? modelPricing.cache_read_input_token_cost : 0;\n const cacheCreationRate =\n 'cache_creation_input_token_cost' in modelPricing ? modelPricing.cache_creation_input_token_cost : 0;\n\n // Calculate costs\n const inputCost = inputTokens * inputRate;\n const outputCost = outputTokens * outputRate;\n const cacheReadCost = cachedInputTokens * cacheReadRate;\n const cacheCreationCost = cacheCreationInputTokens * cacheCreationRate;\n\n const totalCost = inputCost + outputCost + cacheReadCost + cacheCreationCost;\n\n return {\n totalCost,\n inputCost,\n outputCost,\n cacheReadCost: cachedInputTokens > 0 ? cacheReadCost : undefined,\n cacheCreationCost: cacheCreationInputTokens > 0 ? cacheCreationCost : undefined,\n };\n }\n}\n","/**\n * Claude API Streaming Response Parser\n *\n * Parses Claude API streaming responses (Server-Sent Events format)\n * into structured messages following the blazegent message format.\n *\n * Handles:\n * - Text content blocks\n * - Tool use content blocks\n * - Token usage information\n * - Message metadata\n * - Cost calculation based on token usage\n */\n\nimport type { MessagePart } from '../messages';\nimport { CostCalculator } from './costCalculator';\n\nexport class ClaudeParser {\n /**\n * Parses Claude API streaming response into structured message format\n *\n * @param rawText - Raw streaming response text in SSE format\n * @returns Parsed message object with metadata and usage information\n */\n static parseStreamingResponse(\n rawText: string,\n ):\n | { message: { id: string; role: 'assistant'; createdAt: string; parts: MessagePart[] }; metadata: any }\n | { error: string; rawText: string } {\n try {\n const lines = rawText.split('\\n');\n const contentBlocks: any[] = [];\n let messageMetadata: any = null;\n let usage: any = null;\n const blockMap = new Map<number, any>(); // Track content blocks by index\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n const dataStr = line.substring(6).trim();\n if (dataStr) {\n try {\n const data = JSON.parse(dataStr);\n\n // Extract message metadata\n if (data.type === 'message_start' && data.message) {\n messageMetadata = data.message;\n }\n\n // Handle content block start (text or tool_use)\n if (data.type === 'content_block_start') {\n const block = {\n index: data.index,\n type: data.content_block.type,\n id: data.content_block.id,\n name: data.content_block.name,\n input: data.content_block.input || {},\n text: '',\n partialJson: '',\n };\n blockMap.set(data.index, block);\n }\n\n // Handle content block deltas (text or JSON input)\n if 
(data.type === 'content_block_delta') {\n const block = blockMap.get(data.index);\n if (block) {\n if (data.delta.type === 'text_delta') {\n block.text += data.delta.text;\n } else if (data.delta.type === 'input_json_delta') {\n block.partialJson += data.delta.partial_json;\n }\n }\n }\n\n // Handle content block stop\n if (data.type === 'content_block_stop') {\n const block = blockMap.get(data.index);\n if (block) {\n // Try to parse accumulated JSON for tool inputs\n if (block.partialJson) {\n try {\n block.input = JSON.parse(block.partialJson);\n } catch (parseError) {\n // Pure telemetry - no mutations, no logging\n // Leave block.input unchanged\n }\n }\n contentBlocks.push(block);\n }\n }\n\n // Extract final usage information\n if (data.type === 'message_delta' && data.usage) {\n usage = data.usage;\n }\n } catch {\n // Ignore malformed JSON\n }\n }\n }\n }\n\n // Convert content blocks to blazegent parts format\n const parts: MessagePart[] = contentBlocks.map((block) => {\n if (block.type === 'text') {\n return {\n type: 'text' as const,\n text: block.text,\n };\n } else if (block.type === 'tool_use') {\n return {\n type: 'tool-invocation' as const,\n toolInvocation: {\n toolCallId: block.id,\n toolName: block.name,\n args: block.input,\n state: 'call' as const,\n },\n };\n }\n // For unknown block types, create a text part with error message\n return {\n type: 'text' as const,\n text: `Unknown content block type: ${block.type}`,\n };\n });\n\n // Create message in blazegent format\n const message = {\n id: messageMetadata?.id || ClaudeParser.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: parts.length > 0 ? parts : [{ type: 'text' as const, text: '' }],\n };\n\n // Combine usage information from both sources\n const combinedUsage = {\n ...(messageMetadata?.usage || {}),\n ...(usage || {}),\n };\n\n // Normalize token usage data\n const tokenUsage = {\n inputTokens: combinedUsage.input_tokens || combinedUsage.inputTokens,\n outputTokens: combinedUsage.output_tokens || combinedUsage.outputTokens,\n cachedInputTokens: combinedUsage.cache_read_input_tokens || combinedUsage.cachedInputTokens,\n cacheCreationInputTokens: combinedUsage.cache_creation_input_tokens || combinedUsage.cacheCreationInputTokens,\n totalTokens: (combinedUsage.input_tokens || 0) + (combinedUsage.output_tokens || 0),\n cacheCreation: combinedUsage.cache_creation,\n serviceTier: combinedUsage.service_tier,\n };\n\n // Calculate cost if model is available\n const modelName = messageMetadata?.model;\n const cost = modelName ? 
CostCalculator.calculateCost(modelName, tokenUsage) : null;\n\n // Add metadata with comprehensive usage information\n const result: any = {\n message,\n metadata: {\n model: modelName,\n usage: tokenUsage,\n cost,\n originalMessageId: messageMetadata?.id,\n stopReason: messageMetadata?.stop_reason,\n contentLength: parts.reduce((len, part) => {\n if (part.type === 'text') {\n return len + part.text.length;\n } else if (part.type === 'tool-invocation') {\n return len + JSON.stringify(part.toolInvocation.args || '').length;\n }\n return len + JSON.stringify(part).length;\n }, 0),\n streamingEvents: lines.filter((line) => line.startsWith('event:')).length,\n },\n };\n\n return result;\n } catch (error) {\n return {\n error: `Failed to parse streaming response: ${error}`,\n rawText,\n };\n }\n }\n\n /**\n * Simple ID generator for messages\n */\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\n// Export for backward compatibility\nexport const parseStreamingResponseToMessage = ClaudeParser.parseStreamingResponse;\n"," export default new Proxy({}, {\n get(_, key) {\n throw new Error(`Module \"node:zlib\" has been externalized for browser compatibility. Cannot access \"node:zlib.${key}\" in client code. See https://vite.dev/guide/troubleshooting.html#module-externalized-for-browser-compatibility for more details.`)\n }\n })","/**\n * Gemini API Streaming / Regular Response Parser\n */\n\nimport * as zlib from 'node:zlib';\n\nexport class GeminiParser {\n /** Entry point used by interceptor */\n static parseStreamingResponse(rawText: string): any {\n try {\n const isSse = rawText.includes('\\ndata:') || rawText.startsWith('data:');\n if (isSse) return this.parseSse(rawText);\n return this.parseRegular(rawText);\n } catch (error) {\n return { error: `Failed to parse Gemini response: ${error}`, rawText };\n }\n }\n\n /** Parse non-stream (may be gzip) */\n private static parseRegular(rawText: string) {\n const looksGzip = this.isProbablyGzip(rawText) || this.hasHighControlCharRatio(rawText);\n let working = rawText;\n if (looksGzip) {\n const gun = this.tryGunzip(rawText);\n if (gun.ok) working = gun.text;\n }\n try {\n const json = JSON.parse(working);\n const response = json.response || json; // generateContent returns { response: { candidates: [...] }}\n const { fullText, finishReason, texts, nextSpeakers, reasoningSegments } = this.extractCandidateText(\n response.candidates,\n );\n const usage = this.normalizeUsage(response.usageMetadata || json.usageMetadata || json);\n const parts =\n texts.length > 0\n ? texts.map((t) => ({ type: 'text', text: t }))\n : fullText\n ? [{ type: 'text', text: fullText }]\n : [];\n return {\n message: {\n id: response.responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts,\n },\n metadata: {\n model: response.modelVersion || json.modelVersion,\n usage,\n responseId: response.responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: 0,\n compressed: looksGzip || undefined,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? 
reasoningSegments : undefined,\n },\n };\n } catch {\n return {\n message: {\n id: this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: [{ type: 'text', text: working }],\n },\n metadata: { raw: true },\n };\n }\n }\n\n /** Parse SSE stream */\n private static parseSse(rawText: string) {\n const lines = rawText.split(/\\r?\\n/);\n let modelVersion: string | undefined;\n let responseId: string | undefined;\n let finishReason: string | undefined;\n let usageMetadata: any;\n const collectedTexts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n\n for (const line of lines) {\n if (!line.startsWith('data:')) continue;\n const jsonStr = line.slice(5).trim();\n if (!jsonStr) continue;\n try {\n const payload = JSON.parse(jsonStr);\n const response = payload.response || {};\n if (response.modelVersion) modelVersion = response.modelVersion;\n if (response.responseId) responseId = response.responseId;\n if (response.usageMetadata) usageMetadata = response.usageMetadata;\n const extracted = this.extractCandidateText(response.candidates);\n if (!finishReason && extracted.finishReason) finishReason = extracted.finishReason;\n if (extracted.texts.length) collectedTexts.push(...extracted.texts);\n if (extracted.nextSpeakers.length) nextSpeakers.push(...extracted.nextSpeakers);\n if (extracted.reasoningSegments.length) reasoningSegments.push(...extracted.reasoningSegments);\n } catch {\n // ignore chunk errors\n }\n }\n\n const fullText = collectedTexts.join('');\n const usage = this.normalizeUsage(usageMetadata);\n return {\n message: {\n id: responseId || this.generateId(),\n role: 'assistant' as const,\n createdAt: new Date().toISOString(),\n parts: collectedTexts.length ? collectedTexts.map((t) => ({ type: 'text', text: t })) : [],\n },\n metadata: {\n model: modelVersion,\n usage,\n responseId,\n finishReason,\n contentLength: fullText.length,\n streamingEvents: lines.filter((l) => l.startsWith('data:')).length,\n nextSpeakers: nextSpeakers.length ? nextSpeakers : undefined,\n reasoningSegments: reasoningSegments.length ? 
reasoningSegments : undefined,\n },\n };\n }\n\n /** Extract text and reasoning from candidates array */\n private static extractCandidateText(candidates: any): {\n fullText: string;\n finishReason?: string;\n texts: string[];\n finishReasonFound?: string;\n nextSpeakers: string[];\n reasoningSegments: string[];\n } {\n const texts: string[] = [];\n const nextSpeakers: string[] = [];\n const reasoningSegments: string[] = [];\n let finishReason: string | undefined;\n if (Array.isArray(candidates)) {\n for (const cand of candidates) {\n if (!finishReason && cand?.finishReason) finishReason = cand.finishReason;\n const parts = cand?.content?.parts || [];\n if (Array.isArray(parts)) {\n for (const part of parts) {\n if (part && typeof part === 'object') {\n if (part.thought === true) continue; // skip internal thoughts\n const raw = part.text;\n if (typeof raw === 'string' && raw.length) {\n // Detect embedded JSON reasoning blob\n const trimmed = raw.trim();\n if (trimmed.startsWith('{') && trimmed.endsWith('}')) {\n try {\n const parsed = JSON.parse(trimmed);\n if (parsed && typeof parsed === 'object') {\n if (typeof parsed.reasoning === 'string') {\n reasoningSegments.push(parsed.reasoning);\n texts.push(parsed.reasoning);\n }\n if (parsed.next_speaker && typeof parsed.next_speaker === 'string')\n nextSpeakers.push(parsed.next_speaker);\n continue; // already added reasoning as text\n }\n } catch {\n // fall through to treat as raw text\n }\n }\n texts.push(raw);\n }\n }\n }\n }\n }\n }\n return {\n fullText: texts.join(''),\n finishReason,\n texts,\n finishReasonFound: finishReason,\n nextSpeakers,\n reasoningSegments,\n };\n }\n\n private static tryGunzip(str: string): { ok: boolean; text: string } {\n try {\n const buf = Buffer.from(str, 'latin1');\n const out = zlib.gunzipSync(buf);\n return { ok: true, text: out.toString('utf8') };\n } catch {\n return { ok: false, text: str };\n }\n }\n\n private static hasHighControlCharRatio(text: string): boolean {\n if (!text) return false;\n let control = 0;\n const sampleLen = Math.min(text.length, 256);\n for (let i = 0; i < sampleLen; i++) {\n const code = text.charCodeAt(i);\n if (code < 32 && code !== 9 && code !== 10 && code !== 13) control++;\n }\n return control / sampleLen > 0.1;\n }\n\n private static isProbablyGzip(text: string): boolean {\n return text.length >= 2 && text.charCodeAt(0) === 0x1f && text.charCodeAt(1) === 0x8b;\n }\n\n private static normalizeUsage(usage: any) {\n if (!usage || typeof usage !== 'object') {\n return { inputTokens: undefined, outputTokens: undefined, totalTokens: undefined };\n }\n const input = usage.promptTokenCount ?? usage.prompt_tokens;\n const output = usage.candidatesTokenCount ?? usage.candidates_tokens;\n const total =\n usage.totalTokenCount ?? (typeof input === 'number' && typeof output === 'number' ? 
input + output : undefined);\n return { inputTokens: input, outputTokens: output, totalTokens: total };\n }\n\n private static generateId(): string {\n return Date.now().toString(36) + Math.random().toString(36).slice(2, 10);\n }\n}\n\nexport const parseGeminiStreamingResponse = GeminiParser.parseStreamingResponse;\n","/**\n * LLM Parser Factory\n *\n * Routes streaming responses to the appropriate parser based on URL endpoint patterns.\n * Supports multiple LLM providers with extensible parser registry.\n */\n\nimport { ClaudeParser } from './claude';\nimport { GeminiParser } from './gemini';\n\n// Parser interface for consistency\ninterface StreamingParser {\n parseStreamingResponse(rawText: string): any;\n}\n\n// Parser registry mapping URL patterns to parser classes\ninterface ParserMapping {\n patterns: RegExp[];\n parser: StreamingParser;\n name: string;\n}\n\nexport class LlmParser {\n private static parsers: ParserMapping[] = [\n {\n name: 'Claude (Anthropic)',\n patterns: [\n /\\/publishers\\/anthropic\\/models/i,\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i, // Anthropic API endpoint\n ],\n parser: ClaudeParser,\n },\n {\n name: 'Google Gemini',\n patterns: [\n /generativelanguage\\.googleapis\\.com/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /streamGenerateContent/i,\n /generateContent/i,\n ],\n parser: GeminiParser,\n },\n // Future parsers can be added here:\n // {\n // name: 'OpenAI GPT',\n // patterns: [/openai\\.com/i, /api\\.openai\\.com/i, /\\/v1\\/chat\\/completions/i],\n // parser: OpenAIParser,\n // },\n ];\n\n /**\n * Parse streaming response by detecting the appropriate parser based on URL\n *\n * @param url - The API endpoint URL\n * @param rawText - Raw streaming response text\n * @returns Parsed message object or null if no parser matches\n */\n static parseStreamingResponse(url: string, rawText: string): any {\n const parser = LlmParser.getParserForUrl(url);\n\n if (!parser) {\n return {\n error: `No parser found for URL: ${url}`,\n rawText,\n supportedProviders: LlmParser.parsers.map((p) => p.name),\n };\n }\n\n try {\n const result = parser.parseStreamingResponse(rawText);\n\n // Add parser metadata to the result\n if (result && typeof result === 'object' && !result.error) {\n result.metadata = {\n ...result.metadata,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n\n return result;\n } catch (error) {\n return {\n error: `Parser failed: ${error}`,\n rawText,\n parser: LlmParser.getParserNameForUrl(url),\n };\n }\n }\n\n /**\n * Get the appropriate parser for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser instance or null if no match\n */\n static getParserForUrl(url: string): StreamingParser | null {\n if (!url) return null;\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.parser;\n }\n }\n\n return null;\n }\n\n /**\n * Get the parser name for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Parser name or 'unknown'\n */\n static getParserNameForUrl(url: string): string {\n if (!url) return 'unknown';\n\n for (const mapping of LlmParser.parsers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.name;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Check if streaming response parsing is supported for a URL\n *\n * @param url - The API endpoint URL\n * @returns True if a parser is available\n */\n static isSupported(url: string): boolean {\n return LlmParser.getParserForUrl(url) !== 
null;\n }\n\n /**\n * Get list of all supported LLM providers\n *\n * @returns Array of supported provider names\n */\n static getSupportedProviders(): string[] {\n return LlmParser.parsers.map((p) => p.name);\n }\n\n /**\n * Register a new parser for additional LLM providers\n *\n * @param name - Human-readable parser name\n * @param patterns - Array of regex patterns to match URLs\n * @param parser - Parser class implementing StreamingParser interface\n */\n static registerParser(name: string, patterns: RegExp[], parser: StreamingParser): void {\n LlmParser.parsers.push({\n name,\n patterns,\n parser,\n });\n }\n}\n","/**\n * LLM Provider Detection and Utility Class\n *\n * Handles identification of different LLM providers based on URL patterns\n * and provides utilities for working with LLM API calls.\n */\n\nexport class LlmProvider {\n private readonly LLM_PATTERNS = [\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /openai\\.com/i,\n /api\\.openai\\.com/i,\n /gemini\\.google/i,\n /generativelanguage\\.googleapis/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /bedrock.*\\.amazonaws/i,\n /cognitive\\.microsoft/i,\n /azure\\.openai/i,\n /cohere\\.ai/i,\n /huggingface\\.co/i,\n /replicate\\.com/i,\n /together\\.xyz/i,\n /mistral\\.ai/i,\n /groq\\.com/i,\n /perplexity\\.ai/i,\n /ollama/i,\n /localhost:\\d+\\/v1/i,\n /127\\.0\\.0\\.1:\\d+\\/v1/i,\n /\\/v1\\/chat\\/completions/i,\n /\\/v1\\/completions/i,\n /\\/v1\\/embeddings/i,\n /\\/v1\\/messages/i,\n /\\/publishers\\/anthropic\\/models/i,\n ];\n\n private readonly PROVIDER_MAP = [\n { pattern: /\\/publishers\\/anthropic\\/models/i, provider: 'anthropic' },\n { pattern: /anthropic\\.com|claude\\.ai/i, provider: 'anthropic' },\n { pattern: /openai\\.com/i, provider: 'openai' },\n { pattern: /gemini\\.google|generativelanguage\\.googleapis|cloudcode-pa\\.googleapis/i, provider: 'google' },\n { pattern: /bedrock.*\\.amazonaws/i, provider: 'aws-bedrock' },\n { pattern: /cognitive\\.microsoft|azure\\.openai/i, provider: 'azure' },\n { pattern: /cohere\\.ai/i, provider: 'cohere' },\n { pattern: /huggingface\\.co/i, provider: 'huggingface' },\n { pattern: /replicate\\.com/i, provider: 'replicate' },\n { pattern: /together\\.xyz/i, provider: 'together' },\n { pattern: /mistral\\.ai/i, provider: 'mistral' },\n { pattern: /groq\\.com/i, provider: 'groq' },\n { pattern: /perplexity\\.ai/i, provider: 'perplexity' },\n { pattern: /ollama|localhost:\\d+\\/v1|127\\.0\\.0\\.1:\\d+\\/v1/i, provider: 'local' },\n ];\n\n /**\n * Check if a URL is related to an LLM service\n * @param url The URL to check\n * @returns true if the URL is LLM-related, false otherwise\n */\n isLLMCall(url: string): boolean {\n if (!url) return false;\n return this.LLM_PATTERNS.some((pattern) => pattern.test(url));\n }\n\n /**\n * Detect which LLM provider a URL belongs to\n * @param url The URL to analyze\n * @returns The provider name or 'unknown' if not recognized\n */\n detectProvider(url: string): string {\n if (!url) return 'unknown';\n\n for (const { pattern, provider } of this.PROVIDER_MAP) {\n if (pattern.test(url)) {\n return provider;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Get all supported provider names\n * @returns Array of supported provider names\n */\n getSupportedProviders(): string[] {\n return Array.from(new Set(this.PROVIDER_MAP.map(({ provider }) => provider)));\n }\n\n /**\n * Check if a specific provider is supported\n * @param providerName The provider name to check\n * @returns true if the provider is supported, false otherwise\n */\n 
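The parser registry is extensible at runtime via `registerParser`, mirroring the commented-out OpenAI entry. A minimal sketch; the `OpenAIParser` object and the import path are assumptions for illustration:

```ts
import { LlmParser } from './parsers'; // import path is an assumption

// Hypothetical parser satisfying the StreamingParser interface.
const OpenAIParser = {
  parseStreamingResponse(rawText: string) {
    // Naive sketch: count `data: {...}` SSE events and return an empty parts list.
    const events = rawText
      .split('\n')
      .filter((line) => line.startsWith('data:') && !line.includes('[DONE]'))
      .map((line) => JSON.parse(line.slice(5).trim()));
    return { message: { parts: [] }, metadata: { eventCount: events.length } };
  },
};

LlmParser.registerParser('OpenAI GPT', [/api\.openai\.com/i, /\/v1\/chat\/completions/i], OpenAIParser);

// Routing is purely URL-based, so the new provider is now recognised:
LlmParser.isSupported('https://api.openai.com/v1/chat/completions'); // true
```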
isProviderSupported(providerName: string): boolean {\n return this.getSupportedProviders().includes(providerName.toLowerCase());\n }\n\n /**\n * Get the pattern used for detecting a specific provider\n * @param providerName The provider name\n * @returns The regex pattern or null if provider not found\n */\n getProviderPattern(providerName: string): RegExp | null {\n const mapping = this.PROVIDER_MAP.find(({ provider }) => provider === providerName.toLowerCase());\n return mapping ? mapping.pattern : null;\n }\n}\n\nexport default LlmProvider;\n","/**\n * Claude API Request Enhancer\n *\n * Enhances Claude API requests by injecting context data into the system prompt.\n * This allows passing task descriptions, comments, and other contextual information\n * to Claude without modifying the user's original prompt.\n */\n\nimport { SubEnvManager } from '../../config/subenv';\n\nexport class ClaudeEnhancer {\n /**\n * Enhance Claude API request by injecting context data into system prompt\n *\n * @param requestBody - The original request body as a string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(requestBody: string): string {\n const contextData = SubEnvManager.contextData;\n\n // If no context data, return original request\n if (!contextData) {\n return requestBody;\n }\n\n try {\n const request = JSON.parse(requestBody);\n\n // Ensure system is an array\n if (!request.system) {\n request.system = [];\n } else if (typeof request.system === 'string') {\n // Convert string system to array format\n request.system = [\n {\n type: 'text',\n text: request.system,\n },\n ];\n } else if (!Array.isArray(request.system)) {\n // If it's an object but not an array, wrap it\n request.system = [request.system];\n }\n\n // Insert context after the first system message (which may be auth-related)\n // This avoids adding extra cache control breakpoints\n const contextSystemMessage = {\n type: 'text',\n text: `# Task Context\\n\\n${contextData}`,\n };\n\n // Insert at position 1 (after first message) instead of prepending\n if (request.system.length > 0) {\n request.system.splice(1, 0, contextSystemMessage);\n } else {\n request.system.push(contextSystemMessage);\n }\n\n return JSON.stringify(request);\n } catch (error) {\n // If parsing fails, return original request\n console.error('Failed to enhance Claude request:', error);\n return requestBody;\n }\n }\n}\n","/**\n * Gemini API Request Enhancer\n *\n * Enhances Gemini API requests by injecting context data into the system instruction.\n */\n\nimport { SubEnvManager } from '../../config/subenv';\n\nexport class GeminiEnhancer {\n /**\n * Enhance Gemini API request by injecting context data into system instruction\n *\n * @param requestBody - The original request body as a string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(requestBody: string): string {\n const contextData = SubEnvManager.contextData;\n\n // If no context data, return original request\n if (!contextData) {\n return requestBody;\n }\n\n try {\n const request = JSON.parse(requestBody);\n\n // Inject context into systemInstruction\n const contextInstruction = `# Task Context\\n\\n${contextData}`;\n\n if (!request.systemInstruction) {\n request.systemInstruction = {\n parts: [\n {\n text: contextInstruction,\n },\n ],\n };\n } else if (typeof request.systemInstruction === 'string') {\n // If systemInstruction is a string, convert to parts format\n request.systemInstruction = {\n parts: [\n {\n text: 
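For a concrete sense of what `ClaudeEnhancer.enhanceRequest` does when context data is configured, here is a hedged before/after sketch (the import path and the context value are assumptions):

```ts
import { ClaudeEnhancer } from './enhancers/claude'; // path is an assumption

// Assume SubEnvManager.contextData is "Fix the failing unit test".
const original = JSON.stringify({
  system: 'You are a coding agent.',
  messages: [{ role: 'user', content: 'hello' }],
});

const enhanced = JSON.parse(ClaudeEnhancer.enhanceRequest(original));
// enhanced.system is now an array, with the context inserted at index 1 so it
// lands after the original (possibly auth-related) entry and avoids adding an
// extra cache-control breakpoint:
// [
//   { type: 'text', text: 'You are a coding agent.' },
//   { type: 'text', text: '# Task Context\n\nFix the failing unit test' },
// ]
```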
contextInstruction,\n },\n {\n text: request.systemInstruction,\n },\n ],\n };\n } else if (request.systemInstruction.parts) {\n // Prepend context to existing parts\n request.systemInstruction.parts.unshift({\n text: contextInstruction,\n });\n }\n\n return JSON.stringify(request);\n } catch (error) {\n // If parsing fails, return original request\n console.error('Failed to enhance Gemini request:', error);\n return requestBody;\n }\n }\n}\n","/**\n * LLM Request Enhancer Factory\n *\n * Routes LLM requests to the appropriate enhancer based on URL endpoint patterns.\n * Enhancers inject context data (task descriptions, comments, etc.) into requests\n * without modifying the user's original prompt.\n */\n\nimport { ClaudeEnhancer } from './claude';\nimport { GeminiEnhancer } from './gemini';\n\n// Enhancer interface for consistency\ninterface RequestEnhancer {\n enhanceRequest(requestBody: string): string;\n}\n\n// Enhancer registry mapping URL patterns to enhancer classes\ninterface EnhancerMapping {\n patterns: RegExp[];\n enhancer: RequestEnhancer;\n name: string;\n}\n\nexport class LlmEnhancer {\n private static enhancers: EnhancerMapping[] = [\n {\n name: 'Claude (Anthropic)',\n patterns: [\n /\\/publishers\\/anthropic\\/models/i,\n /anthropic\\.com/i,\n /claude\\.ai/i,\n /\\/v1\\/messages/i, // Anthropic API endpoint\n ],\n enhancer: ClaudeEnhancer,\n },\n {\n name: 'Google Gemini',\n patterns: [\n /generativelanguage\\.googleapis\\.com/i,\n /cloudcode-pa\\.googleapis\\.com/i,\n /streamGenerateContent/i,\n /generateContent/i,\n ],\n enhancer: GeminiEnhancer,\n },\n // Future enhancers can be added here:\n // {\n // name: 'OpenAI GPT',\n // patterns: [/openai\\.com/i, /api\\.openai\\.com/i, /\\/v1\\/chat\\/completions/i],\n // enhancer: OpenAIEnhancer,\n // },\n ];\n\n /**\n * Enhance LLM request by detecting the appropriate enhancer based on URL\n *\n * @param url - The API endpoint URL\n * @param requestBody - Original request body as string\n * @returns Enhanced request body with context data injected\n */\n static enhanceRequest(url: string, requestBody: string): string {\n const enhancer = LlmEnhancer.getEnhancerForUrl(url);\n\n if (!enhancer) {\n // No enhancer found, return original request\n return requestBody;\n }\n\n try {\n return enhancer.enhanceRequest(requestBody);\n } catch (error) {\n console.error(`Enhancer failed for ${url}:`, error);\n return requestBody;\n }\n }\n\n /**\n * Get the appropriate enhancer for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Enhancer instance or null if no match\n */\n static getEnhancerForUrl(url: string): RequestEnhancer | null {\n if (!url) return null;\n\n for (const mapping of LlmEnhancer.enhancers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.enhancer;\n }\n }\n\n return null;\n }\n\n /**\n * Get the enhancer name for a given URL\n *\n * @param url - The API endpoint URL\n * @returns Enhancer name or 'unknown'\n */\n static getEnhancerNameForUrl(url: string): string {\n if (!url) return 'unknown';\n\n for (const mapping of LlmEnhancer.enhancers) {\n if (mapping.patterns.some((pattern) => pattern.test(url))) {\n return mapping.name;\n }\n }\n\n return 'unknown';\n }\n\n /**\n * Check if request enhancement is supported for a URL\n *\n * @param url - The API endpoint URL\n * @returns True if an enhancer is available\n */\n static isSupported(url: string): boolean {\n return LlmEnhancer.getEnhancerForUrl(url) !== null;\n }\n\n /**\n * Get list of all supported LLM providers\n 
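The Gemini variant targets `systemInstruction` instead of `system`; when a `parts` array already exists, the context part is prepended. An illustrative shape (values invented):

```ts
// Before enhancement:
// { "systemInstruction": { "parts": [{ "text": "Be concise." }] }, "contents": [...] }
//
// After GeminiEnhancer.enhanceRequest, with context data configured:
// {
//   "systemInstruction": {
//     "parts": [
//       { "text": "# Task Context\n\n<context data>" },
//       { "text": "Be concise." }
//     ]
//   },
//   "contents": [...]
// }
```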
*\n * @returns Array of supported provider names\n */\n static getSupportedProviders(): string[] {\n return LlmEnhancer.enhancers.map((e) => e.name);\n }\n\n /**\n * Register a new enhancer for additional LLM providers\n *\n * @param name - Human-readable enhancer name\n * @param patterns - Array of regex patterns to match URLs\n * @param enhancer - Enhancer class implementing RequestEnhancer interface\n */\n static registerEnhancer(name: string, patterns: RegExp[], enhancer: RequestEnhancer): void {\n LlmEnhancer.enhancers.push({\n name,\n patterns,\n enhancer,\n });\n }\n}\n","import { performance } from 'node:perf_hooks';\n// Removed zlib usage to avoid synchronous decompression & event loop blocking\n// import * as zlib from 'node:zlib';\nimport { LlmEnhancer } from '../../enhancers';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface FetchInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\nexport class FetchInstrument {\n private originalFetch: typeof fetch | null = null;\n // Maximum bytes to fully log / parse (helps protect memory usage)\n // Note: SSE streams are captured in full regardless of this limit\n private static readonly MAX_LOG_BYTES = 256 * 1024; // 256 KB\n private ctx: CommonContext;\n private extra: FetchInstrumentConfig;\n\n constructor(ctx: CommonContext, extra: FetchInstrumentConfig) {\n this.ctx = ctx;\n this.extra = extra;\n }\n\n patch(): void {\n if (typeof globalThis.fetch !== 'function' || this.originalFetch) return;\n\n this.originalFetch = globalThis.fetch.bind(globalThis);\n const ctx = this.ctx;\n const extra = this.extra;\n\n globalThis.fetch = (async (resource: any, init?: any) => {\n const start = performance.now();\n let url = '';\n try {\n url = typeof resource === 'string' ? resource : resource?.url || '';\n } catch {}\n const method = (init?.method || resource?.method || 'GET').toUpperCase();\n\n // --- Extract request body safely without consuming streams ---\n let requestBody = '';\n try {\n const body: any = init?.body ?? resource?.body;\n if (body) {\n if (typeof body === 'string') requestBody = body;\n else if (body instanceof URLSearchParams) requestBody = body.toString();\n else if (typeof FormData !== 'undefined' && body instanceof FormData) {\n const obj: Record<string, any> = {};\n for (const [k, v] of body.entries()) obj[k] = typeof v === 'string' ? 
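Like the parser factory, `LlmEnhancer` routes purely on the URL and falls back to returning the body untouched. A small usage sketch (import path assumed):

```ts
import { LlmEnhancer } from './enhancers'; // path is an assumption

const body = JSON.stringify({ messages: [{ role: 'user', content: 'hi' }] });

// Anthropic endpoint: ClaudeEnhancer runs (context is injected only if configured).
LlmEnhancer.enhanceRequest('https://api.anthropic.com/v1/messages', body);

// Unrecognised endpoint: the original body is returned as-is.
LlmEnhancer.enhanceRequest('https://example.com/v1/other', body) === body; // true
```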
v : '[File|Blob]';\n requestBody = JSON.stringify(obj);\n } else if (body instanceof Uint8Array || Buffer.isBuffer(body))\n requestBody = Buffer.from(body).toString('utf8');\n else if (typeof body === 'object') requestBody = JSON.stringify(body);\n }\n } catch {\n requestBody = '<request body error>';\n }\n\n // --- Enhance LLM requests with context data ---\n let enhancedBody = requestBody;\n if (ctx.llmProvider.isLLMCall(url) && requestBody && requestBody !== '<request body error>') {\n try {\n enhancedBody = LlmEnhancer.enhanceRequest(url, requestBody);\n\n // Update init.body with enhanced version if we modified it\n if (enhancedBody !== requestBody && init) {\n init.body = enhancedBody;\n } else if (enhancedBody !== requestBody && typeof resource === 'object' && resource.body) {\n resource.body = enhancedBody;\n }\n } catch (err) {\n console.error('[FetchInstrument] Failed to enhance LLM request:', err);\n // Continue with original body if enhancement fails\n }\n }\n\n const entryId = ctx.idGenerator();\n const entryTime = new Date().toISOString();\n\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: 0,\n };\n extra.sendHttpStart(startEntry).catch((err) => {\n console.error('[FetchInstrument] Failed to send HTTP start:', err);\n });\n }\n\n // --- Perform network request with outer error logging ---\n let res: Response;\n try {\n res = await this.originalFetch!(resource, init);\n } catch (networkError) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: (networkError as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n throw networkError; // preserve original rejection semantics\n }\n\n try {\n const duration = performance.now() - start;\n let responseBody = '';\n let bodyBytes = 0;\n let processedBody: any = null;\n let isEventStream = false;\n\n // --- Capture response body with safeguards ---\n try {\n const contentType = res.headers.get('content-type') || '';\n isEventStream = /text\\/event-stream/i.test(contentType);\n const isTextLike =\n /^(text\\/|application\\/(json|xml|javascript))/i.test(contentType) || /(json|xml|html)/i.test(contentType);\n\n if (!isEventStream) {\n // Fully buffer only non-streaming types (or when small enough)\n const clone = res.clone();\n const arrayBuffer = await clone.arrayBuffer();\n bodyBytes = arrayBuffer.byteLength;\n const max = FetchInstrument.MAX_LOG_BYTES;\n if (isTextLike) {\n if (arrayBuffer.byteLength <= max) {\n responseBody = Buffer.from(arrayBuffer).toString('utf8');\n } else {\n const buf = Buffer.from(arrayBuffer.slice(0, max));\n responseBody = `${buf.toString('utf8')}\\n...[truncated ${(arrayBuffer.byteLength - max).toLocaleString()} bytes]`;\n }\n } else {\n responseBody = `<${bodyBytes} bytes binary>`;\n }\n } else {\n // For event-stream, we need to tee the stream to capture data without consuming it\n const originalBody = res.body;\n if (originalBody) {\n const [stream1, stream2] = originalBody.tee();\n\n // Capture stream1 data for logging/processing\n const capturePromise = (async () => {\n try {\n 
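Stripped of logging and enhancement details, the wrapper above follows the usual monkey-patch shape: keep a bound reference to the original `fetch`, delegate to it, and preserve rejection semantics. A condensed sketch:

```ts
import { performance } from 'node:perf_hooks';

// Minimal skeleton of the patch; the real instrument additionally captures
// bodies, enhances LLM requests and tees SSE streams.
const originalFetch = globalThis.fetch.bind(globalThis);

globalThis.fetch = (async (resource: any, init?: any) => {
  const started = performance.now();
  try {
    const res = await originalFetch(resource, init);
    // ...record { url, status: res.status, durationMs: performance.now() - started }
    return res;
  } catch (err) {
    // ...record the failure, then rethrow so callers see the original rejection
    throw err;
  }
}) as typeof fetch;
```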
const reader = stream1.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n // For SSE streams, capture everything without limit\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n\n chunks.push(value);\n totalBytes += value.byteLength;\n }\n\n // Convert captured chunks to string for processing\n const capturedBuffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));\n const capturedText = capturedBuffer.toString('utf8');\n responseBody = capturedText; // No truncation for SSE\n bodyBytes = totalBytes;\n\n // Process the captured streaming response for LLM calls\n if (ctx.llmProvider.isLLMCall(url) && responseBody) {\n try {\n processedBody = ctx.responseProcessor(url, responseBody);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? err.message : String(err)}` };\n }\n }\n\n // Update the log entry through context handler (which will trigger sendHttpUpdate)\n const finalLogEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(performance.now() - start),\n responseBody,\n processedBody,\n bodyBytes,\n };\n ctx.handleLogEntry(finalLogEntry);\n\n // Send the update if this is a valid message request\n // Only send if: (1) has valid content OR (2) completing a pending message\n if (\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(url, method, requestBody)\n ) {\n extra.sendHttpUpdate(finalLogEntry).catch(() => {});\n }\n\n return { responseBody, processedBody, bodyBytes };\n } catch (err) {\n console.error('Error capturing stream:', err);\n return { responseBody: '<stream capture error>', processedBody: null, bodyBytes: 0 };\n }\n })();\n\n // Store promise for later use if needed\n (res as any).__capturePromise = capturePromise;\n\n // Return response with stream2 (unconsumed)\n res = new Response(stream2, {\n status: res.status,\n statusText: res.statusText,\n headers: res.headers,\n });\n\n // Set initial values for streaming\n responseBody = '<streaming in progress>';\n processedBody = null;\n } else {\n responseBody = '<streaming response not buffered>';\n }\n }\n } catch {\n responseBody = '<response body error>';\n }\n\n // Process non-streaming responses\n if (\n !isEventStream &&\n ctx.llmProvider.isLLMCall(url) &&\n responseBody &&\n responseBody !== '<response body error>'\n ) {\n try {\n processedBody = ctx.responseProcessor(url, responseBody);\n } catch (err) {\n processedBody = { error: `parser_failed: ${err instanceof Error ? 
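The key trick for `text/event-stream` responses is `ReadableStream.tee()`: one branch is drained for logging while the caller receives an equivalent `Response` backed by the other branch, so the stream is never consumed twice. A self-contained sketch of that pattern (names are illustrative):

```ts
async function captureSse(res: Response): Promise<Response> {
  if (!res.body) return res;
  const [forLogging, forCaller] = res.body.tee();

  // Drain the logging branch in the background (fire-and-forget).
  void (async () => {
    const reader = forLogging.getReader();
    const chunks: Uint8Array[] = [];
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      chunks.push(value);
    }
    const text = Buffer.concat(chunks.map((c) => Buffer.from(c))).toString('utf8');
    console.error(`[captureSse] captured ${text.length} chars`); // stderr only, no stdout pollution
  })();

  // Hand back an equivalent Response wrapping the untouched branch.
  return new Response(forCaller, {
    status: res.status,
    statusText: res.statusText,
    headers: res.headers,
  });
}
```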
err.message : String(err)}` };\n }\n }\n\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n status: res.status,\n ok: res.ok,\n durationMs: Math.round(duration),\n responseBody,\n processedBody,\n bodyBytes,\n };\n ctx.handleLogEntry(logEntry);\n\n // For non-streaming responses or when streaming capture fails, send update immediately\n // For streaming responses, the update will be sent after capture completes\n if (\n !isEventStream &&\n extra.agentHttpService &&\n extra.sessionId &&\n extra.isValidMessageRequest(url, method, requestBody)\n ) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n return res;\n } catch (error) {\n const duration = performance.now() - start;\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: 'fetch',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: (error as Error).message,\n };\n ctx.handleLogEntry(logEntry);\n if (extra.agentHttpService && extra.sessionId && extra.isValidMessageRequest(url, method, requestBody)) {\n extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n return res; // still return original response if available\n }\n }) as typeof fetch;\n }\n\n restore(): void {\n if (this.originalFetch) {\n globalThis.fetch = this.originalFetch;\n this.originalFetch = null;\n }\n }\n}\n","import { performance } from 'node:perf_hooks';\nimport type { CommonContext, NetworkLogEntry } from './types';\n\nexport interface HttpInstrumentConfig {\n agentHttpService?: any;\n sessionId?: string;\n isValidMessageRequest: (url: string, method: string, body: string) => boolean;\n sendHttpStart: (entry: NetworkLogEntry) => Promise<void>;\n sendHttpUpdate: (entry: NetworkLogEntry) => Promise<void>;\n}\n\nexport class HttpInstrument {\n private originals: any = null;\n constructor(\n private ctx: CommonContext,\n private enableHttps: boolean,\n private _extra: HttpInstrumentConfig = {\n isValidMessageRequest: () => false,\n sendHttpStart: async () => {},\n sendHttpUpdate: async () => {},\n },\n ) {}\n\n patch(): void {\n try {\n const http = require('http');\n const https = require('https');\n if (this.originals) return;\n\n this.originals = {\n httpRequest: http.request,\n httpsRequest: https.request,\n httpGet: http.get,\n httpsGet: https.get,\n };\n const self = this;\n const maxBody = this.ctx.maxBodyBytes;\n\n function buildUrl(options: any, protocolDefault: string): string {\n try {\n if (typeof options === 'string') return options;\n if (options instanceof URL) return options.toString();\n const protocol = options.protocol || protocolDefault;\n const host = options.hostname || options.host || 'localhost';\n const portPart = options.port ? `:${options.port}` : '';\n const path = options.path || options.pathname || '/';\n return `${protocol}//${host}${portPart}${path}`;\n } catch {\n return '';\n }\n }\n\n function wrapRequest(original: Function, protocol: 'http:' | 'https:') {\n return function wrappedRequest(options: any, callback?: any) {\n const url = buildUrl(options, protocol);\n const method = (typeof options === 'object' && options.method ? 
options.method : 'GET').toUpperCase();\n\n const startHr = performance.now();\n const entryId = self.ctx.idGenerator();\n const entryTime = new Date().toISOString();\n const requestBodyBuffers: Buffer[] = [];\n let requestLen = 0;\n let requestBodySent = false;\n\n const req = original(options, (res: any) => {\n const responseBuffers: Buffer[] = [];\n let responseBytes = 0;\n let capturedBytes = 0; // track captured (pushed) bytes to avoid O(n^2)\n const contentTypeHeader = () => String(res.headers['content-type'] || '');\n res.on('data', (chunk: Buffer) => {\n responseBytes += chunk.length;\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n\n // For SSE streams, capture everything without truncation\n if (isEventStream) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n // For non-SSE, apply the maxBody limit\n if (capturedBytes >= maxBody) return; // already at limit\n const remaining = maxBody - capturedBytes;\n if (chunk.length <= remaining) {\n responseBuffers.push(chunk);\n capturedBytes += chunk.length;\n } else {\n responseBuffers.push(chunk.subarray(0, remaining));\n capturedBytes += remaining;\n }\n }\n });\n res.on('end', () => {\n const duration = performance.now() - startHr;\n let responseBody = '';\n try {\n const ct = contentTypeHeader();\n const isEventStream = /text\\/event-stream/i.test(ct);\n const bufferRaw = Buffer.concat(responseBuffers);\n const wasTruncated = responseBytes > bufferRaw.length || bufferRaw.length === maxBody;\n let buffer = bufferRaw;\n\n // Handle decompression for non-SSE responses\n if (!isEventStream) {\n const contentEncoding = String(res.headers['content-encoding'] || '').toLowerCase();\n // Only attempt decompression if we captured the full body (not truncated) & encoding present\n const canDecompress = !wasTruncated && buffer.length && /(gzip|br|deflate)/.test(contentEncoding);\n if (canDecompress) {\n try {\n const zlib = require('node:zlib');\n if (contentEncoding.includes('gzip')) buffer = zlib.gunzipSync(bufferRaw);\n else if (contentEncoding.includes('br')) buffer = zlib.brotliDecompressSync(bufferRaw);\n else if (contentEncoding.includes('deflate')) buffer = zlib.inflateSync(bufferRaw);\n } catch {\n // swallow decompression errors; fall back to raw (likely already decoded or truncated)\n }\n }\n }\n\n // Convert buffer to string for text-based responses (including SSE)\n const isText = /text\\//i.test(ct) || /application\\/(json|xml|javascript)/i.test(ct);\n if (isText || isEventStream) {\n responseBody = buffer.toString('utf8');\n // Don't add truncation message for SSE streams (we capture full SSE)\n if (wasTruncated && !isEventStream) responseBody += '\\n...[truncated]';\n } else if (buffer.length) {\n responseBody = `<${buffer.length} bytes${wasTruncated ? 
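The decompression branch above only runs when the whole body was captured; otherwise the truncated bytes are kept as-is. A standalone sketch of that decision (the helper name is an assumption):

```ts
import * as zlib from 'node:zlib';

function maybeDecompress(raw: Buffer, contentEncoding: string, wasTruncated: boolean): Buffer {
  // Decompressing a truncated gzip/brotli/deflate stream would throw, so skip it.
  if (wasTruncated || raw.length === 0) return raw;
  try {
    if (contentEncoding.includes('gzip')) return zlib.gunzipSync(raw);
    if (contentEncoding.includes('br')) return zlib.brotliDecompressSync(raw);
    if (contentEncoding.includes('deflate')) return zlib.inflateSync(raw);
  } catch {
    // Likely already decoded upstream; fall back to the raw bytes.
  }
  return raw;
}
```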
' (truncated)' : ''} binary>`;\n }\n } catch {\n responseBody = '<response body error>';\n }\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n let processedBody: any = null;\n if (self.ctx.llmProvider.isLLMCall(url)) {\n try {\n // Always use the responseProcessor for LLM calls to get proper parsing\n processedBody = self.ctx.responseProcessor(url, responseBody);\n } catch (e) {\n processedBody = {\n parts: [{ type: 'text', text: responseBody || '<empty response>' }],\n metadata: { error: String(e) },\n };\n }\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n status: res.statusCode,\n ok: res.statusCode >= 200 && res.statusCode < 300,\n durationMs: Math.round(duration),\n responseBody,\n processedBody,\n bodyBytes: responseBytes,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update when response is complete\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n res.on('error', (err: Error) => {\n const duration = performance.now() - startHr;\n let requestBody = '';\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n } catch {\n requestBody = '<request body error>';\n }\n const logEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: Math.round(duration),\n error: err.message,\n };\n try {\n const u = new URL(url);\n logEntry.host = u.hostname;\n logEntry.path = u.pathname;\n } catch {}\n self.ctx.handleLogEntry(logEntry);\n\n // Send HTTP update for error case\n if (\n self._extra.agentHttpService &&\n self._extra.sessionId &&\n self._extra.isValidMessageRequest(url, method, requestBody)\n ) {\n self._extra.sendHttpUpdate(logEntry).catch(() => {});\n }\n });\n if (callback) callback(res);\n });\n const originalWrite = req.write;\n const originalEnd = req.end;\n req.write = function (chunk: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n } catch {}\n return originalWrite.call(this, chunk, encoding, cb);\n };\n req.end = function (chunk?: any, encoding?: any, cb?: any) {\n try {\n if (chunk && requestLen < maxBody) {\n const buf = Buffer.isBuffer(chunk) ? 
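Outgoing request bodies are captured the same way on the `http`/`https` path: `req.write` and `req.end` are wrapped so chunks are copied (up to the configured cap) while always delegating to the originals. A simplified sketch (helper name and default cap are assumptions):

```ts
import type { ClientRequest } from 'node:http';

function captureRequestBody(req: ClientRequest, maxBody = 8192): () => Buffer {
  const buffers: Buffer[] = [];
  let len = 0;

  const copy = (chunk: any, encoding?: any) => {
    if (!chunk || len >= maxBody) return;
    const enc = typeof encoding === 'string' ? encoding : 'utf8';
    const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, enc);
    const slice = buf.subarray(0, maxBody - len);
    buffers.push(slice);
    len += slice.length;
  };

  const originalWrite = req.write.bind(req);
  const originalEnd = req.end.bind(req);
  (req as any).write = (chunk: any, encoding?: any, cb?: any) => {
    copy(chunk, encoding);
    return originalWrite(chunk, encoding, cb);
  };
  (req as any).end = (chunk?: any, encoding?: any, cb?: any) => {
    copy(chunk, encoding);
    return originalEnd(chunk, encoding, cb);
  };

  return () => Buffer.concat(buffers); // call once the request has ended
}
```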
chunk : Buffer.from(chunk, encoding || 'utf8');\n const remaining = maxBody - requestLen;\n if (buf.length <= remaining) {\n requestBodyBuffers.push(buf);\n requestLen += buf.length;\n } else {\n requestBodyBuffers.push(buf.subarray(0, remaining));\n requestLen += remaining;\n }\n }\n\n // Send HTTP start when request is ending (body is complete)\n if (!requestBodySent) {\n requestBodySent = true;\n try {\n const combined = Buffer.concat(requestBodyBuffers);\n const wasTruncated = requestLen > combined.length || combined.length === maxBody;\n let requestBody = combined.toString('utf8');\n if (wasTruncated) requestBody += '\\n...[truncated]';\n\n const isValid = self._extra.isValidMessageRequest(url, method, requestBody);\n\n if (self._extra.agentHttpService && self._extra.sessionId && isValid) {\n const startEntry: NetworkLogEntry = {\n id: entryId,\n time: entryTime,\n transport: protocol === 'https:' ? 'https' : 'http',\n method,\n url,\n requestBody,\n durationMs: 0,\n };\n self._extra.sendHttpStart(startEntry).catch(() => {});\n }\n } catch {}\n }\n } catch {}\n return originalEnd.call(this, chunk, encoding, cb);\n };\n return req;\n };\n }\n\n http.request = wrapRequest(this.originals.httpRequest, 'http:');\n http.get = function wrappedGet(o: any, cb?: any) {\n const r = (http.request as any)(o, cb);\n r.end();\n return r;\n };\n if (this.enableHttps !== false) {\n https.request = wrapRequest(this.originals.httpsRequest, 'https:');\n https.get = function wrappedGet(o: any, cb?: any) {\n const r = (https.request as any)(o, cb);\n r.end();\n return r;\n };\n }\n } catch (e) {\n // Silent instrumentation - no logging\n }\n }\n\n restore(): void {\n if (this.originals) {\n try {\n const http = require('http');\n const https = require('https');\n http.request = this.originals.httpRequest;\n http.get = this.originals.httpGet;\n https.request = this.originals.httpsRequest;\n https.get = this.originals.httpsGet;\n } catch {}\n this.originals = null;\n }\n }\n}\n","/**\n * LLM Network Interceptor Class\n *\n * Handles network interception and logging for LLM-related HTTP requests.\n * Patches global fetch and optionally http/https modules to capture request/response data.\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../../config/subenv';\nimport type { AgentHttpService } from '../../services/AgentHttpService';\nimport { FileLogger } from '../loggers/file';\nimport type { MessagePart } from '../messages';\nimport { LlmParser } from '../parsers';\nimport { LlmProvider } from '../providers';\nimport { FetchInstrument } from './instruments/fetchInstrument';\nimport { HttpInstrument } from './instruments/httpInstrument';\nimport type { CommonContext, NetworkLogEntry } from './instruments/types';\n\nexport type { NetworkLogEntry } from './instruments/types';\n\nexport interface LlmInterceptorConfig {\n maxBodyBytes?: number;\n enableHttpsPatching?: boolean;\n enableDebug?: boolean;\n logFilter?: (entry: NetworkLogEntry) => boolean;\n responseProcessor?: (url: string, responseBody: string) => any;\n llmProvider?: LlmProvider;\n idGenerator?: () => string;\n agentHttpService?: AgentHttpService;\n sessionId?: string;\n logFilePath?: string; // optional path to write newline-delimited JSON log entries\n processedLogFilePath?: string; // optional path to write processed body (pretty JSON)\n}\n\nexport class LlmInterceptor {\n private static instance: LlmInterceptor | null = null;\n private isPatched = false;\n 
private config: Required<\n Omit<LlmInterceptorConfig, 'llmProvider' | 'idGenerator' | 'agentHttpService' | 'sessionId'>\n >;\n private llmProvider: LlmProvider;\n private idGenerator: () => string;\n private agentHttpService?: AgentHttpService;\n private sessionId?: string;\n private activeRequests = new Map<string, string>();\n private rawLogger?: FileLogger; // logger for original request/response\n private errorLogger?: FileLogger; // logger for original request/response\n private processedLogger?: FileLogger; // logger for url + processed response\n\n // Instruments\n private fetchInstrument?: FetchInstrument;\n private httpInstrument?: HttpInstrument;\n\n constructor(config: LlmInterceptorConfig = {}) {\n // Use provided LlmProvider or create a new instance\n this.llmProvider = config.llmProvider || new LlmProvider();\n\n // Use provided idGenerator or default to ulid\n this.idGenerator = config.idGenerator || ulid;\n\n // Store AgentHttpService and sessionId for promise lifecycle logging\n this.agentHttpService = config.agentHttpService;\n this.sessionId = config.sessionId;\n\n this.config = {\n maxBodyBytes: config.maxBodyBytes ?? 8192,\n enableHttpsPatching: config.enableHttpsPatching ?? true, // default to true so https requests are intercepted\n enableDebug: config.enableDebug ?? false,\n logFilter:\n config.logFilter ??\n ((entry) =>\n !!(\n entry.url &&\n !entry.url.includes('statsig') &&\n !entry.url.includes('registry.npmjs.org') &&\n !entry.url.startsWith('data:')\n )),\n // Default processor now parses streaming SSE via LlmParser\n responseProcessor:\n config.responseProcessor ??\n ((url: string, responseBody: string) => {\n try {\n return LlmParser.parseStreamingResponse(url, responseBody);\n } catch {\n return null;\n }\n }),\n } as typeof this.config;\n\n // Use system temp directory for automatic garbage collection\n const baseLogDir = path.join(os.tmpdir(), 'agiflow-agents', 'llm', `session-${process.pid}`);\n const logFilePath = config.logFilePath || path.join(baseLogDir, 'network.log');\n const errorLogFilePath = path.join(baseLogDir, 'error.log'); // Remove config.errorLogFilePath which doesn't exist\n const processedLogFilePath = config.processedLogFilePath || path.join(baseLogDir, 'processed.jsonl');\n\n // Initialize loggers\n try {\n this.rawLogger = new FileLogger(logFilePath);\n } catch {}\n try {\n this.errorLogger = new FileLogger(errorLogFilePath);\n } catch {}\n try {\n this.processedLogger = new FileLogger(processedLogFilePath);\n } catch {}\n }\n\n /**\n * Get singleton instance of LlmInterceptor\n */\n static getInstance(config?: LlmInterceptorConfig): LlmInterceptor {\n if (!LlmInterceptor.instance) {\n LlmInterceptor.instance = new LlmInterceptor(config);\n }\n return LlmInterceptor.instance;\n }\n\n /**\n * Start network interception\n */\n start(): void {\n if (this.isPatched) {\n return;\n }\n\n const common: CommonContext = {\n idGenerator: this.idGenerator,\n maxBodyBytes: this.config.maxBodyBytes,\n enableDebug: this.config.enableDebug,\n logFilter: this.config.logFilter,\n responseProcessor: this.config.responseProcessor,\n llmProvider: this.llmProvider,\n handleLogEntry: (e) => this.handleLogEntry(e),\n };\n\n this.fetchInstrument = new FetchInstrument(common, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n 
});\n this.fetchInstrument.patch();\n\n this.httpInstrument = new HttpInstrument(common, this.config.enableHttpsPatching, {\n agentHttpService: this.agentHttpService,\n sessionId: this.sessionId,\n isValidMessageRequest: (url, method, body) => this.isValidMessageRequest(url, method, body),\n sendHttpStart: (entry) => this.sendHttpStart(entry),\n sendHttpUpdate: (entry) => this.sendHttpUpdate(entry),\n });\n this.httpInstrument.patch();\n\n this.isPatched = true;\n }\n\n /**\n * Stop network interception and restore original functions\n */\n stop(): void {\n if (!this.isPatched) {\n return;\n }\n this.fetchInstrument?.restore();\n this.httpInstrument?.restore();\n this.isPatched = false;\n }\n\n /**\n * Check if interceptor is currently active\n */\n isActive(): boolean {\n return this.isPatched;\n }\n\n /**\n * Update configuration\n */\n updateConfig(config: Partial<LlmInterceptorConfig>): void {\n this.config = { ...this.config, ...config };\n }\n\n /**\n * Handle a log entry - filter and pass to log handler if configured\n */\n private handleLogEntry(entry: NetworkLogEntry): void {\n // Raw logger writes the full network entry\n if (this.rawLogger) {\n this.rawLogger.info('network_entry', { networkLog: entry });\n }\n // Processed logger writes subset if processed body present\n if (this.processedLogger && this.config.logFilter(entry) && entry.processedBody) {\n this.processedLogger.info('processed_entry', {\n id: entry.id,\n url: entry.url,\n method: entry.method,\n status: entry.status,\n durationMs: entry.durationMs,\n timestamp: entry.time,\n processedBody: entry.processedBody,\n });\n }\n // Silent instrumentation - no debug logging\n }\n\n /**\n * Check if this is a valid message HTTP request that should be logged as an agent message\n */\n private isValidMessageRequest(url: string, method: string, requestBody: string): boolean {\n if (this.rawLogger) {\n this.rawLogger.info('isValidMessageRequest checking', {\n url,\n method,\n hasBody: !!requestBody,\n bodyLength: requestBody?.length,\n });\n }\n\n // Only log LLM calls\n if (!this.llmProvider.isLLMCall(url)) {\n if (this.rawLogger) {\n this.rawLogger.info('Not an LLM URL, skipping', { url });\n }\n if (this.rawLogger && this.config.enableDebug) {\n this.rawLogger.debug('Request validation failed: Not an LLM URL', {\n url,\n method,\n });\n }\n return false;\n }\n\n // For Google/Gemini APIs, only track SSE streaming requests\n if (url.includes('cloudcode-pa.googleapis.com') || url.includes('generativelanguage.googleapis.com')) {\n // Only track streamGenerateContent (SSE), skip countTokens, generateContent, loadCodeAssist, etc.\n if (url.includes('streamGenerateContent')) {\n return true;\n } else {\n return false;\n }\n }\n\n // Only log POST requests (actual message requests)\n if (method !== 'POST') {\n return false;\n }\n\n // Check if the request body contains actual message content\n if (!requestBody) {\n return false;\n }\n\n try {\n const body = JSON.parse(requestBody);\n\n // Look for common LLM message patterns\n const hasMessages = body.messages && Array.isArray(body.messages) && body.messages.length > 0;\n const hasPrompt = body.prompt && typeof body.prompt === 'string';\n const hasInput = body.input && typeof body.input === 'string';\n const hasContents = body.contents && Array.isArray(body.contents) && body.contents.length > 0; // Gemini format\n\n // Gemini specific: check for request.contents\n const hasRequestContents =\n body.request?.contents && Array.isArray(body.request.contents) && 
body.request.contents.length > 0;\n\n // Must have actual content to be considered a valid message request\n const isValid = hasMessages || hasPrompt || hasInput || hasContents || hasRequestContents;\n\n return isValid;\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request body in isValidMessageRequest', {\n url,\n method,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n // If we can't parse the JSON, it's not a valid message request\n return false;\n }\n }\n\n /**\n * Send HTTP start event using startMessage with request content\n */\n private async sendHttpStart(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) {\n return;\n }\n\n // Log start request\n if (this.rawLogger) {\n this.rawLogger.info('http_start', { entry });\n }\n\n // Extract the request content to include as parts\n let requestParts: Array<{ type: string; text?: string }> = [];\n if (entry.requestBody) {\n try {\n const requestData = JSON.parse(entry.requestBody);\n\n // Extract user input from common LLM API formats\n if (requestData.messages && Array.isArray(requestData.messages)) {\n // OpenAI/Anthropic format - find the last user message\n const userMessage = requestData.messages.filter((msg: any) => msg.role === 'user').pop();\n if (userMessage?.content) {\n if (typeof userMessage.content === 'string') {\n requestParts.push({ type: 'text', text: userMessage.content });\n } else if (Array.isArray(userMessage.content)) {\n // Handle content array format\n requestParts = userMessage.content.map((part: any) => ({\n type: part.type || 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } else if (requestData.prompt) {\n // Legacy prompt format\n requestParts.push({ type: 'text', text: requestData.prompt });\n } else if (requestData.input) {\n // Some APIs use 'input'\n requestParts.push({ type: 'text', text: requestData.input });\n } else if (requestData.contents && Array.isArray(requestData.contents)) {\n // Gemini format - extract the last user content\n const userContent = requestData.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n } else if (requestData.request?.contents && Array.isArray(requestData.request.contents)) {\n // Gemini format with nested request - extract the last user content\n const userContent = requestData.request.contents.filter((c: any) => c.role === 'user').pop();\n if (userContent?.parts && Array.isArray(userContent.parts)) {\n requestParts = userContent.parts.map((part: any) => ({\n type: 'text',\n text: part.text || JSON.stringify(part),\n }));\n }\n }\n } catch (err) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to parse request data in sendHttpStart', {\n url: entry.url,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n // If we can't parse the request, just include a placeholder\n requestParts.push({ type: 'text', text: 'LLM request initiated...' 
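Roughly, these are the request-body shapes the check above accepts as real message requests, versus what gets skipped (examples are illustrative):

```ts
// Accepted (POST bodies containing actual message content):
JSON.stringify({ messages: [{ role: 'user', content: 'hi' }] });                        // OpenAI/Anthropic style
JSON.stringify({ contents: [{ role: 'user', parts: [{ text: 'hi' }] }] });              // Gemini style
JSON.stringify({ request: { contents: [{ role: 'user', parts: [{ text: 'hi' }] }] } }); // nested Gemini
JSON.stringify({ prompt: 'hi' });                                                        // legacy prompt APIs
JSON.stringify({ input: 'hi' });                                                         // input-style APIs

// Skipped: non-LLM URLs, Google endpoints other than streamGenerateContent,
// non-POST requests, empty bodies, and bodies that fail JSON.parse.
```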
});\n }\n }\n\n // Extract only necessary metadata for frontend\n const startMetadata = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n request_started: true,\n };\n\n try {\n if (this.rawLogger) {\n this.rawLogger.info('Calling agentHttpService.startMessage', {\n sessionId: this.sessionId,\n entryId: entry.id,\n });\n }\n\n const response = await this.agentHttpService.startMessage(this.sessionId, {\n messageType: 'output',\n metadata: startMetadata,\n });\n\n // Store the message ID for later update\n this.activeRequests.set(entry.id, response.messageId);\n\n if (this.rawLogger) {\n this.rawLogger.info('startMessage successful', {\n entryId: entry.id,\n messageId: response.messageId,\n });\n }\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP start message', {\n url: entry.url,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n }\n }\n\n /**\n * Send HTTP completion event using updateMessage\n */\n private async sendHttpUpdate(entry: NetworkLogEntry): Promise<void> {\n if (!this.agentHttpService || !this.sessionId) return;\n\n // Log update request\n if (this.rawLogger) {\n this.rawLogger.info('http_update', { entry });\n }\n\n const messageId = this.activeRequests.get(entry.id);\n if (!messageId) {\n // No pending message to update, skip\n return;\n }\n\n // Validate that we have actual content to send\n // Check if processedBody has valid parts\n const hasParts = entry.processedBody?.message?.parts?.length > 0 || entry.processedBody?.parts?.length > 0;\n\n if (!hasParts) {\n // Don't update if there's no content - keep the message in pending state\n if (this.rawLogger) {\n this.rawLogger.info('Skipping update - no valid parts yet', {\n entryId: entry.id,\n messageId,\n });\n }\n return;\n }\n\n // Extract only necessary metadata for frontend: model, llm_provider, token usage\n const completionMetadata: Record<string, any> = {\n llm_provider: this.llmProvider.detectProvider(entry.url),\n status: entry.status,\n duration: entry.durationMs,\n };\n\n // Add model and token usage if available from processed response\n if (entry.processedBody?.metadata) {\n const processedMeta = entry.processedBody.metadata;\n if (processedMeta.model) completionMetadata.model = processedMeta.model;\n if (processedMeta.usage) completionMetadata.usage = processedMeta.usage;\n if (processedMeta.total_tokens) completionMetadata.total_tokens = processedMeta.total_tokens;\n if (processedMeta.prompt_tokens) completionMetadata.prompt_tokens = processedMeta.prompt_tokens;\n if (processedMeta.completion_tokens) completionMetadata.completion_tokens = processedMeta.completion_tokens;\n if (processedMeta.cost) completionMetadata.cost = processedMeta.cost;\n }\n\n // Use already-parsed processedBody to extract response content as parts\n let responseParts: MessagePart[] = [];\n if (entry.processedBody) {\n const parsed = entry.processedBody;\n\n if (parsed?.message?.parts && Array.isArray(parsed.message.parts)) {\n // Accept parts array even if empty (valid for token counting, etc.)\n responseParts = parsed.message.parts;\n } else if (parsed?.parts && Array.isArray(parsed.parts)) {\n // httpInstrument.ts error handler creates parts directly without message wrapper\n responseParts = parsed.parts;\n } else if (parsed?.error) {\n responseParts.push({ type: 'text' as const, text: `Parser error: ${parsed.error}` });\n } else {\n // No parts is valid for some API calls (countTokens, loadCodeAssist, etc.)\n 
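Summarising the start/update pair shown above and completed just below, the message lifecycle around `activeRequests` looks roughly like this:

```ts
// 1. request begins  -> startMessage(sessionId, { messageType: 'output', metadata })
//                       activeRequests.set(entry.id, response.messageId)
// 2. response parsed -> sendHttpUpdate looks up messageId via entry.id; if the
//                       processed body has no parts yet, the update is skipped and
//                       the message stays pending
// 3. update sent     -> updateMessage(sessionId, messageId, { parts, metadata, mId })
//                       then activeRequests.delete(entry.id), even on failure
```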
responseParts = [];\n }\n } else if (entry.responseBody) {\n // Fallback: create a simple text part for unsupported providers\n responseParts.push({ type: 'text' as const, text: 'LLM response completed' });\n }\n\n try {\n const updateData: any = {\n parts: responseParts,\n metadata: completionMetadata,\n mId: entry.processedBody?.message?.id,\n };\n\n // Only include raw data if explicitly enabled\n if (SubEnvManager.isSaveRawEnabled) {\n updateData.raw = JSON.stringify(entry);\n }\n\n await this.agentHttpService.updateMessage(this.sessionId, messageId, updateData);\n\n // Clean up the active request tracking\n this.activeRequests.delete(entry.id);\n } catch (error) {\n if (this.errorLogger) {\n this.errorLogger.error('Failed to send HTTP update message', {\n url: entry.url,\n messageId,\n sessionId: this.sessionId,\n error: error instanceof Error ? error.message : String(error),\n });\n }\n // Clean up even on error\n this.activeRequests.delete(entry.id);\n }\n }\n}\n\nexport default LlmInterceptor;\n","/**\n * Network logging hook - captures LLM HTTP responses\n *\n * STANDALONE HOOK - No external dependencies\n * This file must be self-contained as it's loaded via --import flag\n *\n * Simple approach:\n * - Patches fetch via LlmInterceptor\n * - Logs directly to file stream (avoids stdout pollution)\n * - Sends LLM calls to remote immediately (fire-and-forget)\n * - No batching, no cleanup, no complex error handling\n */\n\nimport * as os from 'node:os';\nimport * as path from 'node:path';\nimport { ulid } from 'ulidx';\nimport { SubEnvManager } from '../config/subenv';\nimport { LlmInterceptor, LlmProvider } from '../llms';\nimport { FileLogger } from '../llms/loggers/file';\nimport { AgentHttpService } from '../services/AgentHttpService';\n\nclass NetworkLog {\n private logger: FileLogger;\n private agentHttpService: AgentHttpService | null = null;\n private interceptor: LlmInterceptor;\n private llmProvider: LlmProvider;\n private maxBodyBytes: number;\n private sessionId: string;\n\n constructor() {\n const parsedMax = SubEnvManager.netlogMaxBody;\n this.maxBodyBytes = parsedMax || 8192;\n this.sessionId = SubEnvManager.sessionId || '';\n\n this.llmProvider = new LlmProvider();\n\n // Use system temp directory for automatic garbage collection\n const logPath =\n SubEnvManager.netlogFile || path.join(os.tmpdir(), 'agiflow-agents', 'network', `network-${process.pid}.log`);\n this.logger = new FileLogger(logPath);\n\n let apiUrl = SubEnvManager.serverUrl || '';\n if (apiUrl.startsWith('wss://')) apiUrl = 'https://' + apiUrl.slice(6);\n else if (apiUrl.startsWith('ws://')) apiUrl = 'http://' + apiUrl.slice(5);\n const apiKey = SubEnvManager.apiKey || '';\n const organizationId = SubEnvManager.organizationId || '';\n\n const enableRemote = SubEnvManager.isNetlogRemoteEnabled && !!(this.sessionId && apiUrl);\n\n if (enableRemote && apiUrl) {\n try {\n this.agentHttpService = new AgentHttpService({\n apiUrl,\n organizationId,\n apiKey,\n logger: this.logger as any,\n });\n } catch (error) {\n if (FileLogger.shouldDebug()) {\n this.logger.error('AgentHttpService initialization failed', error as Error);\n }\n }\n }\n\n this.interceptor = LlmInterceptor.getInstance({\n maxBodyBytes: this.maxBodyBytes,\n enableDebug: SubEnvManager.isDebugMode || SubEnvManager.isNetlogDebug,\n llmProvider: this.llmProvider,\n idGenerator: ulid,\n agentHttpService: this.agentHttpService || undefined,\n sessionId: this.sessionId,\n });\n }\n\n public start(): void {\n this.interceptor.start();\n }\n}\n\n// 
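Everything above is wired together by the standalone hook that follows: it builds the file logger, optionally an `AgentHttpService` when remote logging is enabled, and starts the singleton interceptor. A condensed sketch of that wiring (values illustrative, mirroring the constructor defaults):

```ts
import { ulid } from 'ulidx';
import { LlmInterceptor, LlmProvider } from '../llms'; // path as in the hook's own import

const interceptor = LlmInterceptor.getInstance({
  maxBodyBytes: 8192,              // default when no netlog max-body override is set
  enableDebug: false,              // true when debug / netlog-debug mode is on
  llmProvider: new LlmProvider(),
  idGenerator: ulid,
  agentHttpService: undefined,     // only provided when a session id and server URL exist
  sessionId: '',                   // from SubEnvManager.sessionId
});

interceptor.start();               // patches global fetch plus http/https request/get
// ...later, if needed:
interceptor.stop();                // restores the original implementations
```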
Initialize and start network logging (guard ensures idempotence)\nconst networkLog = new NetworkLog();\nnetworkLog.start();\n"],"names":["FileLogger","logPath","logDir","path","fs","error","message","data","level","logEntry","SubEnvManager","CostCalculator","modelName","usage","modelPricing","costData","inputTokens","outputTokens","cachedInputTokens","cacheCreationInputTokens","inputRate","outputRate","cacheReadRate","cacheCreationRate","inputCost","outputCost","cacheReadCost","cacheCreationCost","ClaudeParser","rawText","lines","contentBlocks","messageMetadata","blockMap","line","dataStr","block","parts","combinedUsage","tokenUsage","cost","len","part","_","key","GeminiParser","looksGzip","working","gun","json","response","fullText","finishReason","texts","nextSpeakers","reasoningSegments","t","modelVersion","responseId","usageMetadata","collectedTexts","jsonStr","extracted","l","candidates","cand","raw","trimmed","parsed","str","zlib.gunzipSync","text","control","sampleLen","i","code","input","output","total","LlmParser","url","parser","p","result","mapping","pattern","name","patterns","LlmProvider","provider","providerName","ClaudeEnhancer","requestBody","contextData","request","contextSystemMessage","GeminiEnhancer","contextInstruction","LlmEnhancer","enhancer","e","FetchInstrument","ctx","extra","resource","init","start","performance","method","body","obj","k","v","enhancedBody","err","entryId","entryTime","startEntry","res","networkError","duration","responseBody","bodyBytes","processedBody","isEventStream","contentType","isTextLike","originalBody","stream1","stream2","capturePromise","reader","chunks","totalBytes","done","value","c","finalLogEntry","arrayBuffer","max","HttpInstrument","enableHttps","_extra","buildUrl","options","protocolDefault","protocol","host","portPart","wrapRequest","original","callback","startHr","self","requestBodyBuffers","requestLen","requestBodySent","req","responseBuffers","responseBytes","capturedBytes","contentTypeHeader","chunk","ct","maxBody","remaining","bufferRaw","wasTruncated","buffer","contentEncoding","zlib","combined","u","originalWrite","originalEnd","encoding","cb","buf","isValid","http","https","o","r","LlmInterceptor","config","ulid","entry","baseLogDir","os","logFilePath","errorLogFilePath","processedLogFilePath","common","hasMessages","hasPrompt","hasInput","hasContents","hasRequestContents","requestParts","requestData","userMessage","msg","userContent","startMetadata","messageId","completionMetadata","processedMeta","responseParts","updateData","NetworkLog","parsedMax","apiUrl","apiKey","organizationId","AgentHttpService","networkLog"],"mappings":"8dAmBO,MAAMA,CAAW,CACd,UACA,QAER,YAAYC,EAAiB,CAC3B,KAAK,QAAUA,EAGf,MAAMC,EAASC,EAAK,QAAQ,KAAK,OAAO,EACnCC,EAAG,WAAWF,CAAM,GACvBE,EAAG,UAAUF,EAAQ,CAAE,UAAW,GAAM,EAI1C,KAAK,UAAYE,EAAG,kBAAkB,KAAK,QAAS,CAAE,MAAO,IAAK,EAGlE,KAAK,UAAU,GAAG,QAAUC,GAAU,CAEpC,QAAQ,MAAM,2CAA2CA,EAAM,OAAO,EAAE,CAC1E,CAAC,CACH,CAKA,KAAKC,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,KAAKD,EAAiBC,EAAkC,CACtD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,MAAMD,EAAiBC,EAAkC,CACvD,KAAK,IAAI,GAAID,EAASC,CAAI,CAC5B,CAKA,IAAIC,EAAeF,EAAiBC,EAAkC,CACpE,MAAME,EAAqB,CACzB,KAAM,IAAI,KAAA,EAAO,YAAA,EACjB,MAAAD,EACA,IAAKF,EACL,GAAGC,CAAA,EAGL,GAAI,CACF,KAAK,UAAU,MAAM,KAAK,UAAUE,CAAQ,EAAI;AAAA,CAAI,CACtD,OAASJ,EAAO,CAEd,QAAQ,MAAM,2CAA2CA,CAAK,EAAE,CAClE,CACF,CAKA,OAAc,CACR,KAAK,WAAa,CAAC,KAAK,UAAU,WACpC,KAAK,UAAU,IAAA,CAEnB,CAKA,YAAqB,CACnB,OAAO,KAAK,OACd,CAKA,OAAO,aAAuB,
CAC5B,OAAOK,EAAAA,cAAc,aAAeA,EAAAA,cAAc,aACpD,CACF,4/8FCxFO,MAAMC,EAAe,CAI1B,OAAO,cAAcC,EAAmBC,EAAqC,CAC3E,MAAMC,EAAeC,GAASH,CAAkC,EAEhE,GAAI,CAACE,EACH,OAAO,KAGT,MAAME,EAAcH,EAAM,aAAe,EACnCI,EAAeJ,EAAM,cAAgB,EACrCK,EAAoBL,EAAM,mBAAqB,EAC/CM,EAA2BN,EAAM,0BAA4B,EAG7DO,EAAY,yBAA0BN,EAAeA,EAAa,qBAAuB,EACzFO,EAAa,0BAA2BP,EAAeA,EAAa,sBAAwB,EAC5FQ,EAAgB,gCAAiCR,EAAeA,EAAa,4BAA8B,EAC3GS,EACJ,oCAAqCT,EAAeA,EAAa,gCAAkC,EAG/FU,EAAYR,EAAcI,EAC1BK,EAAaR,EAAeI,EAC5BK,EAAgBR,EAAoBI,EACpCK,EAAoBR,EAA2BI,EAIrD,MAAO,CACL,UAHgBC,EAAYC,EAAaC,EAAgBC,EAIzD,UAAAH,EACA,WAAAC,EACA,cAAeP,EAAoB,EAAIQ,EAAgB,OACvD,kBAAmBP,EAA2B,EAAIQ,EAAoB,MAAA,CAE1E,CACF,CC7CO,MAAMC,CAAa,CAOxB,OAAO,uBACLC,EAGqC,CACrC,GAAI,CACF,MAAMC,EAAQD,EAAQ,MAAM;AAAA,CAAI,EAC1BE,EAAuB,CAAA,EAC7B,IAAIC,EAAuB,KACvBnB,EAAa,KACjB,MAAMoB,MAAe,IAErB,UAAWC,KAAQJ,EACjB,GAAII,EAAK,WAAW,QAAQ,EAAG,CAC7B,MAAMC,EAAUD,EAAK,UAAU,CAAC,EAAE,KAAA,EAClC,GAAIC,EACF,GAAI,CACF,MAAM5B,EAAO,KAAK,MAAM4B,CAAO,EAQ/B,GALI5B,EAAK,OAAS,iBAAmBA,EAAK,UACxCyB,EAAkBzB,EAAK,SAIrBA,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQ,CACZ,MAAO7B,EAAK,MACZ,KAAMA,EAAK,cAAc,KACzB,GAAIA,EAAK,cAAc,GACvB,KAAMA,EAAK,cAAc,KACzB,MAAOA,EAAK,cAAc,OAAS,CAAA,EACnC,KAAM,GACN,YAAa,EAAA,EAEf0B,EAAS,IAAI1B,EAAK,MAAO6B,CAAK,CAChC,CAGA,GAAI7B,EAAK,OAAS,sBAAuB,CACvC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACjC6B,IACE7B,EAAK,MAAM,OAAS,aACtB6B,EAAM,MAAQ7B,EAAK,MAAM,KAChBA,EAAK,MAAM,OAAS,qBAC7B6B,EAAM,aAAe7B,EAAK,MAAM,cAGtC,CAGA,GAAIA,EAAK,OAAS,qBAAsB,CACtC,MAAM6B,EAAQH,EAAS,IAAI1B,EAAK,KAAK,EACrC,GAAI6B,EAAO,CAET,GAAIA,EAAM,YACR,GAAI,CACFA,EAAM,MAAQ,KAAK,MAAMA,EAAM,WAAW,CAC5C,MAAqB,CAGrB,CAEFL,EAAc,KAAKK,CAAK,CAC1B,CACF,CAGI7B,EAAK,OAAS,iBAAmBA,EAAK,QACxCM,EAAQN,EAAK,MAEjB,MAAQ,CAER,CAEJ,CAIF,MAAM8B,EAAuBN,EAAc,IAAKK,GAC1CA,EAAM,OAAS,OACV,CACL,KAAM,OACN,KAAMA,EAAM,IAAA,EAELA,EAAM,OAAS,WACjB,CACL,KAAM,kBACN,eAAgB,CACd,WAAYA,EAAM,GAClB,SAAUA,EAAM,KAChB,KAAMA,EAAM,MACZ,MAAO,MAAA,CACT,EAIG,CACL,KAAM,OACN,KAAM,+BAA+BA,EAAM,IAAI,EAAA,CAElD,EAGK9B,EAAU,CACd,GAAI0B,GAAiB,IAAMJ,EAAa,WAAA,EACxC,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAOS,EAAM,OAAS,EAAIA,EAAQ,CAAC,CAAE,KAAM,OAAiB,KAAM,EAAA,CAAI,CAAA,EAIlEC,EAAgB,CACpB,GAAIN,GAAiB,OAAS,CAAA,EAC9B,GAAInB,GAAS,CAAA,CAAC,EAIV0B,EAAa,CACjB,YAAaD,EAAc,cAAgBA,EAAc,YACzD,aAAcA,EAAc,eAAiBA,EAAc,aAC3D,kBAAmBA,EAAc,yBAA2BA,EAAc,kBAC1E,yBAA0BA,EAAc,6BAA+BA,EAAc,yBACrF,aAAcA,EAAc,cAAgB,IAAMA,EAAc,eAAiB,GACjF,cAAeA,EAAc,eAC7B,YAAaA,EAAc,YAAA,EAIvB1B,EAAYoB,GAAiB,MAC7BQ,EAAO5B,EAAYD,GAAe,cAAcC,EAAW2B,CAAU,EAAI,KAuB/E,MApBoB,CAClB,QAAAjC,EACA,SAAU,CACR,MAAOM,EACP,MAAO2B,EACP,KAAAC,EACA,kBAAmBR,GAAiB,GACpC,WAAYA,GAAiB,YAC7B,cAAeK,EAAM,OAAO,CAACI,EAAKC,IAC5BA,EAAK,OAAS,OACTD,EAAMC,EAAK,KAAK,OACdA,EAAK,OAAS,kBAChBD,EAAM,KAAK,UAAUC,EAAK,eAAe,MAAQ,EAAE,EAAE,OAEvDD,EAAM,KAAK,UAAUC,CAAI,EAAE,OACjC,CAAC,EACJ,gBAAiBZ,EAAM,OAAQI,GAASA,EAAK,WAAW,QAAQ,CAAC,EAAE,MAAA,CACrE,CAIJ,OAAS7B,EAAO,CACd,MAAO,CACL,MAAO,uCAAuCA,CAAK,GACnD,QAAAwB,CAAA,CAEJ,CACF,CAKA,OAAe,YAAqB,CAClC,OAAO,KAAK,IAAA,EAAM,SAAS,EAAE,EAAI,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,MAAM,EAAG,EAAE,CACzE,CACF,CAG+CD,EAAa,uBCnM3C,IAAI,MAAM,CAAA,EAAI,CAC3B,IAAIe,EAAGC,EAAK,CACV,MAAM,IAAI,MAAM,gGAAgGA,CAAG,oIAAoI,CACzP,CACJ,CAAG,ECEI,MAAMC,CAAa,CAExB,OAAO,uBAAuBhB,EAAsB,CAClD,GAAI,CAEF,OADcA,EAAQ,SAAS;AAAA,MAAS,GAAKA,EAAQ,WAAW,OAAO,EACrD,KAAK,SAASA,CAAO,EAChC,KAAK,aAAaA,CAAO,CAClC,OAASxB,EAAO,CACd,MAAO,CAAE,MAAO,oCAAoCA,CAAK,GAAI,QAAAwB,CAAA,CAC/D,CACF,CAGA,OAAe,aAAaA,EAAiB,CAC3C,MAAMiB,EAAY,KAAK,eAAejB,CAAO,GAAK,KAAK,wBAAwBA,CAAO,EACtF,IAAIkB,EAAUlB,EACd,GAAIiB,EAAW,CACb,MAAME,EAAM,KAAK,UAAUnB,CAAO,EAC9BmB,EAAI,KAAID,EAAUC,EAAI,KAC5B,CACA,GAAI,CACF,MAAMC,EAAO,KAA
K,MAAMF,CAAO,EACzBG,EAAWD,EAAK,UAAYA,EAC5B,CAAE,SAAAE,EAAU,aAAAC,EAAc,MAAAC,EAAO,aAAAC,EAAc,kBAAAC,CAAA,EAAsB,KAAK,qBAC9EL,EAAS,UAAA,EAELrC,EAAQ,KAAK,eAAeqC,EAAS,eAAiBD,EAAK,eAAiBA,CAAI,EAChFZ,EACJgB,EAAM,OAAS,EACXA,EAAM,IAAKG,IAAO,CAAE,KAAM,OAAQ,KAAMA,GAAI,EAC5CL,EACE,CAAC,CAAE,KAAM,OAAQ,KAAMA,CAAA,CAAU,EACjC,CAAA,EACR,MAAO,CACL,QAAS,CACP,GAAID,EAAS,YAAc,KAAK,WAAA,EAChC,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAAb,CAAA,EAEF,SAAU,CACR,MAAOa,EAAS,cAAgBD,EAAK,aACrC,MAAApC,EACA,WAAYqC,EAAS,WACrB,aAAAE,EACA,cAAeD,EAAS,OACxB,gBAAiB,EACjB,WAAYL,GAAa,OACzB,aAAcQ,EAAa,OAASA,EAAe,OACnD,kBAAmBC,EAAkB,OAASA,EAAoB,MAAA,CACpE,CAEJ,MAAQ,CACN,MAAO,CACL,QAAS,CACP,GAAI,KAAK,WAAA,EACT,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAO,CAAC,CAAE,KAAM,OAAQ,KAAMR,EAAS,CAAA,EAEzC,SAAU,CAAE,IAAK,EAAA,CAAK,CAE1B,CACF,CAGA,OAAe,SAASlB,EAAiB,CACvC,MAAMC,EAAQD,EAAQ,MAAM,OAAO,EACnC,IAAI4B,EACAC,EACAN,EACAO,EACJ,MAAMC,EAA2B,CAAA,EAC3BN,EAAyB,CAAA,EACzBC,EAA8B,CAAA,EAEpC,UAAWrB,KAAQJ,EAAO,CACxB,GAAI,CAACI,EAAK,WAAW,OAAO,EAAG,SAC/B,MAAM2B,EAAU3B,EAAK,MAAM,CAAC,EAAE,KAAA,EAC9B,GAAK2B,EACL,GAAI,CAEF,MAAMX,EADU,KAAK,MAAMW,CAAO,EACT,UAAY,CAAA,EACjCX,EAAS,eAAcO,EAAeP,EAAS,cAC/CA,EAAS,aAAYQ,EAAaR,EAAS,YAC3CA,EAAS,gBAAeS,EAAgBT,EAAS,eACrD,MAAMY,EAAY,KAAK,qBAAqBZ,EAAS,UAAU,EAC3D,CAACE,GAAgBU,EAAU,iBAA6BA,EAAU,cAClEA,EAAU,MAAM,UAAuB,KAAK,GAAGA,EAAU,KAAK,EAC9DA,EAAU,aAAa,UAAqB,KAAK,GAAGA,EAAU,YAAY,EAC1EA,EAAU,kBAAkB,UAA0B,KAAK,GAAGA,EAAU,iBAAiB,CAC/F,MAAQ,CAER,CACF,CAEA,MAAMX,EAAWS,EAAe,KAAK,EAAE,EACjC/C,EAAQ,KAAK,eAAe8C,CAAa,EAC/C,MAAO,CACL,QAAS,CACP,GAAID,GAAc,KAAK,WAAA,EACvB,KAAM,YACN,UAAW,IAAI,KAAA,EAAO,YAAA,EACtB,MAAOE,EAAe,OAASA,EAAe,IAAKJ,IAAO,CAAE,KAAM,OAAQ,KAAMA,CAAA,EAAI,EAAI,CAAA,CAAC,EAE3F,SAAU,CACR,MAAOC,EACP,MAAA5C,EACA,WAAA6C,EACA,aAAAN,EACA,cAAeD,EAAS,OACxB,gBAAiBrB,EAAM,OAAQiC,GAAMA,EAAE,WAAW,OAAO,CAAC,EAAE,OAC5D,aAAcT,EAAa,OAASA,EAAe,OACnD,kBAAmBC,EAAkB,OAASA,EAAoB,MAAA,CACpE,CAEJ,CAGA,OAAe,qBAAqBS,EAOlC,CACA,MAAMX,EAAkB,CAAA,EAClBC,EAAyB,CAAA,EACzBC,EAA8B,CAAA,EACpC,IAAIH,EACJ,GAAI,MAAM,QAAQY,CAAU,EAC1B,UAAWC,KAAQD,EAAY,CACzB,CAACZ,GAAgBa,GAAM,iBAA6BA,EAAK,cAC7D,MAAM5B,EAAQ4B,GAAM,SAAS,OAAS,CAAA,EACtC,GAAI,MAAM,QAAQ5B,CAAK,GACrB,UAAWK,KAAQL,EACjB,GAAIK,GAAQ,OAAOA,GAAS,SAAU,CACpC,GAAIA,EAAK,UAAY,GAAM,SAC3B,MAAMwB,EAAMxB,EAAK,KACjB,GAAI,OAAOwB,GAAQ,UAAYA,EAAI,OAAQ,CAEzC,MAAMC,EAAUD,EAAI,KAAA,EACpB,GAAIC,EAAQ,WAAW,GAAG,GAAKA,EAAQ,SAAS,GAAG,EACjD,GAAI,CACF,MAAMC,EAAS,KAAK,MAAMD,CAAO,EACjC,GAAIC,GAAU,OAAOA,GAAW,SAAU,CACpC,OAAOA,EAAO,WAAc,WAC9Bb,EAAkB,KAAKa,EAAO,SAAS,EACvCf,EAAM,KAAKe,EAAO,SAAS,GAEzBA,EAAO,cAAgB,OAAOA,EAAO,cAAiB,UACxDd,EAAa,KAAKc,EAAO,YAAY,EACvC,QACF,CACF,MAAQ,CAER,CAEFf,EAAM,KAAKa,CAAG,CAChB,CACF,EAGN,CAEF,MAAO,CACL,SAAUb,EAAM,KAAK,EAAE,EACvB,aAAAD,EACA,MAAAC,EACA,kBAAmBD,EACnB,aAAAE,EACA,kBAAAC,CAAA,CAEJ,CAEA,OAAe,UAAUc,EAA4C,CACnE,GAAI,CAGF,MAAO,CAAE,GAAI,GAAM,KADPC,SADA,OAAO,KAAKD,EAAK,QAAQ,CACN,EACF,SAAS,MAAM,CAAA,CAC9C,MAAQ,CACN,MAAO,CAAE,GAAI,GAAO,KAAMA,CAAA,CAC5B,CACF,CAEA,OAAe,wBAAwBE,EAAuB,CAC5D,GAAI,CAACA,EAAM,MAAO,GAClB,IAAIC,EAAU,EACd,MAAMC,EAAY,KAAK,IAAIF,EAAK,OAAQ,GAAG,EAC3C,QAASG,EAAI,EAAGA,EAAID,EAAWC,IAAK,CAClC,MAAMC,EAAOJ,EAAK,WAAWG,CAAC,EAC1BC,EAAO,IAAMA,IAAS,GAAKA,IAAS,IAAMA,IAAS,IAAIH,GAC7D,CACA,OAAOA,EAAUC,EAAY,EAC/B,CAEA,OAAe,eAAeF,EAAuB,CACnD,OAAOA,EAAK,QAAU,GAAKA,EAAK,WAAW,CAAC,IAAM,IAAQA,EAAK,WAAW,CAAC,IAAM,GACnF,CAEA,OAAe,eAAe1D,EAAY,CACxC,GAAI,CAACA,GAAS,OAAOA,GAAU,SAC7B,MAAO,CAAE,YAAa,OAAW,aAAc,OAAW,YAAa,MAAA,EAEzE,MAAM+D,EAAQ/D,EAAM,kBAAoBA,EAAM,cACxCgE,EAAShE,EAAM,sBAAwBA,EAAM,kBAC7CiE,EACJjE,EAAM,kBAAoB,OAAO+D,GAAU,UAAY,OAAOC,GAAW,SAAWD,EAAQC,EAAS,QACvG,MAAO,CAAE,YAAaD,EA
AO,aAAcC,EAAQ,YAAaC,CAAA,CAClE,CAEA,OAAe,YAAqB,CAClC,OAAO,KAAK,IAAA,EAAM,SAAS,EAAE,EAAI,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,MAAM,EAAG,EAAE,CACzE,CACF,CAE4CjC,EAAa,uBCzMlD,MAAMkC,CAAU,CACrB,OAAe,QAA2B,CACxC,CACE,KAAM,qBACN,SAAU,CACR,mCACA,kBACA,cACA,iBAAA,EAEF,OAAQnD,CAAA,EAEV,CACE,KAAM,gBACN,SAAU,CACR,uCACA,iCACA,yBACA,kBAAA,EAEF,OAAQiB,CAAA,CACV,EAgBF,OAAO,uBAAuBmC,EAAanD,EAAsB,CAC/D,MAAMoD,EAASF,EAAU,gBAAgBC,CAAG,EAE5C,GAAI,CAACC,EACH,MAAO,CACL,MAAO,4BAA4BD,CAAG,GACtC,QAAAnD,EACA,mBAAoBkD,EAAU,QAAQ,IAAKG,GAAMA,EAAE,IAAI,CAAA,EAI3D,GAAI,CACF,MAAMC,EAASF,EAAO,uBAAuBpD,CAAO,EAGpD,OAAIsD,GAAU,OAAOA,GAAW,UAAY,CAACA,EAAO,QAClDA,EAAO,SAAW,CAChB,GAAGA,EAAO,SACV,OAAQJ,EAAU,oBAAoBC,CAAG,CAAA,GAItCG,CACT,OAAS9E,EAAO,CACd,MAAO,CACL,MAAO,kBAAkBA,CAAK,GAC9B,QAAAwB,EACA,OAAQkD,EAAU,oBAAoBC,CAAG,CAAA,CAE7C,CACF,CAQA,OAAO,gBAAgBA,EAAqC,CAC1D,GAAI,CAACA,EAAK,OAAO,KAEjB,UAAWI,KAAWL,EAAU,QAC9B,GAAIK,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,OAInB,OAAO,IACT,CAQA,OAAO,oBAAoBJ,EAAqB,CAC9C,GAAI,CAACA,EAAK,MAAO,UAEjB,UAAWI,KAAWL,EAAU,QAC9B,GAAIK,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,KAInB,MAAO,SACT,CAQA,OAAO,YAAYJ,EAAsB,CACvC,OAAOD,EAAU,gBAAgBC,CAAG,IAAM,IAC5C,CAOA,OAAO,uBAAkC,CACvC,OAAOD,EAAU,QAAQ,IAAKG,GAAMA,EAAE,IAAI,CAC5C,CASA,OAAO,eAAeI,EAAcC,EAAoBN,EAA+B,CACrFF,EAAU,QAAQ,KAAK,CACrB,KAAAO,EACA,SAAAC,EACA,OAAAN,CAAA,CACD,CACH,CACF,CCzJO,MAAMO,EAAY,CACN,aAAe,CAC9B,kBACA,cACA,eACA,oBACA,kBACA,kCACA,iCACA,wBACA,wBACA,iBACA,cACA,mBACA,kBACA,iBACA,eACA,aACA,kBACA,UACA,qBACA,wBACA,2BACA,qBACA,oBACA,kBACA,kCAAA,EAGe,aAAe,CAC9B,CAAE,QAAS,mCAAoC,SAAU,WAAA,EACzD,CAAE,QAAS,6BAA8B,SAAU,WAAA,EACnD,CAAE,QAAS,eAAgB,SAAU,QAAA,EACrC,CAAE,QAAS,0EAA2E,SAAU,QAAA,EAChG,CAAE,QAAS,wBAAyB,SAAU,aAAA,EAC9C,CAAE,QAAS,sCAAuC,SAAU,OAAA,EAC5D,CAAE,QAAS,cAAe,SAAU,QAAA,EACpC,CAAE,QAAS,mBAAoB,SAAU,aAAA,EACzC,CAAE,QAAS,kBAAmB,SAAU,WAAA,EACxC,CAAE,QAAS,iBAAkB,SAAU,UAAA,EACvC,CAAE,QAAS,eAAgB,SAAU,SAAA,EACrC,CAAE,QAAS,aAAc,SAAU,MAAA,EACnC,CAAE,QAAS,kBAAmB,SAAU,YAAA,EACxC,CAAE,QAAS,iDAAkD,SAAU,OAAA,CAAQ,EAQjF,UAAUR,EAAsB,CAC9B,OAAKA,EACE,KAAK,aAAa,KAAMK,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EAD3C,EAEnB,CAOA,eAAeA,EAAqB,CAClC,GAAI,CAACA,EAAK,MAAO,UAEjB,SAAW,CAAE,QAAAK,EAAS,SAAAI,CAAA,IAAc,KAAK,aACvC,GAAIJ,EAAQ,KAAKL,CAAG,EAClB,OAAOS,EAIX,MAAO,SACT,CAMA,uBAAkC,CAChC,OAAO,MAAM,KAAK,IAAI,IAAI,KAAK,aAAa,IAAI,CAAC,CAAE,SAAAA,KAAeA,CAAQ,CAAC,CAAC,CAC9E,CAOA,oBAAoBC,EAA+B,CACjD,OAAO,KAAK,sBAAA,EAAwB,SAASA,EAAa,aAAa,CACzE,CAOA,mBAAmBA,EAAqC,CACtD,MAAMN,EAAU,KAAK,aAAa,KAAK,CAAC,CAAE,SAAAK,CAAA,IAAeA,IAAaC,EAAa,aAAa,EAChG,OAAON,EAAUA,EAAQ,QAAU,IACrC,CACF,CChGO,MAAMO,EAAe,CAO1B,OAAO,eAAeC,EAA6B,CACjD,MAAMC,EAAcnF,EAAAA,cAAc,YAGlC,GAAI,CAACmF,EACH,OAAOD,EAGT,GAAI,CACF,MAAME,EAAU,KAAK,MAAMF,CAAW,EAGjCE,EAAQ,OAEF,OAAOA,EAAQ,QAAW,SAEnCA,EAAQ,OAAS,CACf,CACE,KAAM,OACN,KAAMA,EAAQ,MAAA,CAChB,EAEQ,MAAM,QAAQA,EAAQ,MAAM,IAEtCA,EAAQ,OAAS,CAACA,EAAQ,MAAM,GAXhCA,EAAQ,OAAS,CAAA,EAgBnB,MAAMC,EAAuB,CAC3B,KAAM,OACN,KAAM;AAAA;AAAA,EAAqBF,CAAW,EAAA,EAIxC,OAAIC,EAAQ,OAAO,OAAS,EAC1BA,EAAQ,OAAO,OAAO,EAAG,EAAGC,CAAoB,EAEhDD,EAAQ,OAAO,KAAKC,CAAoB,EAGnC,KAAK,UAAUD,CAAO,CAC/B,OAASzF,EAAO,CAEd,eAAQ,MAAM,oCAAqCA,CAAK,EACjDuF,CACT,CACF,CACF,CCzDO,MAAMI,EAAe,CAO1B,OAAO,eAAeJ,EAA6B,CACjD,MAAMC,EAAcnF,EAAAA,cAAc,YAGlC,GAAI,CAACmF,EACH,OAAOD,EAGT,GAAI,CACF,MAAME,EAAU,KAAK,MAAMF,CAAW,EAGhCK,EAAqB;AAAA;AAAA,EAAqBJ,CAAW,GAE3D,OAAKC,EAAQ,kBAQF,OAAOA,EAAQ,mBAAsB,SAE9CA,EAAQ,kBAAoB,CAC1B,MAAO,CACL,CACE,KAAMG,CAAA,EAER,CACE,KAAMH,EAAQ,iBAAA,CAChB,CACF,EAEOA,EAAQ,kBAAkB,OAEnCA,EAAQ,kBAAkB,MAAM,QAAQ,CACtC,KAAMG,CAAA,CACP,EAvBDH,EAAQ,kBAAoB,CAC1B,MAAO,CACL,CACE,KAAMG,CAAA,CACR,C
ACF,EAqBG,KAAK,UAAUH,CAAO,CAC/B,OAASzF,EAAO,CAEd,eAAQ,MAAM,oCAAqCA,CAAK,EACjDuF,CACT,CACF,CACF,CCxCO,MAAMM,CAAY,CACvB,OAAe,UAA+B,CAC5C,CACE,KAAM,qBACN,SAAU,CACR,mCACA,kBACA,cACA,iBAAA,EAEF,SAAUP,EAAA,EAEZ,CACE,KAAM,gBACN,SAAU,CACR,uCACA,iCACA,yBACA,kBAAA,EAEF,SAAUK,EAAA,CACZ,EAgBF,OAAO,eAAehB,EAAaY,EAA6B,CAC9D,MAAMO,EAAWD,EAAY,kBAAkBlB,CAAG,EAElD,GAAI,CAACmB,EAEH,OAAOP,EAGT,GAAI,CACF,OAAOO,EAAS,eAAeP,CAAW,CAC5C,OAASvF,EAAO,CACd,eAAQ,MAAM,uBAAuB2E,CAAG,IAAK3E,CAAK,EAC3CuF,CACT,CACF,CAQA,OAAO,kBAAkBZ,EAAqC,CAC5D,GAAI,CAACA,EAAK,OAAO,KAEjB,UAAWI,KAAWc,EAAY,UAChC,GAAId,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,SAInB,OAAO,IACT,CAQA,OAAO,sBAAsBJ,EAAqB,CAChD,GAAI,CAACA,EAAK,MAAO,UAEjB,UAAWI,KAAWc,EAAY,UAChC,GAAId,EAAQ,SAAS,KAAMC,GAAYA,EAAQ,KAAKL,CAAG,CAAC,EACtD,OAAOI,EAAQ,KAInB,MAAO,SACT,CAQA,OAAO,YAAYJ,EAAsB,CACvC,OAAOkB,EAAY,kBAAkBlB,CAAG,IAAM,IAChD,CAOA,OAAO,uBAAkC,CACvC,OAAOkB,EAAY,UAAU,IAAKE,GAAMA,EAAE,IAAI,CAChD,CASA,OAAO,iBAAiBd,EAAcC,EAAoBY,EAAiC,CACzFD,EAAY,UAAU,KAAK,CACzB,KAAAZ,EACA,SAAAC,EACA,SAAAY,CAAA,CACD,CACH,CACF,CCnIO,MAAME,CAAgB,CACnB,cAAqC,KAG7C,OAAwB,cAAgB,IAAM,KACtC,IACA,MAER,YAAYC,EAAoBC,EAA8B,CAC5D,KAAK,IAAMD,EACX,KAAK,MAAQC,CACf,CAEA,OAAc,CACZ,GAAI,OAAO,WAAW,OAAU,YAAc,KAAK,cAAe,OAElE,KAAK,cAAgB,WAAW,MAAM,KAAK,UAAU,EACrD,MAAMD,EAAM,KAAK,IACXC,EAAQ,KAAK,MAEnB,WAAW,MAAS,MAAOC,EAAeC,IAAe,CACvD,MAAMC,EAAQC,EAAAA,YAAY,IAAA,EAC1B,IAAI3B,EAAM,GACV,GAAI,CACFA,EAAM,OAAOwB,GAAa,SAAWA,EAAWA,GAAU,KAAO,EACnE,MAAQ,CAAC,CACT,MAAMI,GAAUH,GAAM,QAAUD,GAAU,QAAU,OAAO,YAAA,EAG3D,IAAIZ,EAAc,GAClB,GAAI,CACF,MAAMiB,EAAYJ,GAAM,MAAQD,GAAU,KAC1C,GAAIK,EACF,GAAI,OAAOA,GAAS,SAAUjB,EAAciB,UACnCA,aAAgB,gBAAiBjB,EAAciB,EAAK,SAAA,UACpD,OAAO,SAAa,KAAeA,aAAgB,SAAU,CACpE,MAAMC,EAA2B,CAAA,EACjC,SAAW,CAACC,EAAGC,CAAC,IAAKH,EAAK,QAAA,EAAWC,EAAIC,CAAC,EAAI,OAAOC,GAAM,SAAWA,EAAI,cAC1EpB,EAAc,KAAK,UAAUkB,CAAG,CAClC,MAAWD,aAAgB,YAAc,OAAO,SAASA,CAAI,EAC3DjB,EAAc,OAAO,KAAKiB,CAAI,EAAE,SAAS,MAAM,EACxC,OAAOA,GAAS,WAAUjB,EAAc,KAAK,UAAUiB,CAAI,EAExE,MAAQ,CACNjB,EAAc,sBAChB,CAGA,IAAIqB,EAAerB,EACnB,GAAIU,EAAI,YAAY,UAAUtB,CAAG,GAAKY,GAAeA,IAAgB,uBACnE,GAAI,CACFqB,EAAef,EAAY,eAAelB,EAAKY,CAAW,EAGtDqB,IAAiBrB,GAAea,EAClCA,EAAK,KAAOQ,EACHA,IAAiBrB,GAAe,OAAOY,GAAa,UAAYA,EAAS,OAClFA,EAAS,KAAOS,EAEpB,OAASC,EAAK,CACZ,QAAQ,MAAM,mDAAoDA,CAAG,CAEvE,CAGF,MAAMC,EAAUb,EAAI,YAAA,EACdc,EAAY,IAAI,KAAA,EAAO,YAAA,EAE7B,GAAIb,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBvB,EAAK4B,EAAQhB,CAAW,EAAG,CACtG,MAAMyB,EAA8B,CAClC,GAAIF,EACJ,KAAMC,EACN,UAAW,QACX,OAAAR,EACA,IAAA5B,EACA,YAAAY,EACA,WAAY,CAAA,EAEdW,EAAM,cAAcc,CAAU,EAAE,MAAOH,GAAQ,CAC7C,QAAQ,MAAM,+CAAgDA,CAAG,CACnE,CAAC,CACH,CAGA,IAAII,EACJ,GAAI,CACFA,EAAM,MAAM,KAAK,cAAed,EAAUC,CAAI,CAChD,OAASc,EAAc,CACrB,MAAMC,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EAC/BjG,EAA4B,CAChC,GAAI0G,EACJ,KAAMC,EACN,UAAW,QACX,OAAAR,EACA,IAAA5B,EACA,YAAAY,EACA,WAAY,KAAK,MAAM4B,CAAQ,EAC/B,MAAQD,EAAuB,OAAA,EAEjC,MAAAjB,EAAI,eAAe7F,CAAQ,EACvB8F,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBvB,EAAK4B,EAAQhB,CAAW,GACnGW,EAAM,eAAe9F,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAEzC8G,CACR,CAEA,GAAI,CACF,MAAMC,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EACrC,IAAIe,EAAe,GACfC,EAAY,EACZC,EAAqB,KACrBC,EAAgB,GAGpB,GAAI,CACF,MAAMC,EAAcP,EAAI,QAAQ,IAAI,cAAc,GAAK,GACvDM,EAAgB,sBAAsB,KAAKC,CAAW,EACtD,MAAMC,EACJ,gDAAgD,KAAKD,CAAW,GAAK,mBAAmB,KAAKA,CAAW,EAE1G,GAAKD,EAgBE,CAEL,MAAMG,EAAeT,EAAI,KACzB,GAAIS,EAAc,CAChB,KAAM,CAACC,EAASC,CAAO,EAAIF,EAAa,IAAA,EAGlCG,GAAkB,SAAY,CAClC,GAAI,CACF,MAAMC,EAASH,EAAQ,UAAA,EACjBI,EAAuB,CAAA,EAC7B,IAAIC,EAAa,EAGjB,OAAa,CACX,KAAM,CAAE,KAAAC,EAAM,MAAAC,CAAA,EAAU,MAAMJ,EAAO,KAAA,EACrC,GAAIG,EAAM,MAEVF,EAAO,KAAKG,CAAK,EACjBF,GAAcE,EAAM,UACtB,CASA
,GAJAd,EAFuB,OAAO,OAAOW,EAAO,IAAKI,GAAM,OAAO,KAAKA,CAAC,CAAC,CAAC,EAClC,SAAS,MAAM,EAEnDd,EAAYW,EAGR/B,EAAI,YAAY,UAAUtB,CAAG,GAAKyC,EACpC,GAAI,CACFE,EAAgBrB,EAAI,kBAAkBtB,EAAKyC,CAAY,CACzD,OAASP,EAAK,CACZS,EAAgB,CAAE,MAAO,kBAAkBT,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAC,EAAA,CAC7F,CAIF,MAAMuB,EAAiC,CACrC,GAAItB,EACJ,KAAMC,EACN,UAAW,QACX,OAAAR,EACA,IAAA5B,EACA,YAAAY,EACA,OAAQ0B,EAAI,OACZ,GAAIA,EAAI,GACR,WAAY,KAAK,MAAMX,EAAAA,YAAY,IAAA,EAAQD,CAAK,EAChD,aAAAe,EACA,cAAAE,EACA,UAAAD,CAAA,EAEF,OAAApB,EAAI,eAAemC,CAAa,EAK9BlC,EAAM,kBACNA,EAAM,WACNA,EAAM,sBAAsBvB,EAAK4B,EAAQhB,CAAW,GAEpDW,EAAM,eAAekC,CAAa,EAAE,MAAM,IAAM,CAAC,CAAC,EAG7C,CAAE,aAAAhB,EAAc,cAAAE,EAAe,UAAAD,CAAA,CACxC,OAASR,EAAK,CACZ,eAAQ,MAAM,0BAA2BA,CAAG,EACrC,CAAE,aAAc,yBAA0B,cAAe,KAAM,UAAW,CAAA,CACnF,CACF,GAAA,EAGCI,EAAY,iBAAmBY,EAGhCZ,EAAM,IAAI,SAASW,EAAS,CAC1B,OAAQX,EAAI,OACZ,WAAYA,EAAI,WAChB,QAASA,EAAI,OAAA,CACd,EAGDG,EAAe,0BACfE,EAAgB,IAClB,MACEF,EAAe,mCAEnB,KAvGoB,CAGlB,MAAMiB,EAAc,MADNpB,EAAI,MAAA,EACc,YAAA,EAChCI,EAAYgB,EAAY,WACxB,MAAMC,EAAMtC,EAAgB,cACxByB,EACEY,EAAY,YAAcC,EAC5BlB,EAAe,OAAO,KAAKiB,CAAW,EAAE,SAAS,MAAM,EAGvDjB,EAAe,GADH,OAAO,KAAKiB,EAAY,MAAM,EAAGC,CAAG,CAAC,EAC3B,SAAS,MAAM,CAAC;AAAA,iBAAoBD,EAAY,WAAaC,GAAK,eAAA,CAAgB,UAG1GlB,EAAe,IAAIC,CAAS,gBAEhC,CAwFF,MAAQ,CACND,EAAe,uBACjB,CAGA,GACE,CAACG,GACDtB,EAAI,YAAY,UAAUtB,CAAG,GAC7ByC,GACAA,IAAiB,wBAEjB,GAAI,CACFE,EAAgBrB,EAAI,kBAAkBtB,EAAKyC,CAAY,CACzD,OAASP,EAAK,CACZS,EAAgB,CAAE,MAAO,kBAAkBT,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAC,EAAA,CAC7F,CAGF,MAAMzG,EAA4B,CAChC,GAAI0G,EACJ,KAAMC,EACN,UAAW,QACX,OAAAR,EACA,IAAA5B,EACA,YAAAY,EACA,OAAQ0B,EAAI,OACZ,GAAIA,EAAI,GACR,WAAY,KAAK,MAAME,CAAQ,EAC/B,aAAAC,EACA,cAAAE,EACA,UAAAD,CAAA,EAEF,OAAApB,EAAI,eAAe7F,CAAQ,EAKzB,CAACmH,GACDrB,EAAM,kBACNA,EAAM,WACNA,EAAM,sBAAsBvB,EAAK4B,EAAQhB,CAAW,GAEpDW,EAAM,eAAe9F,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAExC6G,CACT,OAASjH,EAAO,CACd,MAAMmH,EAAWb,EAAAA,YAAY,IAAA,EAAQD,EAC/BjG,EAA4B,CAChC,GAAI0G,EACJ,KAAMC,EACN,UAAW,QACX,OAAAR,EACA,IAAA5B,EACA,YAAAY,EACA,WAAY,KAAK,MAAM4B,CAAQ,EAC/B,MAAQnH,EAAgB,OAAA,EAE1B,OAAAiG,EAAI,eAAe7F,CAAQ,EACvB8F,EAAM,kBAAoBA,EAAM,WAAaA,EAAM,sBAAsBvB,EAAK4B,EAAQhB,CAAW,GACnGW,EAAM,eAAe9F,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,EAExC6G,CACT,CACF,CACF,CAEA,SAAgB,CACV,KAAK,gBACP,WAAW,MAAQ,KAAK,cACxB,KAAK,cAAgB,KAEzB,CACF,CC3SO,MAAMsB,EAAe,CAE1B,YACUtC,EACAuC,EACAC,EAA+B,CACrC,sBAAuB,IAAM,GAC7B,cAAe,SAAY,CAAC,EAC5B,eAAgB,SAAY,CAAC,CAAA,EAE/B,CAPQ,KAAA,IAAAxC,EACA,KAAA,YAAAuC,EACA,KAAA,OAAAC,CAKP,CATK,UAAiB,KAWzB,OAAc,CACZ,GAAI,CAcF,IAASC,EAAT,SAAkBC,EAAcC,EAAiC,CAC/D,GAAI,CACF,GAAI,OAAOD,GAAY,SAAU,OAAOA,EACxC,GAAIA,aAAmB,IAAK,OAAOA,EAAQ,SAAA,EAC3C,MAAME,EAAWF,EAAQ,UAAYC,EAC/BE,EAAOH,EAAQ,UAAYA,EAAQ,MAAQ,YAC3CI,EAAWJ,EAAQ,KAAO,IAAIA,EAAQ,IAAI,GAAK,GAC/C7I,EAAO6I,EAAQ,MAAQA,EAAQ,UAAY,IACjD,MAAO,GAAGE,CAAQ,KAAKC,CAAI,GAAGC,CAAQ,GAAGjJ,CAAI,EAC/C,MAAQ,CACN,MAAO,EACT,CACF,EAESkJ,EAAT,SAAqBC,EAAoBJ,EAA8B,CACrE,OAAO,SAAwBF,EAAcO,EAAgB,CAC3D,MAAMvE,EAAM+D,EAASC,EAASE,CAAQ,EAChCtC,GAAU,OAAOoC,GAAY,UAAYA,EAAQ,OAASA,EAAQ,OAAS,OAAO,YAAA,EAElFQ,EAAU7C,EAAAA,YAAY,IAAA,EACtBQ,EAAUsC,EAAK,IAAI,YAAA,EACnBrC,EAAY,IAAI,KAAA,EAAO,YAAA,EACvBsC,EAA+B,CAAA,EACrC,IAAIC,EAAa,EACbC,EAAkB,GAEtB,MAAMC,EAAMP,EAASN,EAAU1B,GAAa,CAC1C,MAAMwC,EAA4B,CAAA,EAClC,IAAIC,EAAgB,EAChBC,EAAgB,EACpB,MAAMC,EAAoB,IAAM,OAAO3C,EAAI,QAAQ,cAAc,GAAK,EAAE,EACxEA,EAAI,GAAG,OAAS4C,GAAkB,CAChCH,GAAiBG,EAAM,OACvB,MAAMC,EAAKF,EAAA,EAIX,GAHsB,sBAAsB,KAAKE,CAAE,EAIjDL,EAAgB,KAAKI,CAAK,EAC1BF,GAAiBE,EAAM,WAClB,CAEL,GAAIF,GAAiBI,EAAS,OAC9B,MAAMC,EAAYD,EAAUJ,EACxBE,EAAM,QAAUG,GAClBP,EAAgB,KAAKI,CAAK,EAC1BF,GAAiBE,EAAM,SAEvBJ,EAAgB,KAAKI,EAAM,SAAS,EAAGG,CA
AS,CAAC,EACjDL,GAAiBK,EAErB,CACF,CAAC,EACD/C,EAAI,GAAG,MAAO,IAAM,CAClB,MAAME,EAAWb,EAAAA,YAAY,IAAA,EAAQ6C,EACrC,IAAI/B,EAAe,GACnB,GAAI,CACF,MAAM0C,EAAKF,EAAA,EACLrC,EAAgB,sBAAsB,KAAKuC,CAAE,EAC7CG,EAAY,OAAO,OAAOR,CAAe,EACzCS,EAAeR,EAAgBO,EAAU,QAAUA,EAAU,SAAWF,EAC9E,IAAII,EAASF,EAGb,GAAI,CAAC1C,EAAe,CAClB,MAAM6C,EAAkB,OAAOnD,EAAI,QAAQ,kBAAkB,GAAK,EAAE,EAAE,YAAA,EAGtE,GADsB,CAACiD,GAAgBC,EAAO,QAAU,oBAAoB,KAAKC,CAAe,EAE9F,GAAI,CACF,MAAMC,EAAO,QAAQ,WAAW,EAC5BD,EAAgB,SAAS,MAAM,EAAGD,EAASE,EAAK,WAAWJ,CAAS,EAC/DG,EAAgB,SAAS,IAAI,EAAGD,EAASE,EAAK,qBAAqBJ,CAAS,EAC5EG,EAAgB,SAAS,SAAS,IAAGD,EAASE,EAAK,YAAYJ,CAAS,EACnF,MAAQ,CAER,CAEJ,CAGe,UAAU,KAAKH,CAAE,GAAK,sCAAsC,KAAKA,CAAE,GACpEvC,GACZH,EAAe+C,EAAO,SAAS,MAAM,EAEjCD,GAAgB,CAAC3C,IAAeH,GAAgB;AAAA,kBAC3C+C,EAAO,SAChB/C,EAAe,IAAI+C,EAAO,MAAM,SAASD,EAAe,eAAiB,EAAE,WAE/E,MAAQ,CACN9C,EAAe,uBACjB,CACA,IAAI7B,EAAc,GAClB,GAAI,CACF,MAAM+E,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzExE,EAAc+E,EAAS,SAAS,MAAM,EAClCJ,IAAc3E,GAAe;AAAA,gBACnC,MAAQ,CACNA,EAAc,sBAChB,CACA,IAAI+B,EAAqB,KACzB,GAAI8B,EAAK,IAAI,YAAY,UAAUzE,CAAG,EACpC,GAAI,CAEF2C,EAAgB8B,EAAK,IAAI,kBAAkBzE,EAAKyC,CAAY,CAC9D,OAASrB,EAAG,CACVuB,EAAgB,CACd,MAAO,CAAC,CAAE,KAAM,OAAQ,KAAMF,GAAgB,mBAAoB,EAClE,SAAU,CAAE,MAAO,OAAOrB,CAAC,CAAA,CAAE,CAEjC,CAEF,MAAM3F,EAA4B,CAChC,GAAI0G,EACJ,KAAMC,EACN,UAAW8B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAA5B,EACA,YAAAY,EACA,OAAQ0B,EAAI,WACZ,GAAIA,EAAI,YAAc,KAAOA,EAAI,WAAa,IAC9C,WAAY,KAAK,MAAME,CAAQ,EAC/B,aAAAC,EACA,cAAAE,EACA,UAAWoC,CAAA,EAEb,GAAI,CACF,MAAMa,EAAI,IAAI,IAAI5F,CAAG,EACrBvE,EAAS,KAAOmK,EAAE,SAClBnK,EAAS,KAAOmK,EAAE,QACpB,MAAQ,CAAC,CACTnB,EAAK,IAAI,eAAehJ,CAAQ,EAI9BgJ,EAAK,OAAO,kBACZA,EAAK,OAAO,WACZA,EAAK,OAAO,sBAAsBzE,EAAK4B,EAAQhB,CAAW,GAE1D6D,EAAK,OAAO,eAAehJ,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,CAEvD,CAAC,EACD6G,EAAI,GAAG,QAAUJ,GAAe,CAC9B,MAAMM,EAAWb,EAAAA,YAAY,IAAA,EAAQ6C,EACrC,IAAI5D,EAAc,GAClB,GAAI,CACF,MAAM+E,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzExE,EAAc+E,EAAS,SAAS,MAAM,EAClCJ,IAAc3E,GAAe;AAAA,gBACnC,MAAQ,CACNA,EAAc,sBAChB,CACA,MAAMnF,EAA4B,CAChC,GAAI0G,EACJ,KAAMC,EACN,UAAW8B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAA5B,EACA,YAAAY,EACA,WAAY,KAAK,MAAM4B,CAAQ,EAC/B,MAAON,EAAI,OAAA,EAEb,GAAI,CACF,MAAM0D,EAAI,IAAI,IAAI5F,CAAG,EACrBvE,EAAS,KAAOmK,EAAE,SAClBnK,EAAS,KAAOmK,EAAE,QACpB,MAAQ,CAAC,CACTnB,EAAK,IAAI,eAAehJ,CAAQ,EAI9BgJ,EAAK,OAAO,kBACZA,EAAK,OAAO,WACZA,EAAK,OAAO,sBAAsBzE,EAAK4B,EAAQhB,CAAW,GAE1D6D,EAAK,OAAO,eAAehJ,CAAQ,EAAE,MAAM,IAAM,CAAC,CAAC,CAEvD,CAAC,EACG8I,KAAmBjC,CAAG,CAC5B,CAAC,EACKuD,EAAgBhB,EAAI,MACpBiB,EAAcjB,EAAI,IACxB,OAAAA,EAAI,MAAQ,SAAUK,EAAYa,EAAgBC,EAAU,CAC1D,GAAI,CACF,GAAId,GAASP,EAAaS,EAAS,CACjC,MAAMa,EAAM,OAAO,SAASf,CAAK,EAAIA,EAAQ,OAAO,KAAKA,EAAOa,GAAY,MAAM,EAC5EV,EAAYD,EAAUT,EACxBsB,EAAI,QAAUZ,GAChBX,EAAmB,KAAKuB,CAAG,EAC3BtB,GAAcsB,EAAI,SAElBvB,EAAmB,KAAKuB,EAAI,SAAS,EAAGZ,CAAS,CAAC,EAClDV,GAAcU,EAElB,CACF,MAAQ,CAAC,CACT,OAAOQ,EAAc,KAAK,KAAMX,EAAOa,EAAUC,CAAE,CACrD,EACAnB,EAAI,IAAM,SAAUK,EAAaa,EAAgBC,EAAU,CACzD,GAAI,CACF,GAAId,GAASP,EAAaS,EAAS,CACjC,MAAMa,EAAM,OAAO,SAASf,CAAK,EAAIA,EAAQ,OAAO,KAAKA,EAAOa,GAAY,MAAM,EAC5EV,EAAYD,EAAUT,EACxBsB,EAAI,QAAUZ,GAChBX,EAAmB,KAAKuB,CAAG,EAC3BtB,GAAcsB,EAAI,SAElBvB,EAAmB,KAAKuB,EAAI,SAAS,EAAGZ,CAAS,CAAC,EAClDV,GAAcU,EAElB,CAGA,GAAI,CAACT,EAAiB,CACpBA,EAAkB,GAClB,GAAI,CACF,MAAMe,EAAW,OAAO,OAAOjB,CAAkB,EAC3Ca,EAAeZ,EAAagB,EAAS,QAAUA,EAAS,SAAWP,EACzE,IAAIxE,EAAc+E,EAAS,SAAS,MAAM,EACtCJ,IAAc3E,GAAe;AAAA,iBAEjC,MAAMsF,EAAUzB,EAAK,OAAO,sBAAsBzE,EAAK4B,EAAQhB,CAAW,EAE1E,GAAI6D,EAAK,OAAO,kBAAoBA,EAAK,OAAO,WAAayB,EAAS,CACpE,MAAM7D,EAA8B,CAClC,GAAIF,EACJ,KAA
MC,EACN,UAAW8B,IAAa,SAAW,QAAU,OAC7C,OAAAtC,EACA,IAAA5B,EACA,YAAAY,EACA,WAAY,CAAA,EAEd6D,EAAK,OAAO,cAAcpC,CAAU,EAAE,MAAM,IAAM,CAAC,CAAC,CACtD,CACF,MAAQ,CAAC,CACX,CACF,MAAQ,CAAC,CACT,OAAOyD,EAAY,KAAK,KAAMZ,EAAOa,EAAUC,CAAE,CACnD,EACOnB,CACT,CACF,EAjQA,MAAMsB,EAAO,QAAQ,MAAM,EACrBC,EAAQ,QAAQ,OAAO,EAC7B,GAAI,KAAK,UAAW,OAEpB,KAAK,UAAY,CACf,YAAaD,EAAK,QAClB,aAAcC,EAAM,QACpB,QAASD,EAAK,IACd,SAAUC,EAAM,GAAA,EAElB,MAAM3B,EAAO,KACPW,EAAU,KAAK,IAAI,aAwPzBe,EAAK,QAAU9B,EAAY,KAAK,UAAU,YAAa,OAAO,EAC9D8B,EAAK,IAAM,SAAoBE,EAAQL,EAAU,CAC/C,MAAMM,EAAKH,EAAK,QAAgBE,EAAGL,CAAE,EACrC,OAAAM,EAAE,IAAA,EACKA,CACT,EACI,KAAK,cAAgB,KACvBF,EAAM,QAAU/B,EAAY,KAAK,UAAU,aAAc,QAAQ,EACjE+B,EAAM,IAAM,SAAoBC,EAAQL,EAAU,CAChD,MAAMM,EAAKF,EAAM,QAAgBC,EAAGL,CAAE,EACtC,OAAAM,EAAE,IAAA,EACKA,CACT,EAEJ,MAAY,CAEZ,CACF,CAEA,SAAgB,CACd,GAAI,KAAK,UAAW,CAClB,GAAI,CACF,MAAMH,EAAO,QAAQ,MAAM,EACrBC,EAAQ,QAAQ,OAAO,EAC7BD,EAAK,QAAU,KAAK,UAAU,YAC9BA,EAAK,IAAM,KAAK,UAAU,QAC1BC,EAAM,QAAU,KAAK,UAAU,aAC/BA,EAAM,IAAM,KAAK,UAAU,QAC7B,MAAQ,CAAC,CACT,KAAK,UAAY,IACnB,CACF,CACF,CCxRO,MAAMG,CAAe,CAC1B,OAAe,SAAkC,KACzC,UAAY,GACZ,OAGA,YACA,YACA,iBACA,UACA,mBAAqB,IACrB,UACA,YACA,gBAGA,gBACA,eAER,YAAYC,EAA+B,GAAI,CAE7C,KAAK,YAAcA,EAAO,aAAe,IAAIhG,GAG7C,KAAK,YAAcgG,EAAO,aAAeC,EAAAA,KAGzC,KAAK,iBAAmBD,EAAO,iBAC/B,KAAK,UAAYA,EAAO,UAExB,KAAK,OAAS,CACZ,aAAcA,EAAO,cAAgB,KACrC,oBAAqBA,EAAO,qBAAuB,GACnD,YAAaA,EAAO,aAAe,GACnC,UACEA,EAAO,YACLE,GACA,CAAC,EACCA,EAAM,KACN,CAACA,EAAM,IAAI,SAAS,SAAS,GAC7B,CAACA,EAAM,IAAI,SAAS,oBAAoB,GACxC,CAACA,EAAM,IAAI,WAAW,OAAO,IAGnC,kBACEF,EAAO,oBACN,CAACxG,EAAayC,IAAyB,CACtC,GAAI,CACF,OAAO1C,EAAU,uBAAuBC,EAAKyC,CAAY,CAC3D,MAAQ,CACN,OAAO,IACT,CACF,EAAA,EAIJ,MAAMkE,EAAaxL,EAAK,KAAKyL,EAAG,OAAA,EAAU,iBAAkB,MAAO,WAAW,QAAQ,GAAG,EAAE,EACrFC,EAAcL,EAAO,aAAerL,EAAK,KAAKwL,EAAY,aAAa,EACvEG,EAAmB3L,EAAK,KAAKwL,EAAY,WAAW,EACpDI,EAAuBP,EAAO,sBAAwBrL,EAAK,KAAKwL,EAAY,iBAAiB,EAGnG,GAAI,CACF,KAAK,UAAY,IAAI3L,EAAW6L,CAAW,CAC7C,MAAQ,CAAC,CACT,GAAI,CACF,KAAK,YAAc,IAAI7L,EAAW8L,CAAgB,CACpD,MAAQ,CAAC,CACT,GAAI,CACF,KAAK,gBAAkB,IAAI9L,EAAW+L,CAAoB,CAC5D,MAAQ,CAAC,CACX,CAKA,OAAO,YAAYP,EAA+C,CAChE,OAAKD,EAAe,WAClBA,EAAe,SAAW,IAAIA,EAAeC,CAAM,GAE9CD,EAAe,QACxB,CAKA,OAAc,CACZ,GAAI,KAAK,UACP,OAGF,MAAMS,EAAwB,CAC5B,YAAa,KAAK,YAClB,aAAc,KAAK,OAAO,aAC1B,YAAa,KAAK,OAAO,YACzB,UAAW,KAAK,OAAO,UACvB,kBAAmB,KAAK,OAAO,kBAC/B,YAAa,KAAK,YAClB,eAAiB,GAAM,KAAK,eAAe,CAAC,CAAA,EAG9C,KAAK,gBAAkB,IAAI3F,EAAgB2F,EAAQ,CACjD,iBAAkB,KAAK,iBACvB,UAAW,KAAK,UAChB,sBAAuB,CAAChH,EAAK4B,EAAQC,IAAS,KAAK,sBAAsB7B,EAAK4B,EAAQC,CAAI,EAC1F,cAAgB6E,GAAU,KAAK,cAAcA,CAAK,EAClD,eAAiBA,GAAU,KAAK,eAAeA,CAAK,CAAA,CACrD,EACD,KAAK,gBAAgB,MAAA,EAErB,KAAK,eAAiB,IAAI9C,GAAeoD,EAAQ,KAAK,OAAO,oBAAqB,CAChF,iBAAkB,KAAK,iBACvB,UAAW,KAAK,UAChB,sBAAuB,CAAChH,EAAK4B,EAAQC,IAAS,KAAK,sBAAsB7B,EAAK4B,EAAQC,CAAI,EAC1F,cAAgB6E,GAAU,KAAK,cAAcA,CAAK,EAClD,eAAiBA,GAAU,KAAK,eAAeA,CAAK,CAAA,CACrD,EACD,KAAK,eAAe,MAAA,EAEpB,KAAK,UAAY,EACnB,CAKA,MAAa,CACN,KAAK,YAGV,KAAK,iBAAiB,QAAA,EACtB,KAAK,gBAAgB,QAAA,EACrB,KAAK,UAAY,GACnB,CAKA,UAAoB,CAClB,OAAO,KAAK,SACd,CAKA,aAAaF,EAA6C,CACxD,KAAK,OAAS,CAAE,GAAG,KAAK,OAAQ,GAAGA,CAAA,CACrC,CAKQ,eAAeE,EAA8B,CAE/C,KAAK,WACP,KAAK,UAAU,KAAK,gBAAiB,CAAE,WAAYA,EAAO,EAGxD,KAAK,iBAAmB,KAAK,OAAO,UAAUA,CAAK,GAAKA,EAAM,eAChE,KAAK,gBAAgB,KAAK,kBAAmB,CAC3C,GAAIA,EAAM,GACV,IAAKA,EAAM,IACX,OAAQA,EAAM,OACd,OAAQA,EAAM,OACd,WAAYA,EAAM,WAClB,UAAWA,EAAM,KACjB,cAAeA,EAAM,aAAA,CACtB,CAGL,CAKQ,sBAAsB1G,EAAa4B,EAAgBhB,EAA8B,CAWvF,GAVI,KAAK,WACP,KAAK,UAAU,KAAK,iCAAkC,CACpD,IAAAZ,EACA,OAAA4B,EACA,QAAS,CAAC,CAAChB,EACX,WAAYA,GAAa,MAAA,CAC1B,EAIC,CAAC,KAAK,YAAY,UAAUZ,CAAG,EACjC,OAAI,KAAK,WACP,KAAK,UAAU,KAAK,2BAA4B,C
AAE,IAAAA,EAAK,EAErD,KAAK,WAAa,KAAK,OAAO,aAChC,KAAK,UAAU,MAAM,4CAA6C,CAChE,IAAAA,EACA,OAAA4B,CAAA,CACD,EAEI,GAIT,GAAI5B,EAAI,SAAS,6BAA6B,GAAKA,EAAI,SAAS,mCAAmC,EAEjG,MAAI,EAAAA,EAAI,SAAS,uBAAuB,EAa1C,GALI4B,IAAW,QAKX,CAAChB,EACH,MAAO,GAGT,GAAI,CACF,MAAMiB,EAAO,KAAK,MAAMjB,CAAW,EAG7BqG,EAAcpF,EAAK,UAAY,MAAM,QAAQA,EAAK,QAAQ,GAAKA,EAAK,SAAS,OAAS,EACtFqF,EAAYrF,EAAK,QAAU,OAAOA,EAAK,QAAW,SAClDsF,EAAWtF,EAAK,OAAS,OAAOA,EAAK,OAAU,SAC/CuF,EAAcvF,EAAK,UAAY,MAAM,QAAQA,EAAK,QAAQ,GAAKA,EAAK,SAAS,OAAS,EAGtFwF,EACJxF,EAAK,SAAS,UAAY,MAAM,QAAQA,EAAK,QAAQ,QAAQ,GAAKA,EAAK,QAAQ,SAAS,OAAS,EAKnG,OAFgBoF,GAAeC,GAAaC,GAAYC,GAAeC,CAGzE,OAASnF,EAAK,CACZ,OAAI,KAAK,aACP,KAAK,YAAY,MAAM,wDAAyD,CAC9E,IAAAlC,EACA,OAAA4B,EACA,MAAOM,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAA,CACvD,EAGI,EACT,CACF,CAKA,MAAc,cAAcwE,EAAuC,CACjE,GAAI,CAAC,KAAK,kBAAoB,CAAC,KAAK,UAClC,OAIE,KAAK,WACP,KAAK,UAAU,KAAK,aAAc,CAAE,MAAAA,EAAO,EAI7C,IAAIY,EAAuD,CAAA,EAC3D,GAAIZ,EAAM,YACR,GAAI,CACF,MAAMa,EAAc,KAAK,MAAMb,EAAM,WAAW,EAGhD,GAAIa,EAAY,UAAY,MAAM,QAAQA,EAAY,QAAQ,EAAG,CAE/D,MAAMC,EAAcD,EAAY,SAAS,OAAQE,GAAaA,EAAI,OAAS,MAAM,EAAE,IAAA,EAC/ED,GAAa,UACX,OAAOA,EAAY,SAAY,SACjCF,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAME,EAAY,QAAS,EACpD,MAAM,QAAQA,EAAY,OAAO,IAE1CF,EAAeE,EAAY,QAAQ,IAAK9J,IAAe,CACrD,KAAMA,EAAK,MAAQ,OACnB,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,GAGR,SAAW6J,EAAY,OAErBD,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAMC,EAAY,OAAQ,UACnDA,EAAY,MAErBD,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAMC,EAAY,MAAO,UAClDA,EAAY,UAAY,MAAM,QAAQA,EAAY,QAAQ,EAAG,CAEtE,MAAMG,EAAcH,EAAY,SAAS,OAAQ/D,GAAWA,EAAE,OAAS,MAAM,EAAE,IAAA,EAC3EkE,GAAa,OAAS,MAAM,QAAQA,EAAY,KAAK,IACvDJ,EAAeI,EAAY,MAAM,IAAKhK,IAAe,CACnD,KAAM,OACN,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,EAEN,SAAW6J,EAAY,SAAS,UAAY,MAAM,QAAQA,EAAY,QAAQ,QAAQ,EAAG,CAEvF,MAAMG,EAAcH,EAAY,QAAQ,SAAS,OAAQ/D,GAAWA,EAAE,OAAS,MAAM,EAAE,IAAA,EACnFkE,GAAa,OAAS,MAAM,QAAQA,EAAY,KAAK,IACvDJ,EAAeI,EAAY,MAAM,IAAKhK,IAAe,CACnD,KAAM,OACN,KAAMA,EAAK,MAAQ,KAAK,UAAUA,CAAI,CAAA,EACtC,EAEN,CACF,OAASwE,EAAK,CACR,KAAK,aACP,KAAK,YAAY,MAAM,gDAAiD,CACtE,IAAKwE,EAAM,IACX,MAAOxE,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,CAAA,CACvD,EAGHoF,EAAa,KAAK,CAAE,KAAM,OAAQ,KAAM,2BAA4B,CACtE,CAIF,MAAMK,EAAgB,CACpB,aAAc,KAAK,YAAY,eAAejB,EAAM,GAAG,EACvD,gBAAiB,EAAA,EAGnB,GAAI,CACE,KAAK,WACP,KAAK,UAAU,KAAK,wCAAyC,CAC3D,UAAW,KAAK,UAChB,QAASA,EAAM,EAAA,CAChB,EAGH,MAAMxI,EAAW,MAAM,KAAK,iBAAiB,aAAa,KAAK,UAAW,CACxE,YAAa,SACb,SAAUyJ,CAAA,CACX,EAGD,KAAK,eAAe,IAAIjB,EAAM,GAAIxI,EAAS,SAAS,EAEhD,KAAK,WACP,KAAK,UAAU,KAAK,0BAA2B,CAC7C,QAASwI,EAAM,GACf,UAAWxI,EAAS,SAAA,CACrB,CAEL,OAAS7C,EAAO,CACV,KAAK,aACP,KAAK,YAAY,MAAM,oCAAqC,CAC1D,IAAKqL,EAAM,IACX,UAAW,KAAK,UAChB,MAAOrL,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,CAAA,CAC7D,CAEL,CACF,CAKA,MAAc,eAAeqL,EAAuC,CAClE,GAAI,CAAC,KAAK,kBAAoB,CAAC,KAAK,UAAW,OAG3C,KAAK,WACP,KAAK,UAAU,KAAK,cAAe,CAAE,MAAAA,EAAO,EAG9C,MAAMkB,EAAY,KAAK,eAAe,IAAIlB,EAAM,EAAE,EAClD,GAAI,CAACkB,EAEH,OAOF,GAAI,EAFalB,EAAM,eAAe,SAAS,OAAO,OAAS,GAAKA,EAAM,eAAe,OAAO,OAAS,GAE1F,CAET,KAAK,WACP,KAAK,UAAU,KAAK,uCAAwC,CAC1D,QAASA,EAAM,GACf,UAAAkB,CAAA,CACD,EAEH,MACF,CAGA,MAAMC,EAA0C,CAC9C,aAAc,KAAK,YAAY,eAAenB,EAAM,GAAG,EACvD,OAAQA,EAAM,OACd,SAAUA,EAAM,UAAA,EAIlB,GAAIA,EAAM,eAAe,SAAU,CACjC,MAAMoB,EAAgBpB,EAAM,cAAc,SACtCoB,EAAc,QAAOD,EAAmB,MAAQC,EAAc,OAC9DA,EAAc,QAAOD,EAAmB,MAAQC,EAAc,OAC9DA,EAAc,eAAcD,EAAmB,aAAeC,EAAc,cAC5EA,EAAc,gBAAeD,EAAmB,cAAgBC,EAAc,eAC9EA,EAAc,oBAAmBD,EAAmB,kBAAoBC,EAAc,mBACtFA,EAAc,OAAMD,EAAmB,KAAOC,EAAc,KAClE,CAGA,IAAIC,EAA+B,CAAA,EACnC,GAAIrB,EAAM,cAAe,CACvB,MAAMtH,EAASsH,EAAM,cAEjBtH,GAAQ,SAAS,OAAS,MAAM,QAAQA,EAAO,QAAQ,KAAK,EAE9D2I,EAAgB3I,EAAO,QAAQ,MACtBA,GAAQ,OAAS,MAAM,QAAQA,EAAO,KAAK,EAEpD2I,EAA
gB3I,EAAO,MACdA,GAAQ,MACjB2I,EAAc,KAAK,CAAE,KAAM,OAAiB,KAAM,iBAAiB3I,EAAO,KAAK,GAAI,EAGnF2I,EAAgB,CAAA,CAEpB,MAAWrB,EAAM,cAEfqB,EAAc,KAAK,CAAE,KAAM,OAAiB,KAAM,yBAA0B,EAG9E,GAAI,CACF,MAAMC,EAAkB,CACtB,MAAOD,EACP,SAAUF,EACV,IAAKnB,EAAM,eAAe,SAAS,EAAA,EAIjChL,EAAAA,cAAc,mBAChBsM,EAAW,IAAM,KAAK,UAAUtB,CAAK,GAGvC,MAAM,KAAK,iBAAiB,cAAc,KAAK,UAAWkB,EAAWI,CAAU,EAG/E,KAAK,eAAe,OAAOtB,EAAM,EAAE,CACrC,OAASrL,EAAO,CACV,KAAK,aACP,KAAK,YAAY,MAAM,qCAAsC,CAC3D,IAAKqL,EAAM,IACX,UAAAkB,EACA,UAAW,KAAK,UAChB,MAAOvM,aAAiB,MAAQA,EAAM,QAAU,OAAOA,CAAK,CAAA,CAC7D,EAGH,KAAK,eAAe,OAAOqL,EAAM,EAAE,CACrC,CACF,CACF,CC1dA,MAAMuB,EAAW,CACP,OACA,iBAA4C,KAC5C,YACA,YACA,aACA,UAER,aAAc,CACZ,MAAMC,EAAYxM,EAAAA,cAAc,cAChC,KAAK,aAAewM,GAAa,KACjC,KAAK,UAAYxM,gBAAc,WAAa,GAE5C,KAAK,YAAc,IAAI8E,GAGvB,MAAMvF,EACJS,EAAAA,cAAc,YAAcP,EAAK,KAAKyL,EAAG,OAAA,EAAU,iBAAkB,UAAW,WAAW,QAAQ,GAAG,MAAM,EAC9G,KAAK,OAAS,IAAI5L,EAAWC,CAAO,EAEpC,IAAIkN,EAASzM,EAAAA,cAAc,UACvByM,EAAO,WAAW,QAAQ,IAAY,WAAaA,EAAO,MAAM,CAAC,EAC5DA,EAAO,WAAW,OAAO,MAAY,UAAYA,EAAO,MAAM,CAAC,GACxE,MAAMC,EAAS1M,gBAAc,QAAU,GACjC2M,EAAiB3M,gBAAc,gBAAkB,GAIvD,GAFqBA,EAAAA,cAAc,uBAAyB,CAAC,EAAE,KAAK,WAAayM,IAE7DA,EAClB,GAAI,CACF,KAAK,iBAAmB,IAAIG,mBAAiB,CAC3C,OAAAH,EACA,eAAAE,EACA,OAAAD,EACA,OAAQ,KAAK,MAAA,CACd,CACH,OAAS/M,EAAO,CACVL,EAAW,eACb,KAAK,OAAO,MAAM,yCAA0CK,CAAc,CAE9E,CAGF,KAAK,YAAckL,EAAe,YAAY,CAC5C,aAAc,KAAK,aACnB,YAAa7K,EAAAA,cAAc,aAAeA,EAAAA,cAAc,cACxD,YAAa,KAAK,YAClB,YAAa+K,EAAAA,KACb,iBAAkB,KAAK,kBAAoB,OAC3C,UAAW,KAAK,SAAA,CACjB,CACH,CAEO,OAAc,CACnB,KAAK,YAAY,MAAA,CACnB,CACF,CAGA,MAAM8B,GAAa,IAAIN,GACvBM,GAAW,MAAA"}
|