@omote/core 0.2.1 → 0.2.2
- package/dist/index.d.mts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +30 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +30 -2
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/audio/MicrophoneCapture.ts","../src/audio/RingBuffer.ts","../src/audio/AudioScheduler.ts","../src/audio/AudioChunkCoalescer.ts","../src/audio/LAMPipeline.ts","../src/audio/SyncedAudioPipeline.ts","../src/animation/EmotionToBlendshapeMapper.ts","../src/telemetry/exporters/console.ts","../src/telemetry/exporters/otlp.ts","../src/telemetry/OmoteTelemetry.ts","../src/telemetry/types.ts","../src/cache/ModelCache.ts","../src/utils/runtime.ts","../src/inference/onnxLoader.ts","../src/inference/blendshapeUtils.ts","../src/inference/Wav2Vec2Inference.ts","../src/audio/FullFacePipeline.ts","../src/inference/WhisperInference.ts","../src/inference/Wav2ArkitCpuInference.ts","../src/inference/createLipSync.ts","../src/inference/SileroVADInference.ts","../src/inference/SileroVADWorker.ts","../src/inference/createSileroVAD.ts","../src/inference/Emotion2VecInference.ts","../src/inference/SafariSpeechRecognition.ts","../src/emotion/Emotion.ts","../src/ai/adapters/AgentCoreAdapter.ts","../src/ai/orchestration/ConversationOrchestrator.ts","../src/ai/tenancy/TenantManager.ts","../src/ai/utils/AudioSyncManager.ts","../src/ai/utils/InterruptionHandler.ts","../src/cache/huggingFaceCDN.ts","../src/utils/transformersCacheClear.ts","../src/animation/types.ts","../src/animation/AnimationGraph.ts","../src/animation/audioEnergy.ts"],"sourcesContent":["/**\n * Microphone capture - renderer-agnostic audio input\n *\n * Captures audio from the microphone and emits PCM chunks.\n * Works in any JavaScript environment with Web Audio API.\n *\n * @category Audio\n */\n\nimport { EventEmitter, type OmoteEvents } from '../events';\n\nexport interface MicrophoneCaptureConfig {\n /** Target sample rate (default: 16000 for speech processing) */\n sampleRate?: number;\n /** Chunk size in samples (default: 1600 = 100ms at 16kHz) */\n chunkSize?: number;\n}\n\nexport class MicrophoneCapture {\n private config: Required<MicrophoneCaptureConfig>;\n private stream: MediaStream | null = null;\n private context: AudioContext | null = null;\n private processor: ScriptProcessorNode | null = null;\n private buffer: Float32Array = new Float32Array(0);\n private _isRecording = false;\n private _loggedFirstChunk = false;\n\n constructor(\n private events: EventEmitter<OmoteEvents>,\n config: MicrophoneCaptureConfig = {}\n ) {\n this.config = {\n sampleRate: config.sampleRate ?? 16000,\n chunkSize: config.chunkSize ?? 
1600,\n };\n }\n\n get isRecording(): boolean {\n return this._isRecording;\n }\n\n get isSupported(): boolean {\n return typeof navigator !== 'undefined' && !!navigator.mediaDevices?.getUserMedia;\n }\n\n async start(): Promise<void> {\n if (!this.isSupported) {\n this.events.emit('error', {\n code: 'MICROPHONE_NOT_SUPPORTED',\n message: 'Microphone not supported in this browser',\n });\n return;\n }\n\n if (this._isRecording) return;\n\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: this.config.sampleRate },\n channelCount: 1,\n echoCancellation: true,\n noiseSuppression: true,\n autoGainControl: true,\n },\n });\n\n this.context = new AudioContext({ sampleRate: this.config.sampleRate });\n\n // Resume AudioContext if suspended (browser autoplay policy)\n if (this.context.state === 'suspended') {\n await this.context.resume();\n }\n\n const source = this.context.createMediaStreamSource(this.stream);\n\n // Use ScriptProcessor for broad compatibility\n this.processor = this.context.createScriptProcessor(4096, 1, 1);\n\n this.processor.onaudioprocess = (e) => {\n const input = e.inputBuffer.getChannelData(0);\n\n // Calculate audio level\n let rms = 0;\n let peak = 0;\n for (let i = 0; i < input.length; i++) {\n const abs = Math.abs(input[i]);\n rms += input[i] * input[i];\n if (abs > peak) peak = abs;\n }\n rms = Math.sqrt(rms / input.length);\n\n this.events.emit('audio.level', { rms, peak });\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.buffer.length + input.length);\n newBuffer.set(this.buffer);\n newBuffer.set(input, this.buffer.length);\n this.buffer = newBuffer;\n\n // Emit chunks\n let chunkCount = 0;\n while (this.buffer.length >= this.config.chunkSize) {\n const chunk = this.buffer.slice(0, this.config.chunkSize);\n this.buffer = this.buffer.slice(this.config.chunkSize);\n\n const pcm = this.floatToPCM16(chunk);\n this.events.emit('audio.chunk', {\n pcm,\n timestamp: performance.now(),\n });\n chunkCount++;\n }\n // Log first emission for debugging\n if (chunkCount > 0 && !this._loggedFirstChunk) {\n console.log('[MicrophoneCapture] Emitting audio chunks:', chunkCount);\n this._loggedFirstChunk = true;\n }\n };\n\n source.connect(this.processor);\n this.processor.connect(this.context.destination);\n\n this._isRecording = true;\n console.log('[MicrophoneCapture] Started recording, context state:', this.context.state);\n } catch (err) {\n this.events.emit('error', {\n code: 'MICROPHONE_ERROR',\n message: (err as Error).message,\n details: err,\n });\n }\n }\n\n stop(): void {\n if (this.processor) {\n this.processor.disconnect();\n this.processor = null;\n }\n\n if (this.context) {\n this.context.close();\n this.context = null;\n }\n\n if (this.stream) {\n this.stream.getTracks().forEach((t) => t.stop());\n this.stream = null;\n }\n\n this.buffer = new Float32Array(0);\n this._isRecording = false;\n }\n\n private floatToPCM16(float32: Float32Array): Int16Array {\n const pcm = new Int16Array(float32.length);\n for (let i = 0; i < float32.length; i++) {\n const s = Math.max(-1, Math.min(1, float32[i]));\n pcm[i] = s < 0 ? 
s * 0x8000 : s * 0x7fff;\n }\n return pcm;\n }\n}\n","/**\n * Ring buffer for audio sample accumulation\n *\n * Efficiently accumulates audio samples and provides\n * contiguous buffers for inference without memory allocation churn.\n *\n * @category Audio\n */\n\nexport class RingBuffer {\n private buffer: Float32Array;\n private writeIndex = 0;\n private isFull = false;\n\n constructor(private readonly size: number) {\n this.buffer = new Float32Array(size);\n }\n\n /**\n * Write samples to the ring buffer\n * Converts Int16Array PCM to Float32\n */\n write(pcm: Int16Array): void {\n for (let i = 0; i < pcm.length; i++) {\n this.buffer[this.writeIndex] = pcm[i] / 32768.0;\n this.writeIndex = (this.writeIndex + 1) % this.size;\n\n if (this.writeIndex === 0) {\n this.isFull = true;\n }\n }\n }\n\n /**\n * Write float samples directly\n */\n writeFloat(samples: Float32Array): void {\n for (let i = 0; i < samples.length; i++) {\n this.buffer[this.writeIndex] = samples[i];\n this.writeIndex = (this.writeIndex + 1) % this.size;\n\n if (this.writeIndex === 0) {\n this.isFull = true;\n }\n }\n }\n\n /**\n * Get a contiguous copy of the buffer contents in chronological order\n * Returns null if buffer isn't full yet\n */\n read(): Float32Array | null {\n if (!this.isFull) return null;\n\n const output = new Float32Array(this.size);\n\n // Copy from writeIndex to end (oldest samples)\n const firstPart = this.buffer.subarray(this.writeIndex);\n output.set(firstPart, 0);\n\n // Copy from 0 to writeIndex (newest samples)\n const secondPart = this.buffer.subarray(0, this.writeIndex);\n output.set(secondPart, firstPart.length);\n\n return output;\n }\n\n /**\n * Check if buffer has enough samples\n */\n get hasData(): boolean {\n return this.isFull;\n }\n\n /**\n * Get current fill level (0-1)\n */\n get fillLevel(): number {\n if (this.isFull) return 1;\n return this.writeIndex / this.size;\n }\n\n /**\n * Reset the buffer\n */\n reset(): void {\n this.buffer.fill(0);\n this.writeIndex = 0;\n this.isFull = false;\n }\n}\n","/**\n * AudioScheduler - Enterprise-grade Web Audio API scheduling\n *\n * Implements the lookahead scheduling pattern from Chris Wilson's\n * \"A Tale of Two Clocks\" - the authoritative guide on Web Audio timing.\n *\n * Key Features:\n * - Uses AudioContext.currentTime (hardware clock) for sample-accurate timing\n * - Pre-schedules audio chunks for gapless playback\n * - Tracks scheduled sources for cleanup\n * - Provides playback state monitoring\n *\n * @see https://web.dev/articles/audio-scheduling\n * @category Audio\n */\n\nexport interface AudioSchedulerOptions {\n /** Sample rate in Hz (default: 16000 for speech) */\n sampleRate?: number\n /** Number of audio channels (default: 1 for mono) */\n channels?: number\n}\n\nexport class AudioScheduler {\n private context: AudioContext | null = null\n private nextPlayTime = 0\n private scheduledSources: Array<{ source: AudioBufferSourceNode; gainNode: GainNode }> = []\n private isPlaying = false\n\n constructor(private readonly options: AudioSchedulerOptions = {}) {}\n\n /**\n * Initialize AudioContext with specified sample rate\n *\n * Note: This is now a no-op. 
AudioContext is created lazily on first schedule()\n * to avoid browser autoplay policy issues (requires user gesture).\n */\n async initialize(): Promise<void> {\n // No-op - context will be created lazily in ensureContext()\n console.log('[AudioScheduler] Ready for lazy initialization')\n }\n\n /**\n * Eagerly create and warm up the AudioContext\n *\n * Call this when a playback session starts (e.g., when AI response begins).\n * The AudioContext needs time to initialize the audio hardware — on Windows\n * this can take 50-100ms. By warming up early (before audio data arrives),\n * the context is fully ready when schedule() is first called.\n *\n * Must be called after a user gesture (click/tap) for autoplay policy.\n */\n async warmup(): Promise<void> {\n await this.ensureContext()\n }\n\n /**\n * Ensure AudioContext is created and ready\n * Called lazily on first schedule() - requires user gesture\n */\n private async ensureContext(): Promise<AudioContext> {\n if (this.context && this.context.state !== 'closed') {\n return this.context\n }\n\n const sampleRate = this.options.sampleRate ?? 16000\n this.context = new AudioContext({ sampleRate })\n\n // Resume if suspended (browser autoplay policy)\n if (this.context.state === 'suspended') {\n await this.context.resume()\n }\n\n console.log(`[AudioScheduler] AudioContext initialized at ${sampleRate}Hz`)\n return this.context\n }\n\n /**\n * Schedule an audio chunk for playback\n *\n * Uses Web Audio's hardware-accurate clock for sample-perfect timing.\n * Chunks are scheduled immediately, not when they should play - this\n * ensures gapless playback even if main thread stalls.\n *\n * @param audioData - Float32Array of audio samples\n * @returns Scheduled playback time in AudioContext seconds\n */\n async schedule(audioData: Float32Array): Promise<number> {\n // Lazy initialization (requires user gesture)\n const ctx = await this.ensureContext()\n const channels = this.options.channels ?? 
1\n\n // Initialize playback timing on first chunk\n // Add 50ms lookahead so the node is fully enqueued before playback begins\n // (prevents skipping the first few ms when AudioContext was just created)\n if (!this.isPlaying) {\n this.nextPlayTime = ctx.currentTime + 0.05\n this.isPlaying = true\n }\n\n // Create audio buffer\n const audioBuffer = ctx.createBuffer(channels, audioData.length, ctx.sampleRate)\n audioBuffer.getChannelData(0).set(audioData)\n\n // Create gain node for fade control\n const gainNode = ctx.createGain()\n gainNode.gain.value = 1.0\n gainNode.connect(ctx.destination)\n\n // Create and schedule source\n const source = ctx.createBufferSource()\n source.buffer = audioBuffer\n source.connect(gainNode) // Route through gain node for fade control\n\n // Schedule at precise time for gapless playback\n const scheduleTime = this.nextPlayTime\n source.start(scheduleTime)\n\n // Track scheduled source with its gain node\n this.scheduledSources.push({ source, gainNode })\n\n // Update next play time\n const duration = audioData.length / ctx.sampleRate\n this.nextPlayTime = scheduleTime + duration\n\n return scheduleTime\n }\n\n /**\n * Get current audio clock time\n *\n * This is the hardware-accurate time, NOT JavaScript time.\n * Use this for synchronizing visual animations to audio.\n *\n * @returns Current time in AudioContext seconds\n */\n getCurrentTime(): number {\n if (!this.context) return 0\n return this.context.currentTime\n }\n\n /**\n * Get scheduled playback end time\n */\n getPlaybackEndTime(): number {\n return this.nextPlayTime\n }\n\n /**\n * Check if all scheduled audio has finished playing\n */\n isComplete(): boolean {\n if (!this.context || !this.isPlaying) return false\n return this.context.currentTime >= this.nextPlayTime\n }\n\n /**\n * Cancel all scheduled audio with smooth fade-out\n *\n * Applies a linear fade-out to all playing sources and stops them gracefully.\n * Prevents audio clicks/pops by ramping gain to zero before stopping.\n *\n * @param fadeOutMs - Fade-out duration in milliseconds (default: 50ms)\n * @returns Promise that resolves when fade-out completes\n */\n async cancelAll(fadeOutMs: number = 50): Promise<void> {\n if (!this.context || this.scheduledSources.length === 0) {\n return\n }\n\n const ctx = this.context\n const currentTime = ctx.currentTime\n const fadeOutSec = fadeOutMs / 1000\n\n // Apply fade-out to all scheduled sources\n for (const { source, gainNode } of this.scheduledSources) {\n try {\n // Ramp gain from current value to zero\n gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime)\n gainNode.gain.linearRampToValueAtTime(0.0, currentTime + fadeOutSec)\n\n // Stop source after fade completes\n source.stop(currentTime + fadeOutSec)\n } catch (err) {\n // Source may have already stopped naturally - ignore error\n }\n }\n\n // Clear tracking arrays\n this.scheduledSources = []\n this.isPlaying = false\n this.nextPlayTime = 0\n\n // Wait for fade-out to complete\n await new Promise(resolve => setTimeout(resolve, fadeOutMs))\n }\n\n /**\n * Reset scheduler state for new playback session\n * Stops any orphaned sources that weren't cleaned up by cancelAll()\n */\n reset(): void {\n // Stop any still-playing sources before clearing\n if (this.context) {\n const now = this.context.currentTime\n for (const { source, gainNode } of this.scheduledSources) {\n try {\n gainNode.gain.setValueAtTime(0, now)\n source.stop(now)\n } catch {\n // Already stopped\n }\n }\n }\n this.nextPlayTime = 0\n this.isPlaying = false\n 
this.scheduledSources = []\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n if (this.context) {\n this.context.close()\n this.context = null\n }\n this.scheduledSources = []\n this.isPlaying = false\n }\n}\n","/**\n * AudioChunkCoalescer - Combine small network chunks into optimal buffers\n *\n * Network streaming often delivers audio in small chunks (e.g., 32ms from TTS APIs).\n * Creating an AudioBufferSourceNode for each tiny chunk is inefficient and can cause\n * overhead from object creation/GC.\n *\n * This class implements a double-buffering pattern: accumulate small chunks in a\n * temporary buffer, then flush to playback queue when threshold is reached.\n *\n * Benefits:\n * - Reduces AudioBufferSourceNode overhead (fewer nodes = less GC pressure)\n * - Configurable buffer size for optimal playback chunk duration\n * - Maintains sample-accurate timing despite buffering\n *\n * Based on patterns from HLS.js and production streaming implementations.\n *\n * @category Audio\n */\n\nexport interface AudioChunkCoalescerOptions {\n /**\n * Target duration in milliseconds for combined chunks\n * Default: 200ms (balances latency vs overhead)\n *\n * Smaller values = lower latency, more overhead\n * Larger values = higher latency, less overhead\n */\n targetDurationMs?: number\n\n /**\n * Sample rate in Hz\n * Default: 16000 (speech quality)\n */\n sampleRate?: number\n}\n\nexport class AudioChunkCoalescer {\n private tempBuffer: Uint8Array[] = []\n private readonly targetBytes: number\n\n constructor(private readonly options: AudioChunkCoalescerOptions = {}) {\n const targetMs = options.targetDurationMs ?? 200\n const sampleRate = options.sampleRate ?? 16000\n\n // Calculate target bytes: (duration_s) * (samples/s) * (2 bytes per Int16 sample)\n this.targetBytes = (targetMs / 1000) * sampleRate * 2\n }\n\n /**\n * Add a chunk to the temporary buffer\n *\n * @param chunk - Uint8Array containing Int16 PCM audio\n * @returns Combined buffer if threshold reached, null otherwise\n */\n add(chunk: Uint8Array): ArrayBuffer | null {\n // Add to temporary buffer\n this.tempBuffer.push(chunk)\n\n // Calculate total bytes buffered\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n\n // If we've reached the threshold, combine and return\n if (totalBytes >= this.targetBytes) {\n return this.flush()\n }\n\n return null\n }\n\n /**\n * Flush remaining buffered data\n *\n * Call this when the stream ends to ensure all audio is processed,\n * even if it doesn't reach the target threshold.\n *\n * @returns Combined buffer, or null if buffer is empty\n */\n flush(): ArrayBuffer | null {\n if (this.tempBuffer.length === 0) {\n return null\n }\n\n // Calculate total size\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n\n // Combine all chunks into single buffer\n const combined = new Uint8Array(totalBytes)\n let offset = 0\n for (const chunk of this.tempBuffer) {\n combined.set(chunk, offset)\n offset += chunk.length\n }\n\n // Clear temp buffer\n this.tempBuffer = []\n\n return combined.buffer\n }\n\n /**\n * Get current buffer fill level (0-1)\n */\n get fillLevel(): number {\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n return Math.min(1, totalBytes / this.targetBytes)\n }\n\n /**\n * Get current buffered duration in milliseconds\n */\n getBufferedDurationMs(): number {\n const sampleRate = this.options.sampleRate ?? 
16000\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n const samples = totalBytes / 2 // Int16 = 2 bytes per sample\n return (samples / sampleRate) * 1000\n }\n\n /**\n * Get number of chunks currently buffered\n */\n get chunkCount(): number {\n return this.tempBuffer.length\n }\n\n /**\n * Reset the coalescer\n */\n reset(): void {\n this.tempBuffer = []\n }\n}\n","/**\n * LAMPipeline - Coordinate LAM (Wav2Vec2) inference with frame synchronization\n *\n * Manages the buffering and processing pipeline for LAM lip sync:\n * 1. Accumulates audio samples in a ring buffer\n * 2. Triggers LAM inference when buffer reaches required size (16000 samples @ 16kHz = 1.0s)\n * 3. Queues resulting blendshape frames with precise timestamps\n * 4. Provides frames synchronized to AudioContext clock\n *\n * Key Design Decisions:\n * - Ring buffer pattern for efficient sample accumulation (no allocation churn)\n * - Frame queue with timestamps for deterministic playback\n * - Timestamp-based frame retrieval (not callback) for renderer flexibility\n *\n * Based on patterns from Chrome Audio Worklet design and Web Audio clock management.\n *\n * @see https://developer.chrome.com/blog/audio-worklet-design-pattern\n * @category Audio\n */\n\nimport { RingBuffer } from './RingBuffer'\nimport type { LipSyncBackend } from '../inference/LipSyncBackend'\n\nexport interface LAMFrame {\n /** 52 ARKit blendshape weights */\n frame: Float32Array\n /** AudioContext time when this frame should be displayed */\n timestamp: number\n}\n\nexport interface LAMPipelineOptions {\n /**\n * Sample rate in Hz (must match audio playback)\n * Default: 16000\n */\n sampleRate?: number\n\n /**\n * LAM inference callback\n * Called each time LAM processes a buffer\n */\n onInference?: (frameCount: number) => void\n\n /**\n * Error callback for inference failures\n */\n onError?: (error: Error) => void\n}\n\nexport class LAMPipeline {\n private readonly REQUIRED_SAMPLES = 16000 // 1.0s at 16kHz (LAM requirement)\n private readonly FRAME_RATE = 30 // LAM outputs 30fps\n\n private buffer: Float32Array = new Float32Array(0)\n private bufferStartTime = 0\n private frameQueue: LAMFrame[] = []\n\n /**\n * Last successfully retrieved frame\n * Used as fallback when no new frame is available to prevent avatar freezing\n */\n private lastFrame: Float32Array | null = null\n\n constructor(private readonly options: LAMPipelineOptions = {}) {}\n\n /**\n * Push audio samples into the pipeline\n *\n * Accumulates samples and triggers LAM inference when buffer is full.\n * Multiple calls may be needed to accumulate enough samples.\n *\n * @param samples - Float32Array of audio samples\n * @param timestamp - AudioContext time when these samples start playing\n * @param lam - LAM inference engine\n */\n async push(samples: Float32Array, timestamp: number, lam: LipSyncBackend): Promise<void> {\n // Track buffer start time when empty\n if (this.buffer.length === 0) {\n this.bufferStartTime = timestamp\n }\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.buffer.length + samples.length)\n newBuffer.set(this.buffer, 0)\n newBuffer.set(samples, this.buffer.length)\n this.buffer = newBuffer\n\n // Process ALL complete chunks (not just one)\n // Critical for AgentCore which delivers entire sentences at once (30-50K+ samples)\n // Without the while loop, samples pile up and LAM falls behind audio playback\n while (this.buffer.length >= this.REQUIRED_SAMPLES) {\n await this.processBuffer(lam)\n }\n }\n\n /**\n * 
Process accumulated buffer through LAM inference\n */\n private async processBuffer(lam: LipSyncBackend): Promise<void> {\n try {\n // Extract exactly REQUIRED_SAMPLES for inference\n const toProcess = this.buffer.slice(0, this.REQUIRED_SAMPLES)\n const processedStartTime = this.bufferStartTime\n\n // Keep remaining samples for next inference\n this.buffer = this.buffer.slice(this.REQUIRED_SAMPLES)\n\n // Update start time for remaining buffer\n const processedDuration = this.REQUIRED_SAMPLES / (this.options.sampleRate ?? 16000)\n this.bufferStartTime = processedStartTime + processedDuration\n\n // Run LAM inference\n const result = await lam.infer(toProcess)\n\n // Queue frames with timestamps\n const frameDuration = 1 / this.FRAME_RATE\n for (let i = 0; i < result.blendshapes.length; i++) {\n const frame = result.blendshapes[i]\n const timestamp = processedStartTime + (i * frameDuration)\n this.frameQueue.push({ frame, timestamp })\n }\n\n // Notify callback\n this.options.onInference?.(result.blendshapes.length)\n } catch (error) {\n this.options.onError?.(error as Error)\n\n // Clear buffer on error to prevent repeated failures\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n }\n }\n\n /**\n * Get the frame that should be displayed at the current time\n *\n * Automatically removes frames that have already been displayed.\n * This prevents memory leaks from accumulating old frames.\n *\n * Discard Window (prevents premature frame discarding):\n * - WebGPU: 0.5s (LAM inference 20-100ms + RAF jitter + React stalls)\n * - WASM: 1.0s (LAM inference 50-500ms + higher variability)\n *\n * Last-Frame-Hold: Returns last valid frame instead of null to prevent\n * avatar freezing when between frames (RAF at 60fps vs LAM at 30fps).\n *\n * @param currentTime - Current AudioContext time\n * @param lam - LAM inference engine (optional, for backend detection)\n * @returns Current frame, or last frame as fallback, or null if no frames yet\n */\n getFrameForTime(currentTime: number, lam?: { backend: 'webgpu' | 'wasm' | null }): Float32Array | null {\n // Dynamic discard window based on backend performance characteristics\n const discardWindow = lam?.backend === 'wasm' ? 1.0 : 0.5\n\n // Remove frames that are too old (already displayed)\n let discardedCount = 0\n while (this.frameQueue.length > 0 && this.frameQueue[0].timestamp < currentTime - discardWindow) {\n const discarded = this.frameQueue.shift()!\n discardedCount++\n\n // Log frame discards for debugging sync issues\n if (discardedCount === 1) {\n const ageMs = ((currentTime - discarded.timestamp) * 1000).toFixed(0)\n console.warn('[LAM] Frame(s) discarded as too old', {\n ageMs,\n discardWindowMs: discardWindow * 1000,\n queueLength: this.frameQueue.length,\n backend: lam?.backend ?? 
'unknown'\n })\n }\n }\n\n // Return the frame that should be playing now\n if (this.frameQueue.length > 0 && this.frameQueue[0].timestamp <= currentTime) {\n const { frame } = this.frameQueue.shift()!\n this.lastFrame = frame // Cache for fallback\n return frame\n }\n\n // Last-frame-hold: Return cached frame instead of null to prevent freezing\n // This handles RAF running at 60fps while LAM produces 30fps\n return this.lastFrame\n }\n\n /**\n * Get all frames in the queue (for debugging/monitoring)\n */\n getQueuedFrames(): LAMFrame[] {\n return [...this.frameQueue]\n }\n\n /**\n * Get current buffer fill level (0-1)\n */\n get fillLevel(): number {\n return Math.min(1, this.buffer.length / this.REQUIRED_SAMPLES)\n }\n\n /**\n * Get number of frames queued\n */\n get queuedFrameCount(): number {\n return this.frameQueue.length\n }\n\n /**\n * Get buffered audio duration in seconds\n */\n get bufferedDuration(): number {\n return this.buffer.length / (this.options.sampleRate ?? 16000)\n }\n\n /**\n * Flush remaining buffered audio\n *\n * Processes any remaining audio in the buffer, even if less than REQUIRED_SAMPLES.\n * This ensures the final audio chunk generates blendshape frames.\n *\n * Should be called when audio stream ends to prevent losing the last 0-1 seconds.\n *\n * @param lam - LAM inference engine\n */\n async flush(lam: LipSyncBackend): Promise<void> {\n if (this.buffer.length === 0) {\n return // Nothing to flush\n }\n\n // Pad buffer to REQUIRED_SAMPLES (LAM expects exactly 16000 samples)\n const padded = new Float32Array(this.REQUIRED_SAMPLES)\n padded.set(this.buffer, 0)\n // Remaining samples are already zero (Float32Array default)\n\n // Process the padded buffer\n const processedStartTime = this.bufferStartTime\n\n try {\n // Run LAM inference\n const result = await lam.infer(padded)\n\n // Queue frames with timestamps\n // Only queue frames that correspond to actual audio (not padding)\n const actualDuration = this.buffer.length / (this.options.sampleRate ?? 16000)\n const frameDuration = 1 / this.FRAME_RATE\n const actualFrameCount = Math.ceil(actualDuration * this.FRAME_RATE)\n\n for (let i = 0; i < Math.min(actualFrameCount, result.blendshapes.length); i++) {\n const frame = result.blendshapes[i]\n const timestamp = processedStartTime + (i * frameDuration)\n this.frameQueue.push({ frame, timestamp })\n }\n\n // Clear buffer after flushing\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n\n // Notify callback\n this.options.onInference?.(Math.min(actualFrameCount, result.blendshapes.length))\n } catch (error) {\n this.options.onError?.(error as Error)\n\n // Clear buffer on error\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n }\n }\n\n /**\n * Adjust all queued frame timestamps by an offset\n *\n * Used for synchronization when audio scheduling time differs from\n * the estimated time used during LAM processing.\n *\n * @param offset - Time offset in seconds to add to all timestamps\n */\n adjustTimestamps(offset: number): void {\n for (const frame of this.frameQueue) {\n frame.timestamp += offset\n }\n }\n\n /**\n * Reset the pipeline\n */\n reset(): void {\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n this.frameQueue = []\n this.lastFrame = null // Clear last-frame-hold cache\n }\n}\n","/**\n * SyncedAudioPipeline - Audio playback + LAM lip sync coordinator\n *\n * Orchestrates the complete pipeline for synchronized audio playback and lip sync:\n * 1. Network chunks → Coalescer → Optimized buffers\n * 2. 
Audio buffers → Scheduler → Gapless playback (immediate, never blocks)\n * 3. Audio buffers → LAM Pipeline → Blendshape frames (background, fire-and-forget)\n * 4. Frames synchronized to AudioContext clock → Renderer\n *\n * Key Architecture Pattern: Audio-First, LAM-Background\n * - Audio chunks are scheduled for playback immediately (never waits for LAM)\n * - LAM inference runs in background without blocking the audio path\n * - Lip sync starts ~1 second after audio (LAM needs 16000 samples to infer)\n * - Once LAM catches up, frames stay synchronized to AudioContext clock\n *\n * This decoupled design prevents LAM inference (50-300ms) from blocking audio\n * scheduling, which caused audible stuttering when audio arrived as a continuous\n * stream (e.g., single-call TTS from ElevenLabs via AgentCore).\n *\n * @see https://web.dev/articles/audio-scheduling (Web Audio clock patterns)\n * @category Audio\n */\n\nimport { AudioScheduler } from './AudioScheduler'\nimport { AudioChunkCoalescer } from './AudioChunkCoalescer'\nimport { LAMPipeline } from './LAMPipeline'\nimport { EventEmitter } from '../events/EventEmitter'\nimport type { LipSyncBackend } from '../inference/LipSyncBackend'\n\n/**\n * Safely convert an ArrayBuffer of PCM16 bytes to Float32 samples.\n * Handles odd-length buffers by truncating to the nearest even byte boundary.\n */\nfunction pcm16ToFloat32(buffer: ArrayBuffer): Float32Array {\n // Int16Array requires even byte length — truncate if odd\n const byteLen = buffer.byteLength & ~1\n const int16 = byteLen === buffer.byteLength\n ? new Int16Array(buffer)\n : new Int16Array(buffer, 0, byteLen / 2)\n const float32 = new Float32Array(int16.length)\n for (let i = 0; i < int16.length; i++) {\n float32[i] = int16[i] / 32768\n }\n return float32\n}\n\nexport interface SyncedAudioPipelineOptions {\n /** Sample rate in Hz (default: 16000) */\n sampleRate?: number\n /** Target chunk duration in ms for coalescing (default: 200) */\n chunkTargetMs?: number\n /** LAM inference engine */\n lam: LipSyncBackend\n}\n\nexport interface SyncedAudioPipelineEvents {\n /** New frame ready for display */\n frame_ready: Float32Array\n /** Playback has completed */\n playback_complete: void\n /** First audio chunk scheduled, playback starting */\n playback_start: number\n /** Error occurred */\n error: Error\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown\n}\n\nexport class SyncedAudioPipeline extends EventEmitter<SyncedAudioPipelineEvents> {\n private scheduler: AudioScheduler\n private coalescer: AudioChunkCoalescer\n private lamPipeline: LAMPipeline\n\n private playbackStarted = false\n private monitorInterval: number | null = null\n private frameAnimationId: number | null = null\n\n constructor(private readonly options: SyncedAudioPipelineOptions) {\n super()\n\n const sampleRate = options.sampleRate ?? 16000\n\n this.scheduler = new AudioScheduler({ sampleRate })\n this.coalescer = new AudioChunkCoalescer({\n sampleRate,\n targetDurationMs: options.chunkTargetMs ?? 
200,\n })\n this.lamPipeline = new LAMPipeline({\n sampleRate,\n onError: (error) => {\n this.emit('error', error)\n },\n })\n }\n\n /**\n * Initialize the pipeline\n */\n async initialize(): Promise<void> {\n await this.scheduler.initialize()\n }\n\n /**\n * Start a new playback session\n *\n * Resets all state and prepares for incoming audio chunks.\n * Audio will be scheduled immediately as chunks arrive (no buffering).\n */\n start(): void {\n // Stop any active session first (prevents duplicate frame loops/monitors)\n this.stopMonitoring()\n\n this.scheduler.reset()\n this.coalescer.reset()\n this.lamPipeline.reset()\n this.playbackStarted = false\n\n // Eagerly warm up AudioContext so audio hardware is ready when\n // first audio chunk arrives. Without this, AudioContext creation\n // happens at schedule time and the first 50-100ms of audio stutters\n // while Windows WASAPI initializes.\n this.scheduler.warmup()\n\n // Start frame animation loop\n this.startFrameLoop()\n\n // Start playback monitoring\n this.startMonitoring()\n }\n\n /**\n * Receive audio chunk from network\n *\n * Audio-first design: schedules audio immediately, LAM runs in background.\n * This prevents LAM inference (50-300ms) from blocking audio scheduling,\n * which caused audible stuttering with continuous audio streams.\n *\n * @param chunk - Uint8Array containing Int16 PCM audio\n */\n async onAudioChunk(chunk: Uint8Array): Promise<void> {\n // Coalesce small chunks into optimal buffers\n const combined = this.coalescer.add(chunk)\n if (!combined) {\n return // Not enough data yet\n }\n\n // Convert PCM16 bytes to Float32 samples (handles odd-length buffers safely)\n const float32 = pcm16ToFloat32(combined)\n\n // Schedule audio immediately — never wait for LAM\n const scheduleTime = await this.scheduler.schedule(float32)\n\n // Emit playback_start on first scheduled chunk\n if (!this.playbackStarted) {\n this.playbackStarted = true\n this.emit('playback_start', scheduleTime)\n }\n\n // LAM runs in background — never blocks audio scheduling.\n // lam.infer() takes 50-300ms when it triggers (every 16000 samples).\n // If we awaited here, the NDJSON processing loop in useVoice.tsx would\n // stall, preventing new audio chunks from being scheduled. 
The already-\n // scheduled audio plays out and runs dry → gap → audible stutter.\n this.lamPipeline.push(float32, scheduleTime, this.options.lam).catch(err => {\n this.emit('error', err)\n })\n }\n\n /**\n * End of audio stream\n *\n * Flushes any remaining buffered data.\n */\n async end(): Promise<void> {\n // Flush remaining coalesced data\n const remaining = this.coalescer.flush()\n if (remaining) {\n const chunk = new Uint8Array(remaining)\n await this.onAudioChunk(chunk)\n }\n\n // Flush remaining LAM buffer to process final audio chunk\n // This ensures blendshapes are generated for the last 0-1 seconds of audio\n await this.lamPipeline.flush(this.options.lam)\n }\n\n /**\n * Stop playback immediately with smooth fade-out\n *\n * Gracefully cancels all audio playback and LAM processing:\n * - Fades out audio over specified duration (default: 50ms)\n * - Cancels pending LAM inferences\n * - Clears all buffers and queues\n * - Emits 'playback_complete' event\n *\n * Use this for interruptions (e.g., user barge-in during AI speech).\n *\n * @param fadeOutMs - Fade-out duration in milliseconds (default: 50ms)\n * @returns Promise that resolves when fade-out completes\n */\n async stop(fadeOutMs: number = 50): Promise<void> {\n // Stop monitoring and frame loop\n this.stopMonitoring()\n\n // Cancel audio playback with fade-out\n await this.scheduler.cancelAll(fadeOutMs)\n\n // Clear all buffers\n this.coalescer.reset()\n this.lamPipeline.reset()\n this.playbackStarted = false\n\n // Emit completion event\n this.emit('playback_complete', undefined as any)\n }\n\n /**\n * Start frame animation loop\n *\n * Uses requestAnimationFrame to check for new LAM frames.\n * Synchronized to AudioContext clock (not visual refresh rate).\n *\n * Frame Emission Strategy:\n * - LAMPipeline uses last-frame-hold to prevent null returns\n * - Always emit frames (even repeated frames) to maintain smooth animation\n * - Renderer is responsible for detecting duplicate frames if needed\n */\n private startFrameLoop(): void {\n const updateFrame = () => {\n const currentTime = this.scheduler.getCurrentTime()\n const frame = this.lamPipeline.getFrameForTime(currentTime, this.options.lam)\n\n if (frame) {\n this.emit('frame_ready', frame)\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame)\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame)\n }\n\n /**\n * Start monitoring for playback completion\n */\n private startMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval)\n }\n\n this.monitorInterval = window.setInterval(() => {\n if (this.scheduler.isComplete() && this.lamPipeline.queuedFrameCount === 0) {\n this.emit('playback_complete', undefined as any)\n this.stopMonitoring()\n }\n }, 100)\n }\n\n /**\n * Stop monitoring\n */\n private stopMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval)\n this.monitorInterval = null\n }\n\n if (this.frameAnimationId) {\n cancelAnimationFrame(this.frameAnimationId)\n this.frameAnimationId = null\n }\n }\n\n /**\n * Get current pipeline state (for debugging/monitoring)\n */\n getState() {\n return {\n playbackStarted: this.playbackStarted,\n coalescerFill: this.coalescer.fillLevel,\n lamFill: this.lamPipeline.fillLevel,\n queuedFrames: this.lamPipeline.queuedFrameCount,\n currentTime: this.scheduler.getCurrentTime(),\n playbackEndTime: this.scheduler.getPlaybackEndTime(),\n }\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n this.stopMonitoring()\n 
this.scheduler.dispose()\n this.coalescer.reset()\n this.lamPipeline.reset()\n }\n}\n","/**\n * Emotion to ARKit Blendshape Mapper\n *\n * Converts Emotion2VecInference output to upper face ARKit blendshapes for\n * expressive avatar animation. Maps 4 emotion categories (neutral, happy, angry, sad)\n * to 11 upper face blendshapes (brows, eyes, cheeks).\n *\n * Supports two blend modes:\n * - 'dominant': Uses only the strongest emotion (simpler, more stable)\n * - 'weighted': Blends all emotions by probability (more nuanced, e.g., bittersweet)\n *\n * Also supports energy modulation to scale emotion intensity by audio energy,\n * making expressions stronger during emphasized speech.\n *\n * @example Basic usage\n * ```typescript\n * import { EmotionToBlendshapeMapper } from '@omote/core';\n * import { Emotion2VecInference } from '@omote/core';\n *\n * const emotion = new Emotion2VecInference({ modelUrl: '/models/emotion.onnx' });\n * const mapper = new EmotionToBlendshapeMapper();\n *\n * // Process emotion frame\n * const result = await emotion.infer(audioSamples);\n * const blendshapes = mapper.mapFrame(result.dominant);\n *\n * // Apply to avatar\n * for (const [name, value] of Object.entries(blendshapes)) {\n * avatar.setBlendshape(name, value);\n * }\n * ```\n *\n * @example Weighted blending for nuanced expressions\n * ```typescript\n * const mapper = new EmotionToBlendshapeMapper({\n * blendMode: 'weighted',\n * minBlendProbability: 0.1,\n * });\n *\n * // Frame with mixed emotions: { happy: 0.6, sad: 0.3, neutral: 0.1 }\n * // Result: bittersweet expression (smiling but worried brow)\n * const blendshapes = mapper.mapFrame(emotionFrame);\n * ```\n *\n * @example Energy-modulated emotion\n * ```typescript\n * import { AudioEnergyAnalyzer } from '@omote/core';\n *\n * const energyAnalyzer = new AudioEnergyAnalyzer();\n * const mapper = new EmotionToBlendshapeMapper({ energyModulation: true });\n *\n * // In animation loop\n * function animate(audioChunk: Float32Array, emotionFrame: EmotionFrame) {\n * const { energy } = energyAnalyzer.analyze(audioChunk);\n * mapper.mapFrame(emotionFrame, energy); // Louder = stronger emotion\n * mapper.update(16);\n * applyToAvatar(mapper.getCurrentBlendshapes());\n * }\n * ```\n *\n * @module animation\n */\n\nimport type { EmotionFrame, Emotion2VecLabel } from '../inference/Emotion2VecInference';\n\n/**\n * Upper face ARKit blendshape names (11 total)\n *\n * These blendshapes control the upper face (brows, eyes, cheeks) and are\n * driven by emotion detection, complementing the mouth blendshapes from\n * LAM lip sync.\n */\nexport const UPPER_FACE_BLENDSHAPES = [\n // Brows (5)\n 'browDownLeft',\n 'browDownRight',\n 'browInnerUp',\n 'browOuterUpLeft',\n 'browOuterUpRight',\n // Eyes (4)\n 'eyeSquintLeft',\n 'eyeSquintRight',\n 'eyeWideLeft',\n 'eyeWideRight',\n // Cheeks (2)\n 'cheekSquintLeft',\n 'cheekSquintRight',\n] as const;\n\nexport type UpperFaceBlendshapeName = (typeof UPPER_FACE_BLENDSHAPES)[number];\n\n/**\n * Upper face blendshape values (0-1 for each)\n */\nexport type UpperFaceBlendshapes = Record<UpperFaceBlendshapeName, number>;\n\n/**\n * Blend mode for combining emotions\n * - 'dominant': Use only the strongest emotion (default, more stable)\n * - 'weighted': Blend all emotions by probability (more nuanced)\n */\nexport type EmotionBlendMode = 'dominant' | 'weighted';\n\n/**\n * Emotion to ARKit blendshape mapping\n *\n * Based on Paul Ekman's FACS (Facial Action Coding System) research:\n *\n * - Happy (AU6+AU12): Cheek raise + 
lip corner pull (Duchenne smile)\n * Upper face: cheekSquint (AU6) + slight eyeSquint from genuine smile\n *\n * - Angry (AU4+AU5+AU7+AU23): Brow lower + eye wide + lid tighten + lip press\n * Upper face: browDown (AU4) + eyeWide (AU5) + eyeSquint (AU7) creates the \"glare\"\n *\n * - Sad (AU1+AU4+AU15): Inner brow raise + brow furrow + lip corner depress\n * Upper face: browInnerUp (AU1) + browDown (AU4) creates the worried/sad brow\n *\n * - Neutral: All zeros (no expression overlay)\n *\n * @see https://imotions.com/blog/learning/research-fundamentals/facial-action-coding-system/\n * @see https://melindaozel.com/arkit-to-facs-cheat-sheet/\n */\nexport const EMOTION_ARKIT_MAP: Record<Emotion2VecLabel, Partial<UpperFaceBlendshapes>> = {\n happy: {\n // AU6 - Cheek raiser (primary Duchenne smile marker)\n cheekSquintLeft: 0.5,\n cheekSquintRight: 0.5,\n // Slight eye squint from genuine smile (orbicularis oculi activation)\n eyeSquintLeft: 0.2,\n eyeSquintRight: 0.2,\n },\n angry: {\n // AU4 - Brow lowerer (intense, primary anger marker)\n browDownLeft: 0.7,\n browDownRight: 0.7,\n // AU5 - Upper lid raiser (wide eyes, part of the \"glare\")\n eyeWideLeft: 0.4,\n eyeWideRight: 0.4,\n // AU7 - Lid tightener (tense stare, combines with AU5 for angry glare)\n eyeSquintLeft: 0.3,\n eyeSquintRight: 0.3,\n },\n sad: {\n // AU1 - Inner brow raiser (primary sadness marker)\n browInnerUp: 0.6,\n // AU4 - Brow lowerer (brows drawn together)\n browDownLeft: 0.3,\n browDownRight: 0.3,\n },\n neutral: {}, // All zeros - no expression overlay\n};\n\n/**\n * Configuration for EmotionToBlendshapeMapper\n */\nexport interface EmotionBlendshapeConfig {\n /**\n * Smoothing factor for exponential moving average (0-1)\n * Lower = slower, smoother transitions\n * Higher = faster, more responsive\n * @default 0.15\n */\n smoothingFactor?: number;\n\n /**\n * Minimum confidence threshold for emotion to take effect\n * Emotions below this confidence are treated as neutral\n * @default 0.3\n */\n confidenceThreshold?: number;\n\n /**\n * Global intensity multiplier for all blendshapes (0-2)\n * @default 1.0\n */\n intensity?: number;\n\n /**\n * Blend mode for combining emotions\n * - 'dominant': Use only the strongest emotion (default)\n * - 'weighted': Blend all emotions by probability\n * @default 'dominant'\n */\n blendMode?: EmotionBlendMode;\n\n /**\n * Minimum probability for an emotion to contribute in weighted blend mode\n * Emotions with probability below this are ignored\n * @default 0.1\n */\n minBlendProbability?: number;\n\n /**\n * Enable energy modulation - scale emotion intensity by audio energy\n * When enabled, louder speech produces stronger expressions\n * @default false\n */\n energyModulation?: boolean;\n\n /**\n * Minimum energy scale when energy modulation is enabled (0-1)\n * At zero audio energy, emotion intensity is scaled by this factor\n * @default 0.3\n */\n minEnergyScale?: number;\n\n /**\n * Maximum energy scale when energy modulation is enabled (0-2)\n * At maximum audio energy, emotion intensity is scaled by this factor\n * @default 1.0\n */\n maxEnergyScale?: number;\n}\n\nconst DEFAULT_CONFIG: Required<EmotionBlendshapeConfig> = {\n smoothingFactor: 0.15,\n confidenceThreshold: 0.3,\n intensity: 1.0,\n blendMode: 'dominant',\n minBlendProbability: 0.1,\n energyModulation: false,\n minEnergyScale: 0.3,\n maxEnergyScale: 1.0,\n};\n\n/**\n * Creates a zeroed UpperFaceBlendshapes object\n */\nfunction createZeroBlendshapes(): UpperFaceBlendshapes {\n const result = {} as 
UpperFaceBlendshapes;\n for (const name of UPPER_FACE_BLENDSHAPES) {\n result[name] = 0;\n }\n return result;\n}\n\n/**\n * Clamp value between 0 and 1\n */\nfunction clamp01(value: number): number {\n return Math.max(0, Math.min(1, value));\n}\n\n/**\n * EmotionToBlendshapeMapper\n *\n * Converts emotion detection output to upper face ARKit blendshapes.\n * Provides smooth transitions between emotion states using exponential\n * moving average interpolation.\n *\n * Supports two blend modes:\n * - 'dominant': Uses only the strongest emotion\n * - 'weighted': Blends all emotions by probability for nuanced expressions\n *\n * Also supports energy modulation to scale emotion intensity by audio energy.\n */\nexport class EmotionToBlendshapeMapper {\n private config: Required<EmotionBlendshapeConfig>;\n private targetBlendshapes: UpperFaceBlendshapes;\n private currentBlendshapes: UpperFaceBlendshapes;\n private currentEnergy: number = 1.0;\n\n /**\n * Create a new EmotionToBlendshapeMapper\n *\n * @param config - Optional configuration\n */\n constructor(config?: EmotionBlendshapeConfig) {\n this.config = {\n ...DEFAULT_CONFIG,\n ...config,\n };\n this.targetBlendshapes = createZeroBlendshapes();\n this.currentBlendshapes = createZeroBlendshapes();\n }\n\n /**\n * Map an emotion frame to target blendshapes\n *\n * This sets the target values that the mapper will smoothly interpolate\n * towards. Call update() each frame to apply smoothing.\n *\n * @param frame - Emotion frame from Emotion2VecInference\n * @param audioEnergy - Optional audio energy (0-1) for energy modulation\n * @returns Target upper face blendshapes (before smoothing)\n */\n mapFrame(frame: EmotionFrame, audioEnergy?: number): UpperFaceBlendshapes {\n // Reset target to zeros\n this.targetBlendshapes = createZeroBlendshapes();\n\n // Store energy for modulation\n if (audioEnergy !== undefined) {\n this.currentEnergy = clamp01(audioEnergy);\n }\n\n // Check for valid frame\n if (!frame) {\n return { ...this.targetBlendshapes };\n }\n\n // Route to appropriate blend method\n if (this.config.blendMode === 'weighted') {\n this.mapFrameWeighted(frame);\n } else {\n this.mapFrameDominant(frame);\n }\n\n // Apply energy modulation if enabled\n if (this.config.energyModulation) {\n this.applyEnergyModulation();\n }\n\n return { ...this.targetBlendshapes };\n }\n\n /**\n * Map using dominant emotion only (original behavior)\n */\n private mapFrameDominant(frame: EmotionFrame): void {\n // Check confidence threshold\n if (frame.confidence < this.config.confidenceThreshold) {\n return;\n }\n\n // Get emotion mapping\n const emotion = frame.emotion as Emotion2VecLabel;\n const mapping = EMOTION_ARKIT_MAP[emotion];\n\n if (!mapping) {\n return;\n }\n\n // Apply mapping with intensity and confidence scaling\n const scale = this.config.intensity * frame.confidence;\n\n for (const [name, value] of Object.entries(mapping)) {\n const blendshapeName = name as UpperFaceBlendshapeName;\n if (value !== undefined) {\n this.targetBlendshapes[blendshapeName] = clamp01(value * scale);\n }\n }\n }\n\n /**\n * Map using weighted blend of all emotions by probability\n * Creates more nuanced expressions (e.g., bittersweet = happy + sad)\n */\n private mapFrameWeighted(frame: EmotionFrame): void {\n if (!frame.probabilities) {\n // Fall back to dominant if no probabilities\n this.mapFrameDominant(frame);\n return;\n }\n\n // Blend all emotions by their probability\n for (const [emotion, probability] of Object.entries(frame.probabilities)) {\n // Skip 
emotions below minimum probability\n if (probability < this.config.minBlendProbability) {\n continue;\n }\n\n const mapping = EMOTION_ARKIT_MAP[emotion as Emotion2VecLabel];\n if (!mapping) {\n continue;\n }\n\n // Add this emotion's contribution weighted by probability\n const scale = this.config.intensity * probability;\n\n for (const [name, value] of Object.entries(mapping)) {\n const blendshapeName = name as UpperFaceBlendshapeName;\n if (value !== undefined) {\n // Additive blending - sum contributions\n this.targetBlendshapes[blendshapeName] += value * scale;\n }\n }\n }\n\n // Clamp all values to 0-1 after blending\n for (const name of UPPER_FACE_BLENDSHAPES) {\n this.targetBlendshapes[name] = clamp01(this.targetBlendshapes[name]);\n }\n }\n\n /**\n * Apply energy modulation to scale emotion intensity by audio energy\n * Louder speech = stronger expressions\n */\n private applyEnergyModulation(): void {\n const { minEnergyScale, maxEnergyScale } = this.config;\n\n // Linear interpolation: energy 0 -> minScale, energy 1 -> maxScale\n const energyScale = minEnergyScale + this.currentEnergy * (maxEnergyScale - minEnergyScale);\n\n for (const name of UPPER_FACE_BLENDSHAPES) {\n this.targetBlendshapes[name] = clamp01(this.targetBlendshapes[name] * energyScale);\n }\n }\n\n /**\n * Apply smoothing to interpolate current values towards target\n *\n * Uses exponential moving average:\n * current = current + smoothingFactor * (target - current)\n *\n * @param _deltaMs - Delta time in milliseconds (reserved for future time-based smoothing)\n */\n update(_deltaMs: number): void {\n const factor = this.config.smoothingFactor;\n\n for (const name of UPPER_FACE_BLENDSHAPES) {\n const target = this.targetBlendshapes[name];\n const current = this.currentBlendshapes[name];\n this.currentBlendshapes[name] = clamp01(current + factor * (target - current));\n }\n }\n\n /**\n * Get current smoothed blendshape values\n *\n * @returns Current upper face blendshapes (after smoothing)\n */\n getCurrentBlendshapes(): UpperFaceBlendshapes {\n return { ...this.currentBlendshapes };\n }\n\n /**\n * Reset mapper to neutral state\n *\n * Sets both target and current blendshapes to zero.\n */\n reset(): void {\n this.targetBlendshapes = createZeroBlendshapes();\n this.currentBlendshapes = createZeroBlendshapes();\n this.currentEnergy = 1.0;\n }\n\n /**\n * Get current configuration\n */\n getConfig(): Required<EmotionBlendshapeConfig> {\n return { ...this.config };\n }\n\n /**\n * Update configuration\n *\n * @param config - Partial configuration to update\n */\n setConfig(config: Partial<EmotionBlendshapeConfig>): void {\n this.config = {\n ...this.config,\n ...config,\n };\n }\n}\n","/**\n * Console Exporter\n *\n * Exports telemetry data to the browser console for development/debugging.\n *\n * @category Telemetry\n */\n\nimport type { SpanAttributes } from '../types';\n\n/**\n * Span data structure for export\n */\nexport interface SpanData {\n name: string;\n traceId: string;\n spanId: string;\n parentSpanId?: string;\n startTime: number;\n endTime: number;\n durationMs: number;\n status: 'ok' | 'error';\n attributes: SpanAttributes;\n error?: Error;\n}\n\n/**\n * Metric data structure for export\n */\nexport interface MetricData {\n name: string;\n type: 'counter' | 'histogram';\n value: number;\n attributes: Record<string, string | number | boolean>;\n timestamp: number;\n}\n\n/**\n * Exporter interface that all exporters must implement\n */\nexport interface TelemetryExporterInterface {\n /** Export a 
completed span */\n exportSpan(span: SpanData): void;\n /** Export a metric */\n exportMetric(metric: MetricData): void;\n /** Flush any buffered data */\n flush(): Promise<void>;\n /** Shutdown the exporter */\n shutdown(): Promise<void>;\n}\n\n/**\n * Console exporter for development/debugging\n *\n * Outputs spans and metrics to the browser console with formatting.\n */\nexport class ConsoleExporter implements TelemetryExporterInterface {\n private enabled: boolean;\n private prefix: string;\n\n constructor(options: { enabled?: boolean; prefix?: string } = {}) {\n this.enabled = options.enabled ?? true;\n this.prefix = options.prefix ?? '[Omote Telemetry]';\n }\n\n exportSpan(span: SpanData): void {\n if (!this.enabled) return;\n\n const statusIcon = span.status === 'ok' ? '✓' : '✗';\n const statusColor = span.status === 'ok' ? 'color: green' : 'color: red';\n\n console.groupCollapsed(\n `%c${this.prefix} %c${statusIcon} ${span.name} %c(${span.durationMs.toFixed(2)}ms)`,\n 'color: gray',\n statusColor,\n 'color: gray'\n );\n\n console.log('Trace ID:', span.traceId);\n console.log('Span ID:', span.spanId);\n if (span.parentSpanId) {\n console.log('Parent Span ID:', span.parentSpanId);\n }\n console.log('Duration:', `${span.durationMs.toFixed(2)}ms`);\n console.log('Status:', span.status);\n\n if (Object.keys(span.attributes).length > 0) {\n console.log('Attributes:', span.attributes);\n }\n\n if (span.error) {\n console.error('Error:', span.error);\n }\n\n console.groupEnd();\n }\n\n exportMetric(metric: MetricData): void {\n if (!this.enabled) return;\n\n const typeIcon = metric.type === 'counter' ? '↑' : '📊';\n\n console.log(\n `%c${this.prefix} %c${typeIcon} ${metric.name}: %c${metric.value}`,\n 'color: gray',\n 'color: blue',\n 'color: black; font-weight: bold',\n metric.attributes\n );\n }\n\n async flush(): Promise<void> {\n // Console exporter doesn't buffer, nothing to flush\n }\n\n async shutdown(): Promise<void> {\n this.enabled = false;\n }\n}\n","/**\n * OTLP Exporter\n *\n * Exports telemetry data to OTLP-compatible backends (Jaeger, Tempo, etc.)\n * using the OTLP/HTTP JSON protocol.\n *\n * @category Telemetry\n */\n\nimport type { OTLPExporterConfig } from '../types';\nimport type { SpanData, MetricData, TelemetryExporterInterface } from './console';\n\n/**\n * OTLP span status codes\n */\nconst StatusCode = {\n UNSET: 0,\n OK: 1,\n ERROR: 2,\n} as const;\n\n/**\n * Convert internal span to OTLP format\n */\nfunction spanToOTLP(span: SpanData, serviceName: string, serviceVersion: string) {\n const attributes = Object.entries(span.attributes)\n .filter(([, v]) => v !== undefined)\n .map(([key, value]) => ({\n key,\n value: typeof value === 'string'\n ? { stringValue: value }\n : typeof value === 'number'\n ? Number.isInteger(value)\n ? 
{ intValue: value }\n : { doubleValue: value }\n : { boolValue: value },\n }));\n\n return {\n resourceSpans: [{\n resource: {\n attributes: [\n { key: 'service.name', value: { stringValue: serviceName } },\n { key: 'service.version', value: { stringValue: serviceVersion } },\n { key: 'telemetry.sdk.name', value: { stringValue: 'omote-sdk' } },\n { key: 'telemetry.sdk.language', value: { stringValue: 'javascript' } },\n ],\n },\n scopeSpans: [{\n scope: {\n name: 'omote-sdk',\n version: serviceVersion,\n },\n spans: [{\n traceId: span.traceId,\n spanId: span.spanId,\n parentSpanId: span.parentSpanId || '',\n name: span.name,\n kind: 1, // INTERNAL\n startTimeUnixNano: String(span.startTime * 1_000_000),\n endTimeUnixNano: String(span.endTime * 1_000_000),\n attributes,\n status: {\n code: span.status === 'ok' ? StatusCode.OK : StatusCode.ERROR,\n message: span.error?.message || '',\n },\n }],\n }],\n }],\n };\n}\n\n/**\n * Convert internal metric to OTLP format\n */\nfunction metricToOTLP(metric: MetricData, serviceName: string, serviceVersion: string) {\n const attributes = Object.entries(metric.attributes)\n .filter(([, v]) => v !== undefined)\n .map(([key, value]) => ({\n key,\n value: typeof value === 'string'\n ? { stringValue: value }\n : typeof value === 'number'\n ? Number.isInteger(value)\n ? { intValue: value }\n : { doubleValue: value }\n : { boolValue: value },\n }));\n\n const dataPoint = {\n attributes,\n timeUnixNano: String(metric.timestamp * 1_000_000),\n ...(metric.type === 'counter'\n ? { asInt: metric.value }\n : { asDouble: metric.value }),\n };\n\n return {\n resourceMetrics: [{\n resource: {\n attributes: [\n { key: 'service.name', value: { stringValue: serviceName } },\n { key: 'service.version', value: { stringValue: serviceVersion } },\n ],\n },\n scopeMetrics: [{\n scope: {\n name: 'omote-sdk',\n version: serviceVersion,\n },\n metrics: [{\n name: metric.name,\n ...(metric.type === 'counter'\n ? 
{\n sum: {\n dataPoints: [dataPoint],\n aggregationTemporality: 2, // CUMULATIVE\n isMonotonic: true,\n },\n }\n : {\n gauge: {\n dataPoints: [dataPoint],\n },\n }),\n }],\n }],\n }],\n };\n}\n\n/**\n * OTLP exporter for production telemetry\n *\n * Sends spans and metrics to OTLP-compatible backends like:\n * - Jaeger\n * - Grafana Tempo\n * - Honeycomb\n * - Datadog\n * - AWS X-Ray (with collector)\n */\nexport class OTLPExporter implements TelemetryExporterInterface {\n private config: Required<OTLPExporterConfig>;\n private serviceName: string;\n private serviceVersion: string;\n private spanBuffer: SpanData[] = [];\n private metricBuffer: MetricData[] = [];\n private flushIntervalId: ReturnType<typeof setInterval> | null = null;\n private readonly BUFFER_SIZE = 100;\n private readonly FLUSH_INTERVAL_MS = 5000;\n private isShutdown = false;\n\n constructor(\n config: OTLPExporterConfig,\n serviceName: string = 'omote-sdk',\n serviceVersion: string = '0.1.0'\n ) {\n this.config = {\n timeoutMs: 10000,\n headers: {},\n ...config,\n };\n this.serviceName = serviceName;\n this.serviceVersion = serviceVersion;\n\n // Start periodic flush\n this.flushIntervalId = setInterval(() => {\n this.flush().catch(console.error);\n }, this.FLUSH_INTERVAL_MS);\n }\n\n exportSpan(span: SpanData): void {\n if (this.isShutdown) return;\n\n this.spanBuffer.push(span);\n\n if (this.spanBuffer.length >= this.BUFFER_SIZE) {\n this.flush().catch(console.error);\n }\n }\n\n exportMetric(metric: MetricData): void {\n if (this.isShutdown) return;\n\n this.metricBuffer.push(metric);\n\n if (this.metricBuffer.length >= this.BUFFER_SIZE) {\n this.flush().catch(console.error);\n }\n }\n\n async flush(): Promise<void> {\n if (this.isShutdown) return;\n\n const spans = this.spanBuffer.splice(0);\n const metrics = this.metricBuffer.splice(0);\n\n const promises: Promise<void>[] = [];\n\n // Export spans\n if (spans.length > 0) {\n promises.push(this.exportSpans(spans));\n }\n\n // Export metrics\n if (metrics.length > 0) {\n promises.push(this.exportMetrics(metrics));\n }\n\n await Promise.all(promises);\n }\n\n async shutdown(): Promise<void> {\n if (this.flushIntervalId) {\n clearInterval(this.flushIntervalId);\n this.flushIntervalId = null;\n }\n\n // Final flush before marking shutdown\n await this.flush();\n\n this.isShutdown = true;\n }\n\n private async exportSpans(spans: SpanData[]): Promise<void> {\n // Combine all spans into a single request\n const resourceSpans = spans.map(span =>\n spanToOTLP(span, this.serviceName, this.serviceVersion).resourceSpans[0]\n );\n\n const body = { resourceSpans };\n const endpoint = this.config.endpoint.replace(/\\/$/, '') + '/v1/traces';\n\n await this.sendRequest(endpoint, body);\n }\n\n private async exportMetrics(metrics: MetricData[]): Promise<void> {\n // Combine all metrics into a single request\n const resourceMetrics = metrics.map(metric =>\n metricToOTLP(metric, this.serviceName, this.serviceVersion).resourceMetrics[0]\n );\n\n const body = { resourceMetrics };\n const endpoint = this.config.endpoint.replace(/\\/$/, '') + '/v1/metrics';\n\n await this.sendRequest(endpoint, body);\n }\n\n private async sendRequest(endpoint: string, body: unknown): Promise<void> {\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeoutMs);\n\n try {\n const response = await fetch(endpoint, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers,\n },\n body: JSON.stringify(body),\n 
signal: controller.signal,\n });\n\n if (!response.ok) {\n console.warn(`[OTLP] Export failed: ${response.status} ${response.statusText}`);\n }\n } catch (error) {\n if ((error as Error).name === 'AbortError') {\n console.warn('[OTLP] Export timed out');\n } else {\n console.warn('[OTLP] Export error:', error);\n }\n } finally {\n clearTimeout(timeoutId);\n }\n }\n}\n","/**\n * Muse Telemetry\n *\n * Main orchestrator for SDK telemetry. Manages spans, metrics, and exporters.\n *\n * @category Telemetry\n */\n\nimport type { TelemetryConfig, SpanAttributes, SamplingConfig } from './types';\nimport type { SpanData, MetricData, TelemetryExporterInterface } from './exporters/console';\nimport { ConsoleExporter } from './exporters/console';\nimport { OTLPExporter } from './exporters/otlp';\n\n/**\n * Generate a random hex ID\n */\nfunction generateId(length: number = 16): string {\n const bytes = new Uint8Array(length);\n crypto.getRandomValues(bytes);\n return Array.from(bytes)\n .map(b => b.toString(16).padStart(2, '0'))\n .join('');\n}\n\n/**\n * Span context for tracing\n */\ninterface SpanContext {\n traceId: string;\n spanId: string;\n parentSpanId?: string;\n}\n\n/**\n * Active span handle returned by startSpan\n */\nexport interface ActiveSpan {\n /** End the span with success status */\n end(): void;\n /** End the span with error status */\n endWithError(error: Error): void;\n /** Add attributes to the span */\n setAttributes(attrs: Partial<SpanAttributes>): void;\n /** Get the span context */\n getContext(): SpanContext;\n}\n\n/**\n * Global telemetry instance\n */\nlet globalTelemetry: OmoteTelemetry | null = null;\n\n/**\n * Configure global telemetry\n *\n * @example\n * ```typescript\n * // Development\n * configureTelemetry({\n * enabled: true,\n * serviceName: 'omote-dev',\n * exporter: 'console',\n * });\n *\n * // Production\n * configureTelemetry({\n * enabled: true,\n * serviceName: 'omote-prod',\n * exporter: 'otlp',\n * exporterConfig: {\n * endpoint: 'https://tempo.example.com',\n * },\n * sampling: { ratio: 0.1 },\n * });\n * ```\n */\nexport function configureTelemetry(config: TelemetryConfig): OmoteTelemetry {\n if (globalTelemetry) {\n globalTelemetry.shutdown();\n }\n globalTelemetry = new OmoteTelemetry(config);\n return globalTelemetry;\n}\n\n/**\n * Get the global telemetry instance\n */\nexport function getTelemetry(): OmoteTelemetry | null {\n return globalTelemetry;\n}\n\n/**\n * Main telemetry class\n *\n * Manages spans, metrics, and exports to configured backends.\n */\nexport class OmoteTelemetry {\n private config: Required<Omit<TelemetryConfig, 'exporterConfig'>> & { exporterConfig?: TelemetryConfig['exporterConfig'] };\n private exporter: TelemetryExporterInterface | null = null;\n private activeTraceId: string | null = null;\n private metricsIntervalId: ReturnType<typeof setInterval> | null = null;\n\n // Metric accumulators\n private counters: Map<string, { value: number; attributes: Record<string, string | number | boolean> }> = new Map();\n private histograms: Map<string, { values: number[]; attributes: Record<string, string | number | boolean> }> = new Map();\n\n constructor(config: TelemetryConfig) {\n this.config = {\n enabled: config.enabled ?? false,\n serviceName: config.serviceName ?? 'omote-sdk',\n serviceVersion: config.serviceVersion ?? '0.1.0',\n exporter: config.exporter ?? 'none',\n exporterConfig: config.exporterConfig,\n sampling: config.sampling ?? { ratio: 1.0, alwaysSampleErrors: true },\n metricsEnabled: config.metricsEnabled ?? 
true,\n metricsIntervalMs: config.metricsIntervalMs ?? 60000,\n };\n\n if (this.config.enabled) {\n this.initExporter();\n this.startMetricsCollection();\n }\n }\n\n /**\n * Initialize the configured exporter\n */\n private initExporter(): void {\n switch (this.config.exporter) {\n case 'console':\n this.exporter = new ConsoleExporter({ enabled: true });\n break;\n case 'otlp':\n if (!this.config.exporterConfig) {\n console.warn('[Telemetry] OTLP exporter requires exporterConfig with endpoint');\n return;\n }\n this.exporter = new OTLPExporter(\n this.config.exporterConfig,\n this.config.serviceName,\n this.config.serviceVersion\n );\n break;\n case 'none':\n default:\n this.exporter = null;\n }\n }\n\n /**\n * Start periodic metrics collection\n */\n private startMetricsCollection(): void {\n if (!this.config.metricsEnabled || !this.exporter) return;\n\n this.metricsIntervalId = setInterval(() => {\n this.flushMetrics();\n }, this.config.metricsIntervalMs);\n }\n\n /**\n * Check if this operation should be sampled\n */\n private shouldSample(isError: boolean = false): boolean {\n if (!this.config.enabled) return false;\n\n const sampling = this.config.sampling as SamplingConfig;\n if (isError && sampling.alwaysSampleErrors) return true;\n\n const ratio = sampling.ratio ?? 1.0;\n return Math.random() < ratio;\n }\n\n /**\n * Start a new span\n *\n * @example\n * ```typescript\n * const span = telemetry.startSpan('Wav2Vec2.infer', {\n * 'inference.input_samples': samples.length,\n * 'model.backend': 'webgpu',\n * });\n *\n * try {\n * const result = await doInference();\n * span.setAttributes({ 'inference.output_frames': result.frames });\n * span.end();\n * } catch (error) {\n * span.endWithError(error);\n * }\n * ```\n */\n startSpan(name: string, attributes: Partial<SpanAttributes> = {}, parentContext?: SpanContext): ActiveSpan {\n const traceId = parentContext?.traceId ?? this.activeTraceId ?? 
generateId(16);\n const spanId = generateId(8);\n const parentSpanId = parentContext?.spanId;\n const startTime = performance.now();\n\n // Set active trace if this is a root span\n if (!parentContext && !this.activeTraceId) {\n this.activeTraceId = traceId;\n }\n\n let spanAttributes = { ...attributes };\n let ended = false;\n let sampled = this.shouldSample();\n\n const context: SpanContext = { traceId, spanId, parentSpanId };\n\n const endSpan = (status: 'ok' | 'error', error?: Error): void => {\n if (ended) return;\n ended = true;\n\n const endTime = performance.now();\n const durationMs = endTime - startTime;\n\n // Re-check sampling for errors\n if (status === 'error' && !sampled) {\n sampled = this.shouldSample(true);\n }\n\n if (!sampled || !this.exporter) return;\n\n const spanData: SpanData = {\n name,\n traceId,\n spanId,\n parentSpanId,\n startTime,\n endTime,\n durationMs,\n status,\n attributes: spanAttributes as SpanAttributes,\n error,\n };\n\n this.exporter.exportSpan(spanData);\n\n // Clear active trace if this was the root span\n if (!parentSpanId && this.activeTraceId === traceId) {\n this.activeTraceId = null;\n }\n };\n\n return {\n end: () => endSpan('ok'),\n endWithError: (error: Error) => endSpan('error', error),\n setAttributes: (attrs: Partial<SpanAttributes>) => {\n spanAttributes = { ...spanAttributes, ...attrs };\n },\n getContext: () => context,\n };\n }\n\n /**\n * Wrap an async function with a span\n *\n * @example\n * ```typescript\n * const result = await telemetry.withSpan('Model.load', async (span) => {\n * const model = await loadModel();\n * span.setAttributes({ 'model.size_bytes': model.size });\n * return model;\n * });\n * ```\n */\n async withSpan<T>(\n name: string,\n fn: (span: ActiveSpan) => Promise<T>,\n attributes: Partial<SpanAttributes> = {},\n parentContext?: SpanContext\n ): Promise<T> {\n const span = this.startSpan(name, attributes, parentContext);\n\n try {\n const result = await fn(span);\n span.end();\n return result;\n } catch (error) {\n span.endWithError(error as Error);\n throw error;\n }\n }\n\n /**\n * Increment a counter metric\n *\n * @example\n * ```typescript\n * telemetry.incrementCounter('omote.inference.total', 1, {\n * model: 'wav2vec2',\n * backend: 'webgpu',\n * status: 'success',\n * });\n * ```\n */\n incrementCounter(\n name: string,\n value: number = 1,\n attributes: Record<string, string | number | boolean> = {}\n ): void {\n if (!this.config.enabled || !this.config.metricsEnabled) return;\n\n const key = this.getMetricKey(name, attributes);\n const existing = this.counters.get(key);\n\n if (existing) {\n existing.value += value;\n } else {\n this.counters.set(key, { value, attributes });\n }\n }\n\n /**\n * Record a histogram value\n *\n * @example\n * ```typescript\n * telemetry.recordHistogram('omote.inference.latency', durationMs, {\n * model: 'wav2vec2',\n * backend: 'webgpu',\n * });\n * ```\n */\n recordHistogram(\n name: string,\n value: number,\n attributes: Record<string, string | number | boolean> = {}\n ): void {\n if (!this.config.enabled || !this.config.metricsEnabled) return;\n\n const key = this.getMetricKey(name, attributes);\n const existing = this.histograms.get(key);\n\n if (existing) {\n existing.values.push(value);\n } else {\n this.histograms.set(key, { values: [value], attributes });\n }\n }\n\n /**\n * Generate unique key for metric with attributes\n */\n private getMetricKey(name: string, attributes: Record<string, string | number | boolean>): string {\n const sortedAttrs = 
Object.entries(attributes)\n .sort(([a], [b]) => a.localeCompare(b))\n .map(([k, v]) => `${k}=${v}`)\n .join(',');\n return `${name}|${sortedAttrs}`;\n }\n\n /**\n * Flush accumulated metrics to exporter\n */\n private flushMetrics(): void {\n if (!this.exporter) return;\n\n const timestamp = performance.now();\n\n // Export counters\n for (const [key, data] of this.counters) {\n const name = key.split('|')[0];\n const metric: MetricData = {\n name,\n type: 'counter',\n value: data.value,\n attributes: data.attributes,\n timestamp,\n };\n this.exporter.exportMetric(metric);\n }\n\n // Export histogram aggregates\n for (const [key, data] of this.histograms) {\n const name = key.split('|')[0];\n if (data.values.length === 0) continue;\n\n // Calculate average for histogram\n const sum = data.values.reduce((a, b) => a + b, 0);\n const avg = sum / data.values.length;\n\n const metric: MetricData = {\n name,\n type: 'histogram',\n value: avg,\n attributes: {\n ...data.attributes,\n count: data.values.length,\n sum,\n min: Math.min(...data.values),\n max: Math.max(...data.values),\n },\n timestamp,\n };\n this.exporter.exportMetric(metric);\n\n // Clear values after export\n data.values = [];\n }\n }\n\n /**\n * Force flush all pending data\n */\n async flush(): Promise<void> {\n this.flushMetrics();\n await this.exporter?.flush();\n }\n\n /**\n * Shutdown telemetry\n */\n async shutdown(): Promise<void> {\n if (this.metricsIntervalId) {\n clearInterval(this.metricsIntervalId);\n this.metricsIntervalId = null;\n }\n\n await this.flush();\n await this.exporter?.shutdown();\n this.exporter = null;\n }\n\n /**\n * Check if telemetry is enabled\n */\n isEnabled(): boolean {\n return this.config.enabled;\n }\n\n /**\n * Get current configuration\n */\n getConfig(): TelemetryConfig {\n return { ...this.config };\n }\n}\n","/**\n * Telemetry Types\n *\n * Configuration and type definitions for OpenTelemetry instrumentation.\n *\n * @category Telemetry\n */\n\n/**\n * Supported telemetry exporters\n */\nexport type TelemetryExporter = 'console' | 'otlp' | 'none';\n\n/**\n * Sampling configuration\n */\nexport interface SamplingConfig {\n /** Sampling ratio (0.0 - 1.0). Default: 1.0 (sample everything) */\n ratio?: number;\n /** Always sample errors regardless of ratio */\n alwaysSampleErrors?: boolean;\n}\n\n/**\n * OTLP exporter configuration\n */\nexport interface OTLPExporterConfig {\n /** OTLP endpoint URL (e.g., 'https://tempo.example.com/v1/traces') */\n endpoint: string;\n /** Optional headers for authentication */\n headers?: Record<string, string>;\n /** Request timeout in ms. Default: 10000 */\n timeoutMs?: number;\n}\n\n/**\n * Main telemetry configuration\n */\nexport interface TelemetryConfig {\n /** Enable/disable telemetry. Default: false */\n enabled?: boolean;\n /** Service name for spans. Default: 'omote-sdk' */\n serviceName?: string;\n /** Service version. Default: SDK version */\n serviceVersion?: string;\n /** Exporter type. Default: 'none' */\n exporter?: TelemetryExporter;\n /** OTLP exporter config (required if exporter is 'otlp') */\n exporterConfig?: OTLPExporterConfig;\n /** Sampling configuration */\n sampling?: SamplingConfig;\n /** Enable metrics collection. Default: true when telemetry enabled */\n metricsEnabled?: boolean;\n /** Metrics export interval in ms. 
Default: 60000 */\n metricsIntervalMs?: number;\n}\n\n/**\n * Span attributes for model operations\n */\nexport interface ModelSpanAttributes {\n /** Model URL or identifier */\n 'model.url'?: string;\n /** Model name (e.g., 'whisper', 'lam', 'silero-vad') */\n 'model.name'?: string;\n /** Inference backend used */\n 'model.backend'?: 'webgpu' | 'wasm';\n /** Whether model was loaded from cache */\n 'model.cached'?: boolean;\n /** Model size in bytes */\n 'model.size_bytes'?: number;\n}\n\n/**\n * Span attributes for inference operations\n */\nexport interface InferenceSpanAttributes extends ModelSpanAttributes {\n /** Number of input audio samples */\n 'inference.input_samples'?: number;\n /** Input duration in ms */\n 'inference.input_duration_ms'?: number;\n /** Number of output frames (for LAM) */\n 'inference.output_frames'?: number;\n /** Inference duration in ms */\n 'inference.duration_ms'?: number;\n /** Whether inference succeeded */\n 'inference.success'?: boolean;\n /** Error type if failed */\n 'inference.error_type'?: string;\n}\n\n/**\n * Span attributes for cache operations\n */\nexport interface CacheSpanAttributes {\n /** Cache key (URL) */\n 'cache.key'?: string;\n /** Whether it was a cache hit */\n 'cache.hit'?: boolean;\n /** Size of cached item in bytes */\n 'cache.size_bytes'?: number;\n /** Cache operation type */\n 'cache.operation'?: 'get' | 'set' | 'delete';\n}\n\n/**\n * Combined span attributes type\n */\nexport type SpanAttributes =\n | ModelSpanAttributes\n | InferenceSpanAttributes\n | CacheSpanAttributes\n | Record<string, string | number | boolean | undefined>;\n\n/**\n * Metric names used by the SDK\n */\nexport const MetricNames = {\n /** Histogram: Inference latency in ms */\n INFERENCE_LATENCY: 'omote.inference.latency',\n /** Histogram: Model load time in ms */\n MODEL_LOAD_TIME: 'omote.model.load_time',\n /** Counter: Total inference operations */\n INFERENCE_TOTAL: 'omote.inference.total',\n /** Counter: Total errors */\n ERRORS_TOTAL: 'omote.errors.total',\n /** Counter: Cache hits */\n CACHE_HITS: 'omote.cache.hits',\n /** Counter: Cache misses */\n CACHE_MISSES: 'omote.cache.misses',\n} as const;\n\n/**\n * Histogram buckets for inference latency (ms)\n */\nexport const INFERENCE_LATENCY_BUCKETS = [1, 5, 10, 25, 50, 100, 250, 500, 1000, 2500, 5000];\n\n/**\n * Histogram buckets for model load time (ms)\n */\nexport const MODEL_LOAD_TIME_BUCKETS = [100, 500, 1000, 2500, 5000, 10000, 30000, 60000];\n","/**\n * Model Cache\n *\n * Caches ONNX models in IndexedDB for faster subsequent loads.\n * IndexedDB can handle large files (100s of MBs) unlike localStorage.\n *\n * @category Cache\n */\n\nimport { getTelemetry } from '../telemetry';\n\nconst DB_NAME = 'omote-model-cache';\nconst DB_VERSION = 2;\nconst STORE_NAME = 'models';\n\n/** Default cache size limit: 1GB */\nconst DEFAULT_MAX_SIZE_BYTES = 1024 * 1024 * 1024;\n\n/**\n * Configuration for cache size limits and eviction behavior\n */\nexport interface CacheConfig {\n /** Maximum total cache size in bytes (default: 1GB) */\n maxSizeBytes?: number;\n /** Maximum age in milliseconds before eviction (default: none) */\n maxAgeMs?: number;\n /** Callback when storage quota exceeds warning threshold */\n onQuotaWarning?: (info: QuotaInfo) => void;\n}\n\n/**\n * Storage quota information\n */\nexport interface QuotaInfo {\n /** Total bytes used across all origins */\n usedBytes: number;\n /** Total available quota in bytes */\n quotaBytes: number;\n /** Percentage of quota used (0-100) */\n 
percentUsed: number;\n /** Bytes used by omote cache specifically */\n cacheBytes: number;\n}\n\n/** Global cache configuration */\nlet globalCacheConfig: CacheConfig = {\n maxSizeBytes: DEFAULT_MAX_SIZE_BYTES,\n};\n\n/**\n * Configure cache size limits and eviction behavior\n *\n * @param config - Cache configuration options\n *\n * @example\n * ```typescript\n * import { configureCacheLimit } from '@omote/core';\n *\n * // Set 500MB limit with 24-hour max age\n * configureCacheLimit({\n * maxSizeBytes: 500 * 1024 * 1024,\n * maxAgeMs: 24 * 60 * 60 * 1000,\n * onQuotaWarning: (info) => {\n * console.warn(`Storage ${info.percentUsed.toFixed(1)}% used`);\n * }\n * });\n * ```\n */\nexport function configureCacheLimit(config: CacheConfig): void {\n globalCacheConfig = {\n ...globalCacheConfig,\n ...config,\n };\n\n // Trigger immediate cleanup if over limit\n const cache = getModelCache();\n cache.enforceLimit().catch((err) => {\n console.warn('[ModelCache] Failed to enforce limit after config change:', err);\n });\n}\n\n/**\n * Get current cache configuration\n */\nexport function getCacheConfig(): CacheConfig {\n return { ...globalCacheConfig };\n}\n\ninterface CachedModel {\n url: string;\n data: ArrayBuffer;\n size: number;\n cachedAt: number;\n /** Last time this model was accessed (for LRU eviction) */\n lastAccessedAt: number;\n etag?: string;\n version?: string;\n}\n\n/**\n * Result from getWithValidation() method\n */\nexport interface ValidationResult {\n /** The cached data, or null if not found */\n data: ArrayBuffer | null;\n /** True if the cached data is stale (etag mismatch) */\n stale: boolean;\n}\n\n/**\n * Generate a version-aware cache key\n *\n * @param url - The model URL\n * @param version - Optional version string\n * @returns The cache key (url#vX.X.X if version provided, url otherwise)\n *\n * @example\n * ```typescript\n * getCacheKey('http://example.com/model.onnx', '1.0.0')\n * // Returns: 'http://example.com/model.onnx#v1.0.0'\n *\n * getCacheKey('http://example.com/model.onnx')\n * // Returns: 'http://example.com/model.onnx'\n * ```\n */\nexport function getCacheKey(url: string, version?: string): string {\n if (version) {\n return `${url}#v${version}`;\n }\n return url;\n}\n\ninterface CacheStats {\n totalSize: number;\n modelCount: number;\n models: { url: string; size: number; cachedAt: Date }[];\n}\n\n/**\n * ModelCache - IndexedDB-based cache for ONNX models\n */\nexport class ModelCache {\n private db: IDBDatabase | null = null;\n private dbPromise: Promise<IDBDatabase> | null = null;\n\n /**\n * Initialize the cache database\n */\n private async getDB(): Promise<IDBDatabase> {\n if (this.db) return this.db;\n if (this.dbPromise) return this.dbPromise;\n\n // Request persistent storage for more generous quota on iOS/mobile browsers\n // This increases available storage from ~50MB to potentially GBs\n if (navigator.storage && navigator.storage.persist) {\n try {\n const isPersisted = await navigator.storage.persist();\n if (isPersisted) {\n console.log('[ModelCache] Persistent storage granted - increased quota available');\n } else {\n console.log('[ModelCache] Persistent storage denied - using default quota');\n }\n\n // Log current quota usage (helpful for debugging iOS limits)\n if (navigator.storage.estimate) {\n const estimate = await navigator.storage.estimate();\n const usedMB = ((estimate.usage || 0) / 1024 / 1024).toFixed(2);\n const quotaMB = ((estimate.quota || 0) / 1024 / 1024).toFixed(2);\n console.log(`[ModelCache] Storage: ${usedMB}MB / 
${quotaMB}MB quota`);\n }\n } catch (err) {\n console.warn('[ModelCache] Failed to request persistent storage:', err);\n }\n }\n\n this.dbPromise = new Promise((resolve, reject) => {\n const request = indexedDB.open(DB_NAME, DB_VERSION);\n\n request.onerror = () => {\n console.error('[ModelCache] Failed to open IndexedDB:', request.error);\n reject(request.error);\n };\n\n request.onsuccess = () => {\n this.db = request.result;\n resolve(this.db);\n };\n\n request.onupgradeneeded = (event) => {\n const db = (event.target as IDBOpenDBRequest).result;\n const oldVersion = (event as IDBVersionChangeEvent).oldVersion;\n const tx = (event.target as IDBOpenDBRequest).transaction;\n\n if (oldVersion < 1) {\n // Initial schema: create store with url as key\n const store = db.createObjectStore(STORE_NAME, { keyPath: 'url' });\n store.createIndex('lastAccessedAt', 'lastAccessedAt', { unique: false });\n } else if (oldVersion < 2 && tx) {\n // Migrate from v1 to v2: add lastAccessedAt index and backfill existing entries\n const store = tx.objectStore(STORE_NAME);\n\n // Create index if it doesn't exist\n if (!store.indexNames.contains('lastAccessedAt')) {\n store.createIndex('lastAccessedAt', 'lastAccessedAt', { unique: false });\n }\n\n // Migrate existing entries: set lastAccessedAt = cachedAt\n const cursorRequest = store.openCursor();\n cursorRequest.onsuccess = (cursorEvent) => {\n const cursor = (cursorEvent.target as IDBRequest<IDBCursorWithValue>).result;\n if (cursor) {\n const value = cursor.value;\n if (value.lastAccessedAt === undefined) {\n value.lastAccessedAt = value.cachedAt || Date.now();\n cursor.update(value);\n }\n cursor.continue();\n }\n };\n }\n };\n });\n\n return this.dbPromise;\n }\n\n /**\n * Check if a model is cached\n */\n async has(url: string): Promise<boolean> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.count(url);\n request.onsuccess = () => resolve(request.result > 0);\n request.onerror = () => resolve(false);\n });\n } catch {\n return false;\n }\n }\n\n /**\n * Get a cached model\n *\n * Updates lastAccessedAt timestamp for LRU tracking on cache hit.\n */\n async get(url: string): Promise<ArrayBuffer | null> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.get', { 'cache.url': url });\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n // Use readwrite to update lastAccessedAt on hit\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n const request = store.get(url);\n request.onsuccess = () => {\n const cached = request.result as CachedModel | undefined;\n const hit = cached?.data != null;\n span?.setAttributes({ 'cache.hit': hit });\n if (cached) {\n span?.setAttributes({ 'cache.size_bytes': cached.size });\n // Update lastAccessedAt for LRU tracking\n cached.lastAccessedAt = Date.now();\n store.put(cached);\n }\n span?.end();\n if (hit) {\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n } else {\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n }\n resolve(cached?.data ?? 
null);\n };\n request.onerror = () => {\n span?.setAttributes({ 'cache.hit': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n resolve(null);\n };\n });\n } catch {\n span?.endWithError(new Error('Cache get failed'));\n return null;\n }\n }\n\n /**\n * Get a cached model with ETag validation\n *\n * Validates the cached data against the server's current ETag.\n * If the cached ETag differs from the server's, the data is marked as stale.\n *\n * @param url - The cache key\n * @param originalUrl - The original URL for HEAD request (if different from cache key)\n * @returns ValidationResult with data and stale flag\n *\n * @example\n * ```typescript\n * const result = await cache.getWithValidation('http://example.com/model.onnx');\n * if (result.data && !result.stale) {\n * // Use cached data\n * } else if (result.stale) {\n * // Refetch and update cache\n * }\n * ```\n */\n async getWithValidation(url: string, originalUrl?: string): Promise<ValidationResult> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.getWithValidation', { 'cache.url': url });\n\n try {\n const db = await this.getDB();\n const cached = await new Promise<CachedModel | undefined>((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.get(url);\n request.onsuccess = () => resolve(request.result as CachedModel | undefined);\n request.onerror = () => resolve(undefined);\n });\n\n // Cache miss\n if (!cached?.data) {\n span?.setAttributes({ 'cache.hit': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n return { data: null, stale: false };\n }\n\n span?.setAttributes({ 'cache.hit': true, 'cache.size_bytes': cached.size });\n\n // No etag stored - can't validate, return as fresh\n if (!cached.etag) {\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n\n // Validate via HEAD request\n const fetchUrl = originalUrl || url;\n try {\n const response = await fetch(fetchUrl, { method: 'HEAD' });\n if (!response.ok) {\n // Server error - assume cache is still valid\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n\n const serverEtag = response.headers.get('etag');\n const isStale = serverEtag !== null && serverEtag !== cached.etag;\n\n span?.setAttributes({\n 'cache.validated': true,\n 'cache.stale': isStale,\n 'cache.server_etag': serverEtag || 'none',\n 'cache.cached_etag': cached.etag,\n });\n span?.end();\n\n if (isStale) {\n telemetry?.incrementCounter('omote.cache.stale', 1, {});\n console.log(`[ModelCache] Stale cache detected for ${url}`);\n } else {\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n }\n\n return { data: cached.data, stale: isStale };\n } catch (fetchError) {\n // HEAD request failed (network error, CORS, etc.)\n // Return cached data as non-stale - better than failing completely\n console.warn('[ModelCache] HEAD validation failed, using cached data:', fetchError);\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n } catch {\n span?.endWithError(new Error('Cache 
getWithValidation failed'));\n return { data: null, stale: false };\n }\n }\n\n /**\n * Store a model in cache\n *\n * After storing, triggers LRU eviction if cache exceeds size limit.\n *\n * @param url - The cache key (use getCacheKey() for versioned keys)\n * @param data - The model data\n * @param etag - Optional ETag for staleness validation\n * @param version - Optional version string for metadata\n */\n async set(url: string, data: ArrayBuffer, etag?: string, version?: string): Promise<void> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.set', {\n 'cache.url': url,\n 'cache.size_bytes': data.byteLength,\n ...(version && { 'cache.version': version }),\n });\n try {\n // Check quota before caching (best effort, don't block write)\n this.checkQuota().catch((err) => {\n console.warn('[ModelCache] Quota check failed:', err);\n });\n\n const db = await this.getDB();\n await new Promise<void>((resolve, reject) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n const now = Date.now();\n const cached: CachedModel = {\n url,\n data,\n size: data.byteLength,\n cachedAt: now,\n lastAccessedAt: now,\n etag,\n version,\n };\n const request = store.put(cached);\n request.onsuccess = () => {\n span?.end();\n resolve();\n };\n request.onerror = () => {\n span?.endWithError(request.error || new Error('Cache set failed'));\n reject(request.error);\n };\n });\n\n // Trigger LRU cleanup after write (don't block)\n this.enforceLimit().catch((err) => {\n console.warn('[ModelCache] Failed to enforce limit after set:', err);\n });\n } catch (err) {\n console.warn('[ModelCache] Failed to cache model:', err);\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n }\n }\n\n /**\n * Check storage quota and trigger warnings/cleanup as needed\n *\n * - Logs warning if quota > 90% used\n * - Triggers LRU cleanup if quota > 95% used\n * - Calls onQuotaWarning callback if configured\n */\n private async checkQuota(): Promise<void> {\n const quota = await this.getQuotaInfo();\n if (!quota) {\n return; // API unavailable\n }\n\n const config = globalCacheConfig;\n const telemetry = getTelemetry();\n\n if (quota.percentUsed > 90) {\n console.warn(`[ModelCache] Storage quota ${quota.percentUsed.toFixed(1)}% used (${formatBytes(quota.usedBytes)} / ${formatBytes(quota.quotaBytes)})`);\n\n // Emit telemetry counter\n telemetry?.incrementCounter('omote.cache.quota_warning', 1, {\n percent_used: String(Math.round(quota.percentUsed)),\n });\n\n // Call user callback if configured\n if (config.onQuotaWarning) {\n try {\n config.onQuotaWarning(quota);\n } catch (err) {\n console.warn('[ModelCache] onQuotaWarning callback error:', err);\n }\n }\n }\n\n if (quota.percentUsed > 95) {\n console.warn('[ModelCache] Storage quota critical (>95%), triggering LRU cleanup');\n // Free at least 10% of cache to make room\n const bytesToFree = Math.max(quota.cacheBytes * 0.1, 10 * 1024 * 1024);\n await this.evictOldest(bytesToFree);\n }\n }\n\n /**\n * Delete a cached model\n */\n async delete(url: string): Promise<void> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n store.delete(url);\n tx.oncomplete = () => resolve();\n });\n } catch {\n // Ignore errors\n }\n }\n\n /**\n * Clear all cached models\n */\n async clear(): Promise<void> {\n try {\n const db = await this.getDB();\n return new 
Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n store.clear();\n tx.oncomplete = () => resolve();\n });\n } catch {\n // Ignore errors\n }\n }\n\n /**\n * Get cache statistics\n */\n async getStats(): Promise<CacheStats> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.getAll();\n request.onsuccess = () => {\n const models = (request.result as CachedModel[]) || [];\n resolve({\n totalSize: models.reduce((sum, m) => sum + m.size, 0),\n modelCount: models.length,\n models: models.map((m) => ({\n url: m.url,\n size: m.size,\n cachedAt: new Date(m.cachedAt),\n })),\n });\n };\n request.onerror = () => resolve({ totalSize: 0, modelCount: 0, models: [] });\n });\n } catch {\n return { totalSize: 0, modelCount: 0, models: [] };\n }\n }\n\n /**\n * Enforce cache size limit by evicting oldest entries (LRU)\n *\n * Called automatically after each set() operation.\n * Can also be called manually to trigger cleanup.\n */\n async enforceLimit(): Promise<void> {\n const config = globalCacheConfig;\n const maxSize = config.maxSizeBytes ?? DEFAULT_MAX_SIZE_BYTES;\n\n const stats = await this.getStats();\n if (stats.totalSize <= maxSize) {\n return; // Under limit, nothing to do\n }\n\n const bytesToFree = stats.totalSize - maxSize;\n const evictedUrls = await this.evictOldest(bytesToFree);\n\n if (evictedUrls.length > 0) {\n console.log(`[ModelCache] LRU eviction: removed ${evictedUrls.length} models to free ${formatBytes(bytesToFree)}`);\n }\n }\n\n /**\n * Evict oldest entries (by lastAccessedAt) to free space\n *\n * @param bytesToFree - Minimum bytes to free\n * @returns List of evicted URLs\n *\n * @example\n * ```typescript\n * const cache = getModelCache();\n * const evicted = await cache.evictOldest(100 * 1024 * 1024); // Free 100MB\n * console.log('Evicted:', evicted);\n * ```\n */\n async evictOldest(bytesToFree: number): Promise<string[]> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.evictOldest', {\n 'eviction.bytes_requested': bytesToFree,\n });\n\n try {\n const db = await this.getDB();\n\n // Get all models sorted by lastAccessedAt (oldest first)\n const models = await new Promise<CachedModel[]>((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.getAll();\n request.onsuccess = () => {\n const all = (request.result as CachedModel[]) || [];\n // Sort by lastAccessedAt ascending (oldest first)\n all.sort((a, b) => (a.lastAccessedAt || a.cachedAt || 0) - (b.lastAccessedAt || b.cachedAt || 0));\n resolve(all);\n };\n request.onerror = () => resolve([]);\n });\n\n const evictedUrls: string[] = [];\n let freedBytes = 0;\n\n // Evict models until we've freed enough space\n for (const model of models) {\n if (freedBytes >= bytesToFree) {\n break;\n }\n\n await this.delete(model.url);\n evictedUrls.push(model.url);\n freedBytes += model.size;\n\n console.log(`[ModelCache] Evicted: ${model.url} (${formatBytes(model.size)})`);\n }\n\n span?.setAttributes({\n 'eviction.bytes_freed': freedBytes,\n 'eviction.models_evicted': evictedUrls.length,\n });\n span?.end();\n\n // Emit telemetry counter\n if (freedBytes > 0) {\n telemetry?.incrementCounter('omote.cache.eviction', evictedUrls.length, {\n bytes_freed: String(freedBytes),\n });\n }\n\n return 
evictedUrls;\n } catch (err) {\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n console.warn('[ModelCache] Eviction failed:', err);\n return [];\n }\n }\n\n /**\n * Get storage quota information\n *\n * Uses navigator.storage.estimate() to get quota details.\n * Returns null if the API is unavailable.\n *\n * @returns Quota info or null if unavailable\n *\n * @example\n * ```typescript\n * const cache = getModelCache();\n * const quota = await cache.getQuotaInfo();\n * if (quota) {\n * console.log(`Using ${quota.percentUsed.toFixed(1)}% of quota`);\n * }\n * ```\n */\n async getQuotaInfo(): Promise<QuotaInfo | null> {\n if (!navigator?.storage?.estimate) {\n return null;\n }\n\n try {\n const estimate = await navigator.storage.estimate();\n const usedBytes = estimate.usage || 0;\n const quotaBytes = estimate.quota || 0;\n const percentUsed = quotaBytes > 0 ? (usedBytes / quotaBytes) * 100 : 0;\n\n const stats = await this.getStats();\n\n return {\n usedBytes,\n quotaBytes,\n percentUsed,\n cacheBytes: stats.totalSize,\n };\n } catch {\n return null;\n }\n }\n}\n\n// Singleton instance\nlet cacheInstance: ModelCache | null = null;\n\n/**\n * Get the global ModelCache instance\n */\nexport function getModelCache(): ModelCache {\n if (!cacheInstance) {\n cacheInstance = new ModelCache();\n }\n return cacheInstance;\n}\n\n// Max size for IndexedDB caching\n// When storing ArrayBuffer in IndexedDB, browser does structured clone which\n// temporarily doubles memory usage. To avoid STATUS_BREAKPOINT crashes:\n// - Files < 500MB: Cache as ArrayBuffer (safe, fast retrieval)\n// - Files >= 500MB: Skip IndexedDB, rely on HTTP cache\n// See: https://bugs.chromium.org/p/chromium/issues/detail?id=170845\nconst MAX_CACHE_SIZE_BYTES = 500 * 1024 * 1024;\n\n/**\n * Options for fetchWithCache\n */\nexport interface FetchWithCacheOptions {\n /** Optional version string for versioned caching */\n version?: string;\n /** If true, validates cached data against server ETag and refetches if stale */\n validateStale?: boolean;\n /** Progress callback during download */\n onProgress?: (loaded: number, total: number) => void;\n}\n\n/**\n * Fetch a model with caching\n * Uses IndexedDB cache with network fallback\n * Files larger than 500MB are not cached to IndexedDB to avoid memory pressure\n * (structured clone during IndexedDB write temporarily doubles memory usage)\n *\n * @param url - The URL to fetch\n * @param onProgress - Optional progress callback (legacy signature)\n * @returns The fetched ArrayBuffer\n *\n * @example\n * ```typescript\n * // Simple usage (backwards compatible)\n * const data = await fetchWithCache('http://example.com/model.onnx');\n *\n * // With progress callback (backwards compatible)\n * const data = await fetchWithCache('http://example.com/model.onnx', (loaded, total) => {\n * console.log(`${loaded}/${total} bytes`);\n * });\n *\n * // With options (new API)\n * const data = await fetchWithCache('http://example.com/model.onnx', {\n * version: '1.0.0',\n * validateStale: true,\n * onProgress: (loaded, total) => console.log(`${loaded}/${total}`)\n * });\n * ```\n */\nexport async function fetchWithCache(\n url: string,\n optionsOrProgress?: FetchWithCacheOptions | ((loaded: number, total: number) => void)\n): Promise<ArrayBuffer> {\n // Normalize arguments - support both old and new signatures\n let options: FetchWithCacheOptions = {};\n if (typeof optionsOrProgress === 'function') {\n // Legacy signature: fetchWithCache(url, onProgress)\n options = { 
onProgress: optionsOrProgress };\n } else if (optionsOrProgress) {\n // New signature: fetchWithCache(url, options)\n options = optionsOrProgress;\n }\n\n const { version, validateStale = false, onProgress } = options;\n\n const cache = getModelCache();\n const cacheKey = version ? getCacheKey(url, version) : url;\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('fetchWithCache', {\n 'fetch.url': url,\n ...(version && { 'fetch.version': version }),\n 'fetch.validate_stale': validateStale,\n });\n\n // Check cache with optional staleness validation\n if (validateStale) {\n const validation = await cache.getWithValidation(cacheKey, url);\n\n if (validation.data && !validation.stale) {\n console.log(`[ModelCache] Cache hit (validated): ${url} (${(validation.data.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n onProgress?.(validation.data.byteLength, validation.data.byteLength);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.cache_validated': true,\n 'fetch.cache_stale': false,\n 'fetch.size_bytes': validation.data.byteLength,\n });\n span?.end();\n return validation.data;\n }\n\n if (validation.stale) {\n console.log(`[ModelCache] Cache stale, refetching: ${url}`);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.cache_validated': true,\n 'fetch.cache_stale': true,\n });\n // Continue to fetch fresh data\n }\n // If data is null, continue to fetch\n } else {\n // Simple cache check without validation (backwards compatible behavior)\n const cached = await cache.get(cacheKey);\n if (cached) {\n console.log(`[ModelCache] Cache hit: ${url} (${(cached.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n onProgress?.(cached.byteLength, cached.byteLength);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.size_bytes': cached.byteLength,\n });\n span?.end();\n return cached;\n }\n }\n\n span?.setAttributes({ 'fetch.cache_hit': false });\n console.log(`[ModelCache] Cache miss, fetching: ${url}`);\n\n try {\n // Fetch with progress\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(`Failed to fetch ${url}: ${response.status}`);\n }\n\n const contentLength = response.headers.get('content-length');\n const total = contentLength ? parseInt(contentLength, 10) : 0;\n const etag = response.headers.get('etag') ?? 
undefined;\n\n // Check if file is too large for IndexedDB (avoid memory pressure during structured clone)\n const tooLargeForCache = total > MAX_CACHE_SIZE_BYTES;\n if (tooLargeForCache) {\n console.log(`[ModelCache] File too large for IndexedDB (${(total / 1024 / 1024).toFixed(0)}MB > 500MB), using HTTP cache only`);\n }\n\n if (!response.body) {\n const data = await response.arrayBuffer();\n if (!tooLargeForCache) {\n await cache.set(cacheKey, data, etag, version);\n }\n span?.setAttributes({\n 'fetch.size_bytes': data.byteLength,\n 'fetch.cached_to_indexeddb': !tooLargeForCache,\n });\n span?.end();\n return data;\n }\n\n // Stream with progress\n const reader = response.body.getReader();\n const chunks: Uint8Array[] = [];\n let loaded = 0;\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n loaded += value.length;\n onProgress?.(loaded, total || loaded);\n }\n\n // Combine chunks\n const data = new Uint8Array(loaded);\n let offset = 0;\n for (const chunk of chunks) {\n data.set(chunk, offset);\n offset += chunk.length;\n }\n\n const buffer = data.buffer;\n\n // Cache for next time (if not too large)\n if (!tooLargeForCache) {\n await cache.set(cacheKey, buffer, etag, version);\n console.log(`[ModelCache] Cached: ${url} (${(buffer.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n }\n\n span?.setAttributes({\n 'fetch.size_bytes': buffer.byteLength,\n 'fetch.cached_to_indexeddb': !tooLargeForCache,\n });\n span?.end();\n\n return buffer;\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n throw error;\n }\n}\n\n/**\n * Preload models into cache without creating sessions\n */\nexport async function preloadModels(\n urls: string[],\n onProgress?: (current: number, total: number, url: string) => void\n): Promise<void> {\n const cache = getModelCache();\n\n for (let i = 0; i < urls.length; i++) {\n const url = urls[i];\n onProgress?.(i, urls.length, url);\n\n if (await cache.has(url)) {\n console.log(`[ModelCache] Already cached: ${url}`);\n continue;\n }\n\n await fetchWithCache(url);\n }\n\n onProgress?.(urls.length, urls.length, 'done');\n}\n\n/**\n * Format bytes as human readable string\n */\nexport function formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)} MB`;\n return `${(bytes / 1024 / 1024 / 1024).toFixed(1)} GB`;\n}\n","/**\r\n * Runtime detection utilities for platform-specific inference configuration\r\n *\r\n * These utilities help determine the optimal backend (WebGPU vs WASM) based on\r\n * the current platform's capabilities and known limitations.\r\n *\r\n * Key considerations:\r\n * - iOS Safari: WebGPU crashes due to JSEP bugs (GitHub #22776, #26827)\r\n * - Android Chrome: WebGPU works well (Chrome 121+)\r\n * - Desktop: WebGPU preferred for performance\r\n *\r\n * @module utils/runtime\r\n */\r\n\r\n/**\r\n * Supported inference backends\r\n */\r\nexport type RuntimeBackend = 'webgpu' | 'wasm';\r\n\r\n/**\r\n * User-configurable backend preference\r\n */\r\nexport type BackendPreference =\r\n | 'auto' // iOS→WASM, else→WebGPU with fallback\r\n | 'webgpu' // Prefer WebGPU, fallback to WASM on error\r\n | 'wasm' // Prefer WASM, no WebGPU attempt\r\n | 'webgpu-only' // Force WebGPU, throw on failure (for debugging)\r\n | 'wasm-only'; // Force WASM, never load WebGPU bundle (smallest 
bundle)\r\n\r\n/**\r\n * Detect iOS Safari browser\r\n *\r\n * iOS Safari has severe WebGPU issues:\r\n * - JSEP compilation bugs cause OOM during session creation\r\n * - Threading bugs require numThreads=1\r\n * - Proxy mode triggers memory leaks\r\n *\r\n * @returns true if running in iOS Safari\r\n */\r\nexport function isIOSSafari(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n return (\r\n /iphone|ipad|ipod/.test(ua) ||\r\n // Safari on macOS could also have issues, but less severe\r\n // Only force WASM on actual iOS devices\r\n (/safari/.test(ua) && /mobile/.test(ua) && !/chrome|crios|fxios/.test(ua))\r\n );\r\n}\r\n\r\n/**\r\n * Detect any iOS device (regardless of browser)\r\n *\r\n * On iOS, all browsers use WebKit, so Chrome/Firefox on iOS\r\n * have the same limitations as Safari.\r\n *\r\n * @returns true if running on any iOS device\r\n */\r\nexport function isIOS(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n return /iphone|ipad|ipod/.test(ua);\r\n}\r\n\r\n/**\r\n * Detect Android device\r\n *\r\n * Android Chrome 121+ has good WebGPU support with Qualcomm/ARM GPUs.\r\n *\r\n * @returns true if running on Android\r\n */\r\nexport function isAndroid(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n return /android/i.test(navigator.userAgent);\r\n}\r\n\r\n/**\r\n * Detect any mobile device (iOS or Android)\r\n *\r\n * Mobile devices have different performance characteristics:\r\n * - Lower memory limits\r\n * - Thermal throttling\r\n * - Different GPU architectures\r\n *\r\n * @returns true if running on mobile\r\n */\r\nexport function isMobile(): boolean {\r\n return isIOS() || isAndroid();\r\n}\r\n\r\n/**\r\n * Check if WebGPU API is available in the browser\r\n *\r\n * Note: This only checks if the API exists, not if it works reliably.\r\n * iOS has navigator.gpu but ONNX Runtime's WebGPU backend crashes.\r\n *\r\n * @returns true if navigator.gpu exists\r\n */\r\nexport function hasWebGPUApi(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n return 'gpu' in navigator && navigator.gpu !== undefined;\r\n}\r\n\r\n/**\r\n * Get the recommended backend for the current platform\r\n *\r\n * Decision tree:\r\n * 1. iOS (any browser): Force WASM (WebGPU crashes)\r\n * 2. Android: WebGPU preferred (works in Chrome 121+)\r\n * 3. Desktop: WebGPU preferred (best performance)\r\n *\r\n * @returns 'wasm' for iOS, 'webgpu' for everything else\r\n */\r\nexport function getRecommendedBackend(): RuntimeBackend {\r\n // Safari (all platforms): Always WASM - WebGPU crashes due to JSEP bugs\r\n // iOS: All browsers use WebKit, so all have the same issue\r\n // macOS Safari: Same multithreaded JSEP build bug\r\n if (isSafari() || isIOS()) {\r\n return 'wasm';\r\n }\r\n\r\n // Android/Desktop (non-Safari): WebGPU preferred\r\n return 'webgpu';\r\n}\r\n\r\n/**\r\n * Resolve user preference to actual backend\r\n *\r\n * @param preference User's backend preference\r\n * @param webgpuAvailable Whether WebGPU is available and working\r\n * @returns The backend to use\r\n */\r\nexport function resolveBackend(\r\n preference: BackendPreference,\r\n webgpuAvailable: boolean\r\n): RuntimeBackend {\r\n switch (preference) {\r\n case 'wasm-only':\r\n return 'wasm';\r\n\r\n case 'webgpu-only':\r\n if (!webgpuAvailable) {\r\n throw new Error(\r\n 'WebGPU requested but not available. 
Use \"webgpu\" or \"auto\" for fallback.'\r\n );\r\n }\r\n return 'webgpu';\r\n\r\n case 'wasm':\r\n return 'wasm';\r\n\r\n case 'webgpu':\r\n return webgpuAvailable ? 'webgpu' : 'wasm';\r\n\r\n case 'auto':\r\n default:\r\n // Auto: Use platform recommendation, with WebGPU availability check\r\n const recommended = getRecommendedBackend();\r\n if (recommended === 'webgpu' && !webgpuAvailable) {\r\n return 'wasm';\r\n }\r\n return recommended;\r\n }\r\n}\r\n\r\n/**\r\n * Get optimal WASM thread count for current platform\r\n *\r\n * @returns Recommended number of WASM threads\r\n */\r\nexport function getOptimalWasmThreads(): number {\r\n if (isIOS()) {\r\n // iOS: Must be 1 to avoid shared memory bugs (GitHub #22086)\r\n return 1;\r\n }\r\n\r\n if (isAndroid()) {\r\n // Android: Conservative threading (2 threads)\r\n return 2;\r\n }\r\n\r\n // Desktop: Full threading (4 threads)\r\n return 4;\r\n}\r\n\r\n/**\r\n * Check if WASM proxy mode should be enabled\r\n *\r\n * Proxy mode offloads inference to a Web Worker, but has issues:\r\n * - iOS: Triggers Safari 26 JSEP memory leak\r\n * - Mobile: Generally unstable\r\n *\r\n * @returns true if proxy mode is safe to enable\r\n */\r\nexport function shouldEnableWasmProxy(): boolean {\r\n // Mobile: Disable proxy (triggers memory issues)\r\n if (isMobile()) {\r\n return false;\r\n }\r\n\r\n // Desktop: Enable proxy for better threading\r\n return true;\r\n}\r\n\r\n/**\r\n * Detect Safari browser on any platform (macOS + iOS)\r\n *\r\n * Safari WebKit has bugs with ONNX Runtime's WebGPU multithreaded JSEP build\r\n * that crash session creation. Both iOS and macOS Safari are affected.\r\n *\r\n * @returns true if running in Safari on any platform\r\n */\r\nexport function isSafari(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n // Safari: has \"safari\" but not Chrome, Chromium, CriOS, FxiOS, or Edge\r\n return /safari/.test(ua) && !/chrome|crios|fxios|chromium|edg/.test(ua);\r\n}\r\n\r\n/**\r\n * Recommend using CPU-optimized lip sync model (wav2arkit_cpu)\r\n *\r\n * All WebKit browsers (Safari macOS, Safari iOS, Chrome iOS, Firefox iOS)\r\n * have ONNX Runtime WebGPU JSEP bugs that crash session creation, and the\r\n * 384MB LAM model stack-overflows in WASM mode.\r\n * The wav2arkit_cpu model (1.8MB) provides identical 52 ARKit blendshape\r\n * output at 22x real-time on CPU/WASM.\r\n *\r\n * @returns true if on Safari or any iOS browser (should use CPU lip sync model)\r\n */\r\nexport function shouldUseCpuLipSync(): boolean {\r\n return isSafari() || isIOS();\r\n}\r\n\r\n/**\r\n * Check if Web Speech API is available in the browser\r\n *\r\n * The Web Speech API provides native speech recognition in Safari and Chrome.\r\n * On iOS Safari, this is significantly faster than Whisper WASM.\r\n *\r\n * @returns true if SpeechRecognition API is available\r\n */\r\nexport function isSpeechRecognitionAvailable(): boolean {\r\n if (typeof window === 'undefined') return false;\r\n return 'SpeechRecognition' in window || 'webkitSpeechRecognition' in window;\r\n}\r\n\r\n/**\r\n * Recommend using native Safari Speech API over Whisper on iOS\r\n *\r\n * On iOS, Whisper ASR via WASM takes ~1.3s per inference (30% over target).\r\n * Safari's native Web Speech API is:\r\n * - Much faster (native implementation)\r\n * - Battery-efficient (no WASM overhead)\r\n * - No model download needed (saves 30-150MB)\r\n *\r\n * @returns true if on iOS with Speech API available\r\n */\r\nexport 
function shouldUseNativeASR(): boolean {\r\n return isIOS() && isSpeechRecognitionAvailable();\r\n}\r\n\r\n/**\r\n * Recommend using server-side LAM over client-side on iOS\r\n *\r\n * On iOS, LAM lip sync via WASM takes ~332ms per second of audio (3.3x over target).\r\n * Server-side inference with GPU can achieve ~50ms, providing:\r\n * - Real-time lip sync (under 100ms target)\r\n * - Reduced iOS device thermal/battery impact\r\n * - Better user experience\r\n *\r\n * @returns true if on iOS (should use server-side lip sync)\r\n */\r\nexport function shouldUseServerLipSync(): boolean {\r\n return isIOS();\r\n}\r\n","/**\n * Lazy ONNX Runtime loader with conditional WebGPU/WASM bundle loading\n *\n * This module provides a way to dynamically load the appropriate ONNX Runtime bundle\n * based on the platform's capabilities. This is critical for iOS support because:\n *\n * 1. iOS Safari has WebGPU API but ONNX Runtime's WebGPU backend crashes\n * 2. Loading the WebGPU bundle wastes bandwidth and can cause issues\n * 3. WASM-only bundle is smaller and more reliable on iOS\n *\n * Usage:\n * ```typescript\n * const ort = await getOnnxRuntime('wasm'); // Load WASM-only bundle\n * const ort = await getOnnxRuntime('webgpu'); // Load WebGPU bundle (includes WASM)\n * ```\n *\n * @module inference/onnxLoader\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\n// Re-export session options type\nexport type SessionOptions = InferenceSession.SessionOptions;\nimport {\n RuntimeBackend,\n BackendPreference,\n isIOS,\n isMobile,\n getOptimalWasmThreads,\n shouldEnableWasmProxy,\n resolveBackend,\n hasWebGPUApi,\n} from '../utils/runtime';\n\n// Re-export RuntimeBackend for consumers\nexport type { RuntimeBackend } from '../utils/runtime';\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('OnnxLoader');\n\n// Cached ONNX Runtime instance\nlet ortInstance: OrtModule | null = null;\nlet loadedBackend: RuntimeBackend | null = null;\n\n// CDN path for WASM files\nconst WASM_CDN_PATH = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.23.2/dist/';\n\n/**\n * Check if WebGPU is available and likely to work\n *\n * This is more thorough than just checking navigator.gpu exists.\n * It actually requests an adapter to verify the GPU is accessible.\n *\n * @returns true if WebGPU is available and working\n */\nexport async function isWebGPUAvailable(): Promise<boolean> {\n // iOS: WebGPU is broken regardless of API presence\n if (isIOS()) {\n logger.debug('WebGPU check: iOS detected, returning false');\n return false;\n }\n\n if (!hasWebGPUApi()) {\n logger.debug('WebGPU check: navigator.gpu not available');\n return false;\n }\n\n try {\n const adapter = await navigator.gpu.requestAdapter();\n if (!adapter) {\n logger.debug('WebGPU check: No adapter available');\n return false;\n }\n\n // Check for minimum required features\n const device = await adapter.requestDevice();\n if (!device) {\n logger.debug('WebGPU check: Could not create device');\n return false;\n }\n\n // Clean up\n device.destroy();\n\n logger.debug('WebGPU check: Available and working');\n return true;\n } catch (err) {\n logger.debug('WebGPU check: Error during 
availability check', { error: err });\n return false;\n }\n}\n\n/**\n * Configure WASM environment settings based on platform\n *\n * This must be called before creating any inference sessions.\n */\nfunction configureWasm(ort: OrtModule): void {\n // Set CDN path for WASM files\n ort.env.wasm.wasmPaths = WASM_CDN_PATH;\n\n // Platform-specific threading configuration\n const numThreads = getOptimalWasmThreads();\n const enableProxy = shouldEnableWasmProxy();\n\n ort.env.wasm.numThreads = numThreads;\n ort.env.wasm.simd = true; // SIMD always helps\n ort.env.wasm.proxy = enableProxy;\n\n logger.info('WASM configured', {\n numThreads,\n simd: true,\n proxy: enableProxy,\n platform: isIOS() ? 'iOS' : isMobile() ? 'Android' : 'Desktop',\n });\n}\n\n/**\n * Load ONNX Runtime with the specified backend\n *\n * This lazily loads the appropriate bundle:\n * - 'wasm': Loads onnxruntime-web (WASM-only, smaller)\n * - 'webgpu': Loads onnxruntime-web/webgpu (includes WebGPU + WASM fallback)\n *\n * Once loaded, the same instance is reused for all subsequent calls.\n * If you need to switch backends, you must reload the page.\n *\n * @param backend The backend to load ('webgpu' or 'wasm')\n * @returns The ONNX Runtime module\n */\nexport async function getOnnxRuntime(\n backend: RuntimeBackend\n): Promise<OrtModule> {\n // Return cached instance if same backend\n if (ortInstance && loadedBackend === backend) {\n return ortInstance;\n }\n\n // Warn if trying to switch backends (not supported without page reload)\n if (ortInstance && loadedBackend !== backend) {\n logger.warn(\n `ONNX Runtime already loaded with ${loadedBackend} backend. ` +\n `Cannot switch to ${backend}. Returning existing instance.`\n );\n return ortInstance;\n }\n\n logger.info(`Loading ONNX Runtime with ${backend} backend...`);\n\n try {\n if (backend === 'wasm') {\n // Load WASM-only bundle (smaller, no WebGPU code)\n const module = await import('onnxruntime-web');\n ortInstance = module.default || module;\n } else {\n // Load WebGPU bundle (includes WASM fallback)\n const module = await import('onnxruntime-web/webgpu');\n ortInstance = module.default || module;\n }\n\n loadedBackend = backend;\n\n // Configure WASM settings (applies to both bundles)\n configureWasm(ortInstance);\n\n logger.info(`ONNX Runtime loaded successfully`, { backend });\n\n return ortInstance;\n } catch (err) {\n logger.error(`Failed to load ONNX Runtime with ${backend} backend`, {\n error: err,\n });\n throw new Error(\n `Failed to load ONNX Runtime: ${err instanceof Error ? 
err.message : String(err)}`\n );\n }\n}\n\n/**\n * Get the appropriate ONNX Runtime based on user preference\n *\n * This resolves the user's preference against platform capabilities\n * and loads the appropriate bundle.\n *\n * @param preference User's backend preference\n * @returns The ONNX Runtime module and the resolved backend\n */\nexport async function getOnnxRuntimeForPreference(\n preference: BackendPreference = 'auto'\n): Promise<{ ort: OrtModule; backend: RuntimeBackend }> {\n // Check WebGPU availability (skip for iOS)\n const webgpuAvailable = await isWebGPUAvailable();\n\n // Resolve preference to actual backend\n const backend = resolveBackend(preference, webgpuAvailable);\n\n logger.info('Resolved backend preference', {\n preference,\n webgpuAvailable,\n resolvedBackend: backend,\n });\n\n // Load the appropriate bundle\n const ort = await getOnnxRuntime(backend);\n\n return { ort, backend };\n}\n\n/**\n * Get session options for creating an inference session\n *\n * This returns optimized session options based on the backend and platform.\n *\n * @param backend The backend being used\n * @returns Session options for InferenceSession.create()\n */\nexport function getSessionOptions(\n backend: RuntimeBackend\n): SessionOptions {\n if (backend === 'webgpu') {\n return {\n executionProviders: [\n {\n name: 'webgpu',\n preferredLayout: 'NHWC', // Reduces memory overhead for layout conversions\n } as const,\n ],\n graphOptimizationLevel: 'all',\n };\n }\n\n // WASM backend\n return {\n executionProviders: ['wasm'],\n graphOptimizationLevel: 'all',\n };\n}\n\n/**\n * Create an inference session with automatic fallback\n *\n * If WebGPU session creation fails, automatically falls back to WASM.\n *\n * @param modelBuffer The model data as ArrayBuffer\n * @param preferredBackend The preferred backend\n * @returns The created session and the backend used\n */\nexport async function createSessionWithFallback(\n modelBuffer: ArrayBuffer,\n preferredBackend: RuntimeBackend\n): Promise<{\n session: InferenceSession;\n backend: RuntimeBackend;\n}> {\n const ort = await getOnnxRuntime(preferredBackend);\n\n // Convert ArrayBuffer to Uint8Array for onnxruntime-common types\n const modelData = new Uint8Array(modelBuffer);\n\n if (preferredBackend === 'webgpu') {\n try {\n const options = getSessionOptions('webgpu');\n const session = await ort.InferenceSession.create(modelData, options);\n\n logger.info('Session created with WebGPU backend');\n return { session, backend: 'webgpu' };\n } catch (err) {\n logger.warn('WebGPU session creation failed, falling back to WASM', {\n error: err instanceof Error ? 
err.message : String(err),\n });\n // Fall through to WASM\n }\n }\n\n // WASM (primary or fallback)\n const options = getSessionOptions('wasm');\n const session = await ort.InferenceSession.create(modelData, options);\n\n logger.info('Session created with WASM backend');\n return { session, backend: 'wasm' };\n}\n\n/**\n * Get the currently loaded backend (if any)\n */\nexport function getLoadedBackend(): RuntimeBackend | null {\n return loadedBackend;\n}\n\n/**\n * Check if ONNX Runtime has been loaded\n */\nexport function isOnnxRuntimeLoaded(): boolean {\n return ortInstance !== null;\n}\n","/**\n * Shared blendshape constants and utilities for lip sync inference\n *\n * Contains LAM_BLENDSHAPES (canonical ordering), symmetrization, and\n * index remapping used by both Wav2Vec2Inference and Wav2ArkitCpuInference.\n *\n * This module is the single source of truth for blendshape ordering to\n * avoid circular dependencies between inference classes.\n *\n * @category Inference\n */\n\n/**\n * LAM model blendshape names in order (52 total)\n * NOTE: This is alphabetical ordering used by LAM, different from standard ARKit order\n */\nexport const LAM_BLENDSHAPES = [\n 'browDownLeft', 'browDownRight', 'browInnerUp', 'browOuterUpLeft', 'browOuterUpRight',\n 'cheekPuff', 'cheekSquintLeft', 'cheekSquintRight',\n 'eyeBlinkLeft', 'eyeBlinkRight', 'eyeLookDownLeft', 'eyeLookDownRight',\n 'eyeLookInLeft', 'eyeLookInRight', 'eyeLookOutLeft', 'eyeLookOutRight',\n 'eyeLookUpLeft', 'eyeLookUpRight', 'eyeSquintLeft', 'eyeSquintRight',\n 'eyeWideLeft', 'eyeWideRight',\n 'jawForward', 'jawLeft', 'jawOpen', 'jawRight',\n 'mouthClose', 'mouthDimpleLeft', 'mouthDimpleRight', 'mouthFrownLeft', 'mouthFrownRight',\n 'mouthFunnel', 'mouthLeft', 'mouthLowerDownLeft', 'mouthLowerDownRight',\n 'mouthPressLeft', 'mouthPressRight', 'mouthPucker', 'mouthRight',\n 'mouthRollLower', 'mouthRollUpper', 'mouthShrugLower', 'mouthShrugUpper',\n 'mouthSmileLeft', 'mouthSmileRight', 'mouthStretchLeft', 'mouthStretchRight',\n 'mouthUpperUpLeft', 'mouthUpperUpRight',\n 'noseSneerLeft', 'noseSneerRight', 'tongueOut'\n] as const;\n\n/** Alias for backwards compatibility */\nexport const ARKIT_BLENDSHAPES = LAM_BLENDSHAPES;\n\n/**\n * ARKit Left/Right symmetric pairs for blendshape symmetrization\n * From LAM official postprocessing (models/utils.py)\n */\nconst ARKIT_SYMMETRIC_PAIRS: [string, string][] = [\n ['jawLeft', 'jawRight'],\n ['mouthLeft', 'mouthRight'],\n ['mouthSmileLeft', 'mouthSmileRight'],\n ['mouthFrownLeft', 'mouthFrownRight'],\n ['mouthDimpleLeft', 'mouthDimpleRight'],\n ['mouthStretchLeft', 'mouthStretchRight'],\n ['mouthPressLeft', 'mouthPressRight'],\n ['mouthUpperUpLeft', 'mouthUpperUpRight'],\n ['mouthLowerDownLeft', 'mouthLowerDownRight'],\n ['noseSneerLeft', 'noseSneerRight'],\n ['cheekSquintLeft', 'cheekSquintRight'],\n ['browDownLeft', 'browDownRight'],\n ['browOuterUpLeft', 'browOuterUpRight'],\n ['eyeBlinkLeft', 'eyeBlinkRight'],\n ['eyeLookUpLeft', 'eyeLookUpRight'],\n ['eyeLookDownLeft', 'eyeLookDownRight'],\n ['eyeLookInLeft', 'eyeLookInRight'],\n ['eyeLookOutLeft', 'eyeLookOutRight'],\n ['eyeSquintLeft', 'eyeSquintRight'],\n ['eyeWideLeft', 'eyeWideRight'],\n];\n\n// Precompute index pairs for fast symmetrization\nconst SYMMETRIC_INDEX_PAIRS: [number, number][] = ARKIT_SYMMETRIC_PAIRS.map(([l, r]) => [\n LAM_BLENDSHAPES.indexOf(l as typeof LAM_BLENDSHAPES[number]),\n LAM_BLENDSHAPES.indexOf(r as typeof LAM_BLENDSHAPES[number]),\n]).filter(([l, r]) => l !== -1 && r !== -1) as [number, 
number][];\n\n/**\n * Symmetrize blendshapes by averaging left/right pairs\n * From LAM official postprocessing (models/utils.py)\n * This fixes asymmetric output from the raw model\n */\nexport function symmetrizeBlendshapes(frame: Float32Array): Float32Array {\n const result = new Float32Array(frame);\n for (const [lIdx, rIdx] of SYMMETRIC_INDEX_PAIRS) {\n const avg = (frame[lIdx] + frame[rIdx]) / 2;\n result[lIdx] = avg;\n result[rIdx] = avg;\n }\n return result;\n}\n\n/**\n * wav2arkit_cpu model blendshape ordering\n *\n * Indices 0-24 match LAM_BLENDSHAPES, but 25+ diverge:\n * - LAM puts jawRight, mouthClose, mouthDimpleLeft, mouthDimpleRight at 25-28\n * - wav2arkit_cpu puts mouthFrownLeft at 25 and moves those four to 48-51\n */\nexport const WAV2ARKIT_BLENDSHAPES = [\n 'browDownLeft', 'browDownRight', 'browInnerUp', 'browOuterUpLeft', 'browOuterUpRight',\n 'cheekPuff', 'cheekSquintLeft', 'cheekSquintRight',\n 'eyeBlinkLeft', 'eyeBlinkRight', 'eyeLookDownLeft', 'eyeLookDownRight',\n 'eyeLookInLeft', 'eyeLookInRight', 'eyeLookOutLeft', 'eyeLookOutRight',\n 'eyeLookUpLeft', 'eyeLookUpRight', 'eyeSquintLeft', 'eyeSquintRight',\n 'eyeWideLeft', 'eyeWideRight',\n 'jawForward', 'jawLeft', 'jawOpen',\n 'mouthFrownLeft', 'mouthFrownRight', 'mouthFunnel', 'mouthLeft',\n 'mouthLowerDownLeft', 'mouthLowerDownRight',\n 'mouthPressLeft', 'mouthPressRight', 'mouthPucker', 'mouthRight',\n 'mouthRollLower', 'mouthRollUpper', 'mouthShrugLower', 'mouthShrugUpper',\n 'mouthSmileLeft', 'mouthSmileRight', 'mouthStretchLeft', 'mouthStretchRight',\n 'mouthUpperUpLeft', 'mouthUpperUpRight',\n 'noseSneerLeft', 'noseSneerRight', 'tongueOut',\n 'mouthClose', 'mouthDimpleLeft', 'mouthDimpleRight', 'jawRight',\n] as const;\n\n/**\n * Precomputed remap table: wav2arkit_cpu output index → LAM_BLENDSHAPES index\n *\n * For each wav2arkit output index i, REMAP_TO_LAM[i] gives the LAM_BLENDSHAPES\n * index where that value should be placed.\n */\nexport const REMAP_WAV2ARKIT_TO_LAM: number[] = WAV2ARKIT_BLENDSHAPES.map(\n (name) => LAM_BLENDSHAPES.indexOf(name as typeof LAM_BLENDSHAPES[number])\n);\n\n/**\n * Remap a blendshape frame from wav2arkit_cpu ordering to LAM_BLENDSHAPES ordering\n *\n * @param frame - Float32Array of 52 blendshape values in wav2arkit_cpu order\n * @returns Float32Array of 52 blendshape values in LAM_BLENDSHAPES order\n */\nexport function remapWav2ArkitToLam(frame: Float32Array): Float32Array {\n const result = new Float32Array(52);\n for (let i = 0; i < 52; i++) {\n result[REMAP_WAV2ARKIT_TO_LAM[i]] = frame[i];\n }\n return result;\n}\n","/**\r\n * Unified Wav2Vec2 inference engine for Audio-to-Expression + ASR\r\n *\r\n * Runs entirely in the browser using WebGPU or WASM.\r\n * Takes raw 16kHz audio and outputs:\r\n * - 52 ARKit blendshapes (lip sync)\r\n * - 32-token CTC logits (speech recognition)\r\n *\r\n * @category Inference\r\n *\r\n * @example Basic usage\r\n * ```typescript\r\n * import { Wav2Vec2Inference } from '@omote/core';\r\n *\r\n * const wav2vec = new Wav2Vec2Inference({ modelUrl: '/models/unified_wav2vec2_asr_a2e.onnx' });\r\n * await wav2vec.load();\r\n *\r\n * // Process 1 second of audio (16kHz = 16000 samples)\r\n * const result = await wav2vec.infer(audioSamples);\r\n *\r\n * console.log('Blendshapes:', result.blendshapes); // [30, 52] for 30fps\r\n * console.log('ASR text:', result.text); // Decoded transcription\r\n * ```\r\n */\r\n\r\n// Type-only import for TypeScript (no runtime code loaded at import time)\r\n// At runtime, we dynamically import either 
'onnxruntime-web' or 'onnxruntime-web/webgpu'\r\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\r\n\r\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\nimport {\r\n getOnnxRuntimeForPreference,\r\n getSessionOptions,\r\n isWebGPUAvailable,\r\n type RuntimeBackend,\r\n} from './onnxLoader';\r\nimport { BackendPreference } from '../utils/runtime';\r\nimport { symmetrizeBlendshapes, LAM_BLENDSHAPES, ARKIT_BLENDSHAPES } from './blendshapeUtils';\r\n\r\n// Type alias for the ORT module (loaded dynamically)\r\ntype OrtModule = {\r\n InferenceSession: typeof InferenceSession;\r\n Tensor: typeof Tensor;\r\n env: Env;\r\n};\r\n\r\nconst logger = createLogger('Wav2Vec2');\r\n\r\n// Re-export for backward compatibility\r\nexport type InferenceBackend = BackendPreference;\r\n\r\nexport interface Wav2Vec2InferenceConfig {\r\n /** Path or URL to the ONNX model */\r\n modelUrl: string;\r\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\r\n backend?: InferenceBackend;\r\n /** Number of identity classes (default: 12 for streaming model) */\r\n numIdentityClasses?: number;\r\n}\r\n\r\nexport interface ModelInfo {\r\n backend: 'webgpu' | 'wasm';\r\n loadTimeMs: number;\r\n inputNames: string[];\r\n outputNames: string[];\r\n}\r\n\r\n// Re-export blendshape constants from shared utils (canonical source)\r\nexport { LAM_BLENDSHAPES, ARKIT_BLENDSHAPES } from './blendshapeUtils';\r\n\r\n/** CTC vocabulary (32 tokens from wav2vec2-base-960h) */\r\nexport const CTC_VOCAB = [\r\n '<pad>', '<s>', '</s>', '<unk>', '|', 'E', 'T', 'A', 'O', 'N',\r\n 'I', 'H', 'S', 'R', 'D', 'L', 'U', 'M', 'W', 'C',\r\n 'F', 'G', 'Y', 'P', 'B', 'V', 'K', \"'\", 'X', 'J', 'Q', 'Z'\r\n];\r\n\r\nexport interface Wav2Vec2Result {\r\n /** Blendshape weights [frames, 52] - 30fps */\r\n blendshapes: Float32Array[];\r\n /** Raw CTC logits [frames, 32] - 50fps */\r\n asrLogits: Float32Array[];\r\n /** Decoded text from CTC */\r\n text: string;\r\n /** Number of blendshape frames (30fps) — alias for numA2EFrames */\r\n numFrames: number;\r\n /** Number of A2E frames (30fps) */\r\n numA2EFrames: number;\r\n /** Number of ASR frames (50fps) */\r\n numASRFrames: number;\r\n /** Inference time in ms */\r\n inferenceTimeMs: number;\r\n}\r\n\r\nexport class Wav2Vec2Inference {\r\n private session: InferenceSession | null = null;\r\n private ort: OrtModule | null = null; // Lazy-loaded ONNX Runtime module\r\n private config: Wav2Vec2InferenceConfig;\r\n private _backend: RuntimeBackend = 'wasm';\r\n private isLoading = false;\r\n private numIdentityClasses: number;\r\n\r\n // Inference queue for handling concurrent calls\r\n private inferenceQueue: Promise<void> = Promise.resolve();\r\n\r\n constructor(config: Wav2Vec2InferenceConfig) {\r\n this.config = config;\r\n this.numIdentityClasses = config.numIdentityClasses ?? 12;\r\n }\r\n\r\n /**\r\n * Check if WebGPU is available and working\r\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\r\n */\r\n static isWebGPUAvailable = isWebGPUAvailable;\r\n\r\n get backend(): 'webgpu' | 'wasm' | null {\r\n return this.session ? 
this._backend : null;\r\n }\r\n\r\n get isLoaded(): boolean {\r\n return this.session !== null;\r\n }\r\n\r\n /**\r\n * Load the ONNX model\r\n */\r\n async load(): Promise<ModelInfo> {\r\n if (this.isLoading) {\r\n throw new Error('Model is already loading');\r\n }\r\n\r\n if (this.session) {\r\n throw new Error('Model already loaded. Call dispose() first.');\r\n }\r\n\r\n this.isLoading = true;\r\n const startTime = performance.now();\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('Wav2Vec2.load', {\r\n 'model.url': this.config.modelUrl,\r\n 'model.backend_requested': this.config.backend || 'auto',\r\n });\r\n\r\n try {\r\n // Lazy load ONNX Runtime with appropriate backend\r\n // iOS: Loads WASM-only bundle (smaller, no WebGPU code)\r\n // Android/Desktop: Loads WebGPU bundle (with WASM fallback)\r\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend || 'auto' });\r\n\r\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend || 'auto');\r\n this.ort = ort;\r\n this._backend = backend;\r\n\r\n logger.info('ONNX Runtime loaded', { backend: this._backend });\r\n\r\n // Load model with caching\r\n const cache = getModelCache();\r\n const modelUrl = this.config.modelUrl;\r\n const isCached = await cache.has(modelUrl);\r\n\r\n let modelBuffer: ArrayBuffer;\r\n if (isCached) {\r\n logger.debug('Loading model from cache', { modelUrl });\r\n modelBuffer = (await cache.get(modelUrl))!;\r\n\r\n // Check for cache corruption (entry exists but data is null)\r\n if (!modelBuffer) {\r\n logger.warn('Cache corruption detected, clearing and retrying', { modelUrl });\r\n await cache.delete(modelUrl);\r\n logger.info('Corrupted cache entry deleted, fetching fresh model', { modelUrl });\r\n modelBuffer = await fetchWithCache(modelUrl);\r\n }\r\n } else {\r\n logger.debug('Fetching and caching model', { modelUrl });\r\n modelBuffer = await fetchWithCache(modelUrl);\r\n }\r\n\r\n // Final validation\r\n if (!modelBuffer) {\r\n const errorMsg = `Failed to load model: ${modelUrl}. 
Model buffer is null or undefined even after retry.`;\r\n logger.error(errorMsg, { modelUrl, isCached });\r\n throw new Error(errorMsg);\r\n }\r\n\r\n logger.debug('Creating ONNX session', {\r\n size: formatBytes(modelBuffer.byteLength),\r\n backend: this._backend,\r\n });\r\n\r\n // Create session with optimized settings for the backend\r\n const sessionOptions = getSessionOptions(this._backend);\r\n logger.info('Creating session with execution provider', {\r\n executionProvider: this._backend,\r\n });\r\n\r\n // Create session from buffer (convert ArrayBuffer to Uint8Array for onnxruntime-common types)\r\n const modelData = new Uint8Array(modelBuffer);\r\n this.session = await this.ort!.InferenceSession.create(modelData, sessionOptions);\r\n\r\n logger.info('ONNX session created successfully', {\r\n executionProvider: this._backend,\r\n backend: this._backend,\r\n });\r\n\r\n const loadTimeMs = performance.now() - startTime;\r\n\r\n logger.info('Model loaded successfully', {\r\n backend: this._backend,\r\n loadTimeMs: Math.round(loadTimeMs),\r\n inputs: this.session.inputNames,\r\n outputs: this.session.outputNames,\r\n });\r\n\r\n span?.setAttributes({\r\n 'model.backend': this._backend,\r\n 'model.load_time_ms': loadTimeMs,\r\n 'model.cached': isCached,\r\n });\r\n span?.end();\r\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n\r\n // Warmup inference to initialize GPU kernels and contexts\r\n // This prevents hitching on the first real inference during playback\r\n logger.debug('Running warmup inference to initialize GPU context');\r\n const warmupStart = performance.now();\r\n const silentAudio = new Float32Array(16000); // 1 second of silence\r\n await this.infer(silentAudio, 0);\r\n const warmupTimeMs = performance.now() - warmupStart;\r\n logger.info('Warmup inference complete', {\r\n warmupTimeMs: Math.round(warmupTimeMs),\r\n backend: this._backend,\r\n });\r\n telemetry?.recordHistogram('omote.model.warmup_time', warmupTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n\r\n return {\r\n backend: this._backend,\r\n loadTimeMs,\r\n inputNames: [...this.session.inputNames],\r\n outputNames: [...this.session.outputNames],\r\n };\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n telemetry?.incrementCounter('omote.errors.total', 1, {\r\n model: 'wav2vec2',\r\n error_type: 'load_failed',\r\n });\r\n throw error;\r\n } finally {\r\n this.isLoading = false;\r\n }\r\n }\r\n\r\n /**\r\n * Run inference on raw audio\r\n * @param audioSamples - Float32Array of raw audio at 16kHz (16000 samples = 1 second)\r\n * @param identityIndex - Optional identity index (0-11, default 0 = neutral)\r\n *\r\n * Note: Model expects 1-second chunks (16000 samples) for optimal performance.\r\n * Audio will be zero-padded or truncated to 16000 samples.\r\n */\r\n async infer(\r\n audioSamples: Float32Array,\r\n identityIndex: number = 0\r\n ): Promise<Wav2Vec2Result> {\r\n if (!this.session) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\r\n // During interruptions, audioSamples buffer may get detached by ONNX Runtime\r\n // before we process it. 
Copy synchronously to preserve data.\r\n const audioSamplesCopy = new Float32Array(audioSamples);\r\n\r\n // Ensure audio is exactly 16000 samples (1 second)\r\n let audio: Float32Array;\r\n if (audioSamplesCopy.length === 16000) {\r\n audio = audioSamplesCopy;\r\n } else if (audioSamplesCopy.length < 16000) {\r\n // Zero-pad\r\n audio = new Float32Array(16000);\r\n audio.set(audioSamplesCopy, 0);\r\n } else {\r\n // Truncate\r\n audio = audioSamplesCopy.slice(0, 16000);\r\n }\r\n\r\n // Create identity one-hot vector\r\n const identity = new Float32Array(this.numIdentityClasses);\r\n identity[Math.min(identityIndex, this.numIdentityClasses - 1)] = 1.0;\r\n\r\n // CRITICAL: Force copy to prevent ArrayBuffer detachment by ONNX Runtime Web workers\r\n // Without copy, WASM backend transfers buffers to workers, causing \"memory access out of bounds\" errors\r\n const audioCopy = new Float32Array(audio);\r\n const identityCopy = new Float32Array(identity);\r\n\r\n const feeds = {\r\n 'audio': new this.ort!.Tensor('float32', audioCopy, [1, 16000]),\r\n 'identity': new this.ort!.Tensor('float32', identityCopy, [1, this.numIdentityClasses]),\r\n };\r\n\r\n // Queue the inference\r\n return this.queueInference(feeds);\r\n }\r\n\r\n /**\r\n * Decode CTC logits to text using greedy decoding\r\n */\r\n private decodeCTC(logits: Float32Array[]): string {\r\n const tokens: number[] = [];\r\n let prevToken = -1;\r\n\r\n for (const frame of logits) {\r\n // Find argmax\r\n let maxIdx = 0;\r\n let maxVal = frame[0];\r\n for (let i = 1; i < frame.length; i++) {\r\n if (frame[i] > maxVal) {\r\n maxVal = frame[i];\r\n maxIdx = i;\r\n }\r\n }\r\n\r\n // CTC collapse: skip duplicates and blanks (token 0)\r\n if (maxIdx !== prevToken && maxIdx !== 0) {\r\n tokens.push(maxIdx);\r\n }\r\n prevToken = maxIdx;\r\n }\r\n\r\n // Convert to text (token 4 = '|' = word separator = space)\r\n return tokens.map(t => CTC_VOCAB[t] === '|' ? 
' ' : CTC_VOCAB[t]).join('');\r\n }\r\n\r\n /**\r\n * Queue inference to serialize ONNX session calls\r\n */\r\n private queueInference(\r\n feeds: Record<string, Tensor>\r\n ): Promise<Wav2Vec2Result> {\r\n return new Promise((resolve, reject) => {\r\n this.inferenceQueue = this.inferenceQueue.then(async () => {\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('Wav2Vec2.infer', {\r\n 'inference.backend': this._backend,\r\n 'inference.input_samples': 16000,\r\n });\r\n try {\r\n const startTime = performance.now();\r\n const results = await this.session!.run(feeds);\r\n const inferenceTimeMs = performance.now() - startTime;\r\n\r\n const asrOutput = results['asr_logits'];\r\n const blendshapeOutput = results['blendshapes'];\r\n\r\n if (!asrOutput || !blendshapeOutput) {\r\n throw new Error('Missing outputs from model');\r\n }\r\n\r\n const asrData = asrOutput.data as Float32Array;\r\n const blendshapeData = blendshapeOutput.data as Float32Array;\r\n\r\n // Parse shapes: ASR is [1, time_50fps, 32], A2E is [1, time_30fps, 52]\r\n const numASRFrames = asrOutput.dims[1] as number;\r\n const numA2EFrames = blendshapeOutput.dims[1] as number;\r\n const asrVocabSize = asrOutput.dims[2] as number;\r\n const numBlendshapes = blendshapeOutput.dims[2] as number;\r\n\r\n // Split into per-frame arrays\r\n const asrLogits: Float32Array[] = [];\r\n const blendshapes: Float32Array[] = [];\r\n\r\n for (let f = 0; f < numASRFrames; f++) {\r\n asrLogits.push(asrData.slice(f * asrVocabSize, (f + 1) * asrVocabSize));\r\n }\r\n\r\n for (let f = 0; f < numA2EFrames; f++) {\r\n const rawFrame = blendshapeData.slice(f * numBlendshapes, (f + 1) * numBlendshapes);\r\n // Apply symmetrization postprocessing (from LAM official pipeline)\r\n blendshapes.push(symmetrizeBlendshapes(rawFrame));\r\n }\r\n\r\n // Decode CTC\r\n const text = this.decodeCTC(asrLogits);\r\n\r\n logger.trace('Inference completed', {\r\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\r\n numA2EFrames,\r\n numASRFrames,\r\n textLength: text.length,\r\n });\r\n\r\n span?.setAttributes({\r\n 'inference.duration_ms': inferenceTimeMs,\r\n 'inference.a2e_frames': numA2EFrames,\r\n 'inference.asr_frames': numASRFrames,\r\n });\r\n span?.end();\r\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n telemetry?.incrementCounter('omote.inference.total', 1, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n status: 'success',\r\n });\r\n\r\n resolve({\r\n blendshapes,\r\n asrLogits,\r\n text,\r\n numFrames: numA2EFrames,\r\n numA2EFrames,\r\n numASRFrames,\r\n inferenceTimeMs,\r\n });\r\n } catch (err) {\r\n span?.endWithError(err instanceof Error ? 
err : new Error(String(err)));\r\n telemetry?.incrementCounter('omote.inference.total', 1, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n status: 'error',\r\n });\r\n reject(err);\r\n }\r\n });\r\n });\r\n }\r\n\r\n /**\r\n * Get blendshape value by name for a specific frame\r\n */\r\n getBlendshape(blendshapes: Float32Array, name: typeof LAM_BLENDSHAPES[number]): number {\r\n const index = LAM_BLENDSHAPES.indexOf(name);\r\n if (index === -1) {\r\n throw new Error(`Unknown blendshape: ${name}`);\r\n }\r\n return blendshapes[index];\r\n }\r\n\r\n /**\r\n * Dispose of the model and free resources\r\n */\r\n async dispose(): Promise<void> {\r\n if (this.session) {\r\n await this.session.release();\r\n this.session = null;\r\n }\r\n }\r\n}\r\n","/**\n * FullFacePipeline - Combined LAM lip sync + Emotion upper face pipeline\n *\n * Orchestrates full-face animation by combining:\n * 1. LAM lip sync (52 ARKit blendshapes) via SyncedAudioPipeline\n * 2. Emotion2VecInference for emotion detection\n * 3. EmotionToBlendshapeMapper for upper face expressions\n *\n * Merge Strategy:\n * - Lower face (41 blendshapes): 100% from LAM (mouth, jaw, tongue, etc.)\n * - Upper face (11 blendshapes): Emotion overlay with LAM as subtle fallback\n * Formula: emotion * 0.8 + lam * 0.2\n *\n * This ensures accurate lip sync from LAM while adding expressive upper face\n * animations driven by speech emotion detection.\n *\n * @category Audio\n *\n * @example Basic usage\n * ```typescript\n * import { FullFacePipeline, Wav2Vec2Inference, Emotion2VecInference } from '@omote/core';\n *\n * const lam = new Wav2Vec2Inference({ modelUrl: '/models/lam.onnx' });\n * const emotion = new Emotion2VecInference({ modelUrl: '/models/emotion.onnx' });\n *\n * await lam.load();\n * await emotion.load();\n *\n * const pipeline = new FullFacePipeline({ lam, emotion });\n * await pipeline.initialize();\n *\n * pipeline.on('full_frame_ready', (frame) => {\n * // frame.blendshapes: merged 52 ARKit blendshapes\n * // frame.emotion: raw emotion data\n * applyToAvatar(frame.blendshapes);\n * });\n *\n * pipeline.start();\n * // Feed audio chunks...\n * await pipeline.onAudioChunk(audioData);\n * ```\n */\n\nimport { AudioScheduler } from './AudioScheduler';\nimport { AudioChunkCoalescer } from './AudioChunkCoalescer';\nimport { LAMPipeline } from './LAMPipeline';\nimport { EventEmitter } from '../events/EventEmitter';\nimport { EmotionToBlendshapeMapper, UPPER_FACE_BLENDSHAPES } from '../animation/EmotionToBlendshapeMapper';\nimport type { UpperFaceBlendshapes } from '../animation/EmotionToBlendshapeMapper';\nimport type { LipSyncBackend } from '../inference/LipSyncBackend';\nimport { LAM_BLENDSHAPES } from '../inference/Wav2Vec2Inference';\nimport type { Emotion2VecInference, EmotionFrame } from '../inference/Emotion2VecInference';\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('FullFacePipeline');\n\n/**\n * Index map for O(1) blendshape name lookup\n */\nconst BLENDSHAPE_INDEX_MAP = new Map<string, number>();\nLAM_BLENDSHAPES.forEach((name, index) => {\n BLENDSHAPE_INDEX_MAP.set(name, index);\n});\n\n/**\n * Set of upper face blendshape names for fast lookup\n */\nconst UPPER_FACE_SET = new Set<string>(UPPER_FACE_BLENDSHAPES);\n\n/**\n * Configuration for FullFacePipeline\n */\nexport interface FullFacePipelineOptions {\n /** Sample rate in Hz (default: 16000) */\n sampleRate?: number;\n /** Target chunk duration in ms for coalescing (default: 200) */\n chunkTargetMs?: number;\n /** LAM inference 
engine */\n lam: LipSyncBackend;\n /** Emotion inference engine (optional - pipeline works without emotion) */\n emotion?: Emotion2VecInference;\n /**\n * Emotion blend factor for upper face blendshapes (0-1)\n * Higher values give more weight to emotion detection\n * @default 0.8\n */\n emotionBlendFactor?: number;\n /**\n * LAM blend factor for upper face blendshapes (0-1)\n * Provides subtle fallback from LAM when emotion is weak\n * @default 0.2\n */\n lamBlendFactor?: number;\n /**\n * Enable lazy loading of emotion model\n * If true, pipeline starts with LAM-only and adds emotion when ready\n * @default true\n */\n lazyLoadEmotion?: boolean;\n}\n\n/**\n * Full face frame with merged blendshapes and emotion data\n */\nexport interface FullFaceFrame {\n /** Merged 52 ARKit blendshapes (lower face from LAM + upper face from emotion) */\n blendshapes: Float32Array;\n /** Original LAM blendshapes (52) */\n lamBlendshapes: Float32Array;\n /** Emotion-driven upper face blendshapes (11) */\n emotionBlendshapes: UpperFaceBlendshapes;\n /** Raw emotion frame data */\n emotion: EmotionFrame | null;\n /** AudioContext timestamp for this frame */\n timestamp: number;\n}\n\n/**\n * Events emitted by FullFacePipeline\n */\nexport interface FullFacePipelineEvents {\n /** New merged frame ready for display */\n full_frame_ready: FullFaceFrame;\n /** Raw LAM frame ready (for debugging/monitoring) */\n lam_frame_ready: Float32Array;\n /** Emotion frame ready (for debugging/monitoring) */\n emotion_frame_ready: EmotionFrame;\n /** Playback has completed */\n playback_complete: void;\n /** First frame ready, playback starting */\n playback_start: number;\n /** Error occurred */\n error: Error;\n /** Emotion model loaded (for lazy loading) */\n emotion_ready: void;\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown;\n}\n\n/**\n * Internal emotion frame with timestamp for synchronization\n */\ninterface TimestampedEmotionFrame {\n frame: EmotionFrame;\n timestamp: number;\n}\n\n/**\n * FullFacePipeline - Unified LAM + Emotion animation pipeline\n */\nexport class FullFacePipeline extends EventEmitter<FullFacePipelineEvents> {\n private scheduler: AudioScheduler;\n private coalescer: AudioChunkCoalescer;\n private lamPipeline: LAMPipeline;\n private emotionMapper: EmotionToBlendshapeMapper;\n\n private waitingForFirstLAM = false;\n private bufferedChunks: ArrayBuffer[] = [];\n private monitorInterval: ReturnType<typeof setInterval> | null = null;\n private frameAnimationId: number | null = null;\n\n // Emotion processing\n private emotion: Emotion2VecInference | null = null;\n private emotionReady = false;\n private emotionDisabledDueToError = false; // Graceful degradation after memory error\n private emotionBuffer: Float32Array = new Float32Array(0);\n private emotionBufferStartTime = 0;\n private emotionFrameQueue: TimestampedEmotionFrame[] = [];\n private lastEmotionFrame: EmotionFrame | null = null;\n\n // Blend factors\n private emotionBlendFactor: number;\n private lamBlendFactor: number;\n\n constructor(private readonly options: FullFacePipelineOptions) {\n super();\n\n const sampleRate = options.sampleRate ?? 16000;\n this.emotionBlendFactor = options.emotionBlendFactor ?? 0.8;\n this.lamBlendFactor = options.lamBlendFactor ?? 0.2;\n\n this.scheduler = new AudioScheduler({ sampleRate });\n this.coalescer = new AudioChunkCoalescer({\n sampleRate,\n targetDurationMs: options.chunkTargetMs ?? 
200,\n });\n this.lamPipeline = new LAMPipeline({\n sampleRate,\n onInference: (_frameCount) => {\n if (this.waitingForFirstLAM) {\n this.onFirstLAMComplete();\n }\n },\n onError: (error) => {\n this.emit('error', error);\n },\n });\n this.emotionMapper = new EmotionToBlendshapeMapper({\n smoothingFactor: 0.15,\n confidenceThreshold: 0.3,\n intensity: 1.0,\n });\n\n // Set emotion engine if provided\n if (options.emotion) {\n this.emotion = options.emotion;\n this.emotionReady = options.emotion.isLoaded;\n }\n }\n\n /**\n * Initialize the pipeline\n */\n async initialize(): Promise<void> {\n await this.scheduler.initialize();\n\n // Check if emotion is already loaded\n if (this.emotion?.isLoaded) {\n this.emotionReady = true;\n logger.info('Emotion engine ready');\n }\n }\n\n /**\n * Set or update the emotion inference engine\n * Call this for lazy loading after pipeline is already running\n */\n setEmotionEngine(emotion: Emotion2VecInference): void {\n this.emotion = emotion;\n this.emotionReady = emotion.isLoaded;\n\n if (this.emotionReady) {\n logger.info('Emotion engine set and ready');\n this.emit('emotion_ready', undefined as any);\n }\n }\n\n /**\n * Mark emotion engine as ready (for lazy loading)\n */\n markEmotionReady(): void {\n if (this.emotion) {\n this.emotionReady = true;\n logger.info('Emotion engine marked ready');\n this.emit('emotion_ready', undefined as any);\n }\n }\n\n /**\n * Start a new playback session\n */\n start(): void {\n this.scheduler.reset();\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.bufferedChunks = [];\n this.waitingForFirstLAM = true;\n\n // Reset emotion state (but keep disabled flag - memory issue persists across sessions)\n this.emotionBuffer = new Float32Array(0);\n this.emotionBufferStartTime = 0;\n this.emotionFrameQueue = [];\n this.lastEmotionFrame = null;\n this.emotionMapper.reset();\n // Note: emotionDisabledDueToError is NOT reset - once memory fails, it won't recover\n\n this.startFrameLoop();\n this.startMonitoring();\n }\n\n /**\n * Receive audio chunk from network\n */\n async onAudioChunk(chunk: Uint8Array): Promise<void> {\n const combined = this.coalescer.add(chunk);\n if (!combined) {\n return;\n }\n\n // Convert to Float32\n const int16 = new Int16Array(combined);\n const float32 = new Float32Array(int16.length);\n for (let i = 0; i < int16.length; i++) {\n float32[i] = int16[i] / 32768;\n }\n\n if (this.waitingForFirstLAM) {\n this.bufferedChunks.push(combined);\n const estimatedTime = this.scheduler.getCurrentTime();\n\n // Process LAM\n await this.lamPipeline.push(float32, estimatedTime, this.options.lam);\n\n // Process emotion in parallel (non-blocking)\n if (this.emotionReady && this.emotion) {\n this.queueEmotionProcessing(float32, estimatedTime);\n }\n } else {\n const scheduleTime = await this.scheduler.schedule(float32);\n\n // Process LAM\n await this.lamPipeline.push(float32, scheduleTime, this.options.lam);\n\n // Process emotion in parallel\n if (this.emotionReady && this.emotion) {\n this.queueEmotionProcessing(float32, scheduleTime);\n }\n }\n }\n\n /**\n * Queue emotion processing (accumulates to 1 second chunks)\n */\n private queueEmotionProcessing(samples: Float32Array, timestamp: number): void {\n if (!this.emotion || this.emotionDisabledDueToError) return;\n\n // Track buffer start time\n if (this.emotionBuffer.length === 0) {\n this.emotionBufferStartTime = timestamp;\n }\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.emotionBuffer.length + samples.length);\n 
newBuffer.set(this.emotionBuffer, 0);\n newBuffer.set(samples, this.emotionBuffer.length);\n this.emotionBuffer = newBuffer;\n\n // Process when we have 1 second (16000 samples)\n if (this.emotionBuffer.length >= 16000) {\n this.processEmotionBuffer();\n }\n }\n\n /**\n * Process accumulated emotion buffer\n */\n private async processEmotionBuffer(): Promise<void> {\n if (!this.emotion || this.emotionBuffer.length < 16000) return;\n\n try {\n const toProcess = this.emotionBuffer.slice(0, 16000);\n const processedStartTime = this.emotionBufferStartTime;\n\n // Keep remaining samples\n this.emotionBuffer = this.emotionBuffer.slice(16000);\n this.emotionBufferStartTime = processedStartTime + 1.0; // 1 second processed\n\n // Run emotion inference\n const result = await this.emotion.infer(toProcess);\n\n // Queue emotion frames with timestamps (50Hz = 20ms per frame)\n const frameDuration = 1 / 50; // 20ms\n for (let i = 0; i < result.frames.length; i++) {\n const frame = result.frames[i];\n const frameTimestamp = processedStartTime + i * frameDuration;\n this.emotionFrameQueue.push({ frame, timestamp: frameTimestamp });\n }\n\n // Emit event for debugging\n this.emit('emotion_frame_ready', result.dominant);\n\n logger.debug('Emotion processed', {\n frames: result.frames.length,\n dominant: result.dominant.emotion,\n confidence: Math.round(result.dominant.confidence * 100),\n });\n } catch (error) {\n // Memory errors crash the WASM runtime - disable emotion for graceful degradation\n logger.warn('Emotion inference failed - disabling emotion for this session', {\n error: error instanceof Error ? error.message : String(error)\n });\n this.emotionDisabledDueToError = true;\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n // Don't emit error - emotion is supplementary, LAM continues\n }\n }\n\n /**\n * Get emotion frame for a given timestamp\n * Interpolates between 50Hz emotion and 30fps animation\n */\n private getEmotionFrameForTime(currentTime: number): EmotionFrame | null {\n // Remove old frames (keep 1 second buffer for smoothing)\n while (\n this.emotionFrameQueue.length > 0 &&\n this.emotionFrameQueue[0].timestamp < currentTime - 1.0\n ) {\n const removed = this.emotionFrameQueue.shift()!;\n this.lastEmotionFrame = removed.frame; // Cache for fallback\n }\n\n // Find the frame closest to current time\n for (let i = 0; i < this.emotionFrameQueue.length; i++) {\n if (this.emotionFrameQueue[i].timestamp >= currentTime) {\n // Return previous frame if available (we want frame at or before current time)\n if (i > 0) {\n return this.emotionFrameQueue[i - 1].frame;\n }\n return this.emotionFrameQueue[0].frame;\n }\n }\n\n // Return last frame in queue or cached frame as fallback\n if (this.emotionFrameQueue.length > 0) {\n return this.emotionFrameQueue[this.emotionFrameQueue.length - 1].frame;\n }\n\n return this.lastEmotionFrame;\n }\n\n /**\n * Merge LAM blendshapes with emotion upper face blendshapes\n */\n private mergeBlendshapes(\n lamFrame: Float32Array,\n emotionFrame: EmotionFrame | null\n ): { merged: Float32Array; emotionBlendshapes: UpperFaceBlendshapes } {\n const merged = new Float32Array(52);\n let emotionBlendshapes: UpperFaceBlendshapes;\n\n if (emotionFrame) {\n // Get emotion-driven blendshapes\n this.emotionMapper.mapFrame(emotionFrame);\n this.emotionMapper.update(33); // ~30fps\n emotionBlendshapes = this.emotionMapper.getCurrentBlendshapes();\n } else {\n // No emotion - use zeros\n emotionBlendshapes = {} as UpperFaceBlendshapes;\n for (const 
name of UPPER_FACE_BLENDSHAPES) {\n emotionBlendshapes[name] = 0;\n }\n }\n\n // Merge: lower face 100% LAM, upper face emotion + LAM fallback\n for (let i = 0; i < 52; i++) {\n const name = LAM_BLENDSHAPES[i];\n\n if (UPPER_FACE_SET.has(name)) {\n // Upper face: emotion * 0.8 + LAM * 0.2\n const emotionValue = emotionBlendshapes[name as keyof UpperFaceBlendshapes] ?? 0;\n const lamValue = lamFrame[i];\n merged[i] = emotionValue * this.emotionBlendFactor + lamValue * this.lamBlendFactor;\n } else {\n // Lower face: 100% LAM\n merged[i] = lamFrame[i];\n }\n }\n\n return { merged, emotionBlendshapes };\n }\n\n /**\n * Handle first LAM inference completion\n */\n private async onFirstLAMComplete(): Promise<void> {\n this.waitingForFirstLAM = false;\n\n const beforeSchedule = this.scheduler.getCurrentTime();\n let actualStartTime = beforeSchedule;\n\n // Schedule all buffered audio chunks\n for (let i = 0; i < this.bufferedChunks.length; i++) {\n const buffer = this.bufferedChunks[i];\n const int16 = new Int16Array(buffer);\n const float32 = new Float32Array(int16.length);\n for (let j = 0; j < int16.length; j++) {\n float32[j] = int16[j] / 32768;\n }\n const scheduleTime = await this.scheduler.schedule(float32);\n\n if (i === 0) {\n actualStartTime = scheduleTime;\n }\n }\n\n // Adjust LAM frame timestamps\n const timeOffset = actualStartTime - beforeSchedule;\n if (timeOffset !== 0) {\n this.lamPipeline.adjustTimestamps(timeOffset);\n\n // Also adjust emotion frame timestamps\n for (const frame of this.emotionFrameQueue) {\n frame.timestamp += timeOffset;\n }\n }\n\n this.bufferedChunks = [];\n this.emit('playback_start', actualStartTime);\n }\n\n /**\n * Start frame animation loop\n */\n private startFrameLoop(): void {\n const updateFrame = () => {\n const currentTime = this.scheduler.getCurrentTime();\n const lamFrame = this.lamPipeline.getFrameForTime(currentTime, this.options.lam);\n\n if (lamFrame) {\n // Get corresponding emotion frame\n const emotionFrame = this.getEmotionFrameForTime(currentTime);\n\n // Merge LAM + emotion\n const { merged, emotionBlendshapes } = this.mergeBlendshapes(lamFrame, emotionFrame);\n\n // Emit merged frame\n const fullFrame: FullFaceFrame = {\n blendshapes: merged,\n lamBlendshapes: lamFrame,\n emotionBlendshapes,\n emotion: emotionFrame,\n timestamp: currentTime,\n };\n\n this.emit('full_frame_ready', fullFrame);\n this.emit('lam_frame_ready', lamFrame);\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame);\n };\n\n this.frameAnimationId = requestAnimationFrame(updateFrame);\n }\n\n /**\n * End of audio stream\n */\n async end(): Promise<void> {\n // Flush remaining coalesced data\n const remaining = this.coalescer.flush();\n if (remaining) {\n const chunk = new Uint8Array(remaining);\n await this.onAudioChunk(chunk);\n }\n\n // Flush remaining LAM buffer\n await this.lamPipeline.flush(this.options.lam);\n\n // Process any remaining emotion buffer (even if less than 1 second)\n if (this.emotion && this.emotionBuffer.length > 0 && !this.emotionDisabledDueToError) {\n // Pad to 1 second for inference\n const padded = new Float32Array(16000);\n padded.set(this.emotionBuffer, 0);\n try {\n const result = await this.emotion.infer(padded);\n // Only queue frames for actual audio duration\n const actualFrames = Math.ceil(\n (this.emotionBuffer.length / 16000) * 50\n );\n for (let i = 0; i < Math.min(actualFrames, result.frames.length); i++) {\n const frameTimestamp =\n this.emotionBufferStartTime + i * (1 / 50);\n this.emotionFrameQueue.push({\n 
frame: result.frames[i],\n timestamp: frameTimestamp,\n });\n }\n } catch (error) {\n logger.warn('Final emotion flush failed', { error });\n }\n this.emotionBuffer = new Float32Array(0);\n }\n }\n\n /**\n * Stop playback immediately with smooth fade-out\n */\n async stop(fadeOutMs: number = 50): Promise<void> {\n this.stopMonitoring();\n await this.scheduler.cancelAll(fadeOutMs);\n\n this.bufferedChunks = [];\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.waitingForFirstLAM = false;\n\n // Clear emotion state\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n this.emotionMapper.reset();\n\n this.emit('playback_complete', undefined as any);\n }\n\n /**\n * Start monitoring for playback completion\n */\n private startMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval);\n }\n\n this.monitorInterval = setInterval(() => {\n if (this.scheduler.isComplete() && this.lamPipeline.queuedFrameCount === 0) {\n this.emit('playback_complete', undefined as any);\n this.stopMonitoring();\n }\n }, 100);\n }\n\n /**\n * Stop monitoring\n */\n private stopMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval);\n this.monitorInterval = null;\n }\n\n if (this.frameAnimationId) {\n cancelAnimationFrame(this.frameAnimationId);\n this.frameAnimationId = null;\n }\n }\n\n /**\n * Get current pipeline state (for debugging/monitoring)\n */\n getState() {\n return {\n waitingForFirstLAM: this.waitingForFirstLAM,\n bufferedChunks: this.bufferedChunks.length,\n coalescerFill: this.coalescer.fillLevel,\n lamFill: this.lamPipeline.fillLevel,\n queuedLAMFrames: this.lamPipeline.queuedFrameCount,\n queuedEmotionFrames: this.emotionFrameQueue.length,\n emotionReady: this.emotionReady,\n currentTime: this.scheduler.getCurrentTime(),\n playbackEndTime: this.scheduler.getPlaybackEndTime(),\n };\n }\n\n /**\n * Check if emotion is enabled and ready\n */\n get isEmotionReady(): boolean {\n return this.emotionReady && !this.emotionDisabledDueToError;\n }\n\n /**\n * Check if emotion was disabled due to runtime error (memory exhaustion)\n */\n get isEmotionDisabled(): boolean {\n return this.emotionDisabledDueToError;\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n this.stopMonitoring();\n this.scheduler.dispose();\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.bufferedChunks = [];\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n }\n}\n","/**\r\n * Whisper Automatic Speech Recognition using transformers.js\r\n * Uses Xenova's proven pipeline API for reliable transcription\r\n */\r\n\r\nimport { pipeline, env, type AutomaticSpeechRecognitionPipeline } from '@huggingface/transformers';\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\n\r\nconst logger = createLogger('Whisper');\r\n\r\nexport type WhisperModel = 'tiny' | 'base' | 'small' | 'medium';\r\nexport type WhisperDtype = 'fp32' | 'fp16' | 'q8' | 'int8' | 'uint8' | 'q4' | 'q4f16' | 'bnb4';\r\n\r\nexport interface WhisperConfig {\r\n /** Model size: tiny (~75MB), base (~150MB), small (~500MB), medium (~1.5GB) */\r\n model?: WhisperModel;\r\n /** Use multilingual model (default: false, uses .en models) */\r\n multilingual?: boolean;\r\n /** Language code (e.g., 'en', 'es', 'fr') - for multilingual models */\r\n language?: string;\r\n /** Task: transcribe or translate (default: transcribe) */\r\n task?: 'transcribe' | 'translate';\r\n /** Model quantization format 
(default: 'q8' for balance of speed/quality) */\r\n dtype?: WhisperDtype;\r\n /** Use WebGPU acceleration if available (default: auto-detect) */\r\n device?: 'auto' | 'webgpu' | 'wasm';\r\n /** Local model path (e.g., '/models/whisper-tiny.en') - overrides HuggingFace CDN */\r\n localModelPath?: string;\r\n /** HuggingFace API token to bypass rate limits (get from https://huggingface.co/settings/tokens) */\r\n token?: string;\r\n /** Suppress non-speech tokens like [LAUGHTER], [CLICKING], etc. (default: true) */\r\n suppressNonSpeech?: boolean;\r\n}\r\n\r\nexport interface TranscriptionResult {\r\n /** Transcribed text */\r\n text: string;\r\n /** Detected/used language */\r\n language: string;\r\n /** Inference time in ms */\r\n inferenceTimeMs: number;\r\n /** Full chunks with timestamps (if requested) */\r\n chunks?: Array<{ text: string; timestamp: [number, number | null] }>;\r\n}\r\n\r\n/**\r\n * Whisper ASR inference using transformers.js pipeline API\r\n *\r\n * Features:\r\n * - Automatic WebGPU/WASM backend selection\r\n * - Streaming support with chunk callbacks\r\n * - Proven implementation from Xenova's demo\r\n * - Handles all audio preprocessing automatically\r\n */\r\nexport class WhisperInference {\r\n private config: Required<Omit<WhisperConfig, 'localModelPath' | 'token'>> & Pick<WhisperConfig, 'localModelPath' | 'token'>;\r\n private pipeline: AutomaticSpeechRecognitionPipeline | null = null;\r\n private currentModel: string | null = null;\r\n private isLoading = false;\r\n private actualBackend: string = 'unknown';\r\n\r\n constructor(config: WhisperConfig = {}) {\r\n this.config = {\r\n model: config.model || 'tiny',\r\n multilingual: config.multilingual || false,\r\n language: config.language || 'en',\r\n task: config.task || 'transcribe',\r\n dtype: config.dtype || 'q8',\r\n device: config.device || 'auto',\r\n localModelPath: config.localModelPath,\r\n token: config.token,\r\n suppressNonSpeech: config.suppressNonSpeech !== false, // Default true\r\n };\r\n }\r\n\r\n /**\r\n * Check if WebGPU is available in this browser\r\n */\r\n static async isWebGPUAvailable(): Promise<boolean> {\r\n return 'gpu' in navigator;\r\n }\r\n\r\n /**\r\n * Load the Whisper model pipeline\r\n */\r\n async load(onProgress?: (progress: { status: string; progress?: number; file?: string }) => void): Promise<void> {\r\n if (this.isLoading) {\r\n logger.debug('Already loading model, waiting...');\r\n while (this.isLoading) {\r\n await new Promise(resolve => setTimeout(resolve, 100));\r\n }\r\n return;\r\n }\r\n\r\n const modelName = this.getModelName();\r\n\r\n // Check if we already have this model loaded\r\n if (this.pipeline !== null && this.currentModel === modelName) {\r\n logger.debug('Model already loaded', { model: modelName });\r\n return;\r\n }\r\n\r\n this.isLoading = true;\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.load', {\r\n 'whisper.model': modelName,\r\n 'whisper.dtype': this.config.dtype,\r\n 'whisper.device': this.config.device,\r\n });\r\n\r\n try {\r\n const loadStart = performance.now();\r\n\r\n logger.info('Loading model', {\r\n model: modelName,\r\n dtype: this.config.dtype,\r\n device: this.config.device,\r\n multilingual: this.config.multilingual,\r\n });\r\n\r\n // Dispose old pipeline if switching models\r\n if (this.pipeline !== null && this.currentModel !== modelName) {\r\n logger.debug('Disposing old model', { oldModel: this.currentModel });\r\n await this.pipeline.dispose();\r\n this.pipeline = null;\r\n }\r\n\r\n // 
Create pipeline with progress callback\r\n // Force WebGPU if available to avoid WASM CORS issues in v4\r\n const hasWebGPU = await WhisperInference.isWebGPUAvailable();\r\n const device = this.config.device === 'auto'\r\n ? (hasWebGPU ? 'webgpu' : 'wasm')\r\n : this.config.device;\r\n\r\n logger.info('Creating pipeline', { device, hasWebGPU });\r\n\r\n // CRITICAL: Configure transformers.js environment BEFORE calling pipeline()\r\n // This must happen here (not at module level) to avoid lazy-loading race conditions\r\n env.allowLocalModels = false; // Don't check local paths\r\n env.allowRemoteModels = true; // Fetch from HuggingFace\r\n env.useBrowserCache = false; // Disable browser cache\r\n env.useCustomCache = false; // No custom cache\r\n env.useWasmCache = false; // Disable WASM cache (prevents HTML error pages from being cached)\r\n if (env.backends.onnx.wasm) {\r\n env.backends.onnx.wasm.proxy = false; // Disable worker proxy for WASM\r\n env.backends.onnx.wasm.numThreads = 1; // Single thread to avoid CORS issues\r\n }\r\n\r\n logger.info('Configured transformers.js env', {\r\n allowLocalModels: env.allowLocalModels,\r\n useBrowserCache: env.useBrowserCache,\r\n useWasmCache: env.useWasmCache,\r\n });\r\n\r\n // Force WebGPU execution providers if available\r\n const pipelineOptions: any = {\r\n dtype: this.config.dtype,\r\n device,\r\n progress_callback: onProgress,\r\n // For medium models, use no_attentions revision to save memory\r\n revision: modelName.includes('whisper-medium') ? 'no_attentions' : 'main',\r\n // Pass HuggingFace token to bypass rate limits\r\n ...(this.config.token && { token: this.config.token }),\r\n };\r\n\r\n // CRITICAL: Force WebGPU execution providers to avoid WASM memory issues\r\n if (device === 'webgpu') {\r\n pipelineOptions.session_options = {\r\n executionProviders: ['webgpu'],\r\n };\r\n logger.info('Forcing WebGPU execution providers');\r\n }\r\n\r\n this.pipeline = await pipeline(\r\n 'automatic-speech-recognition',\r\n modelName,\r\n pipelineOptions\r\n );\r\n\r\n // Store the actual backend used\r\n this.actualBackend = device;\r\n\r\n this.currentModel = modelName;\r\n const loadTimeMs = performance.now() - loadStart;\r\n\r\n logger.info('Model loaded successfully', {\r\n model: modelName,\r\n loadTimeMs: Math.round(loadTimeMs),\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.load_time_ms': loadTimeMs,\r\n });\r\n span?.end();\r\n } catch (error) {\r\n const errorDetails = {\r\n message: error instanceof Error ? error.message : String(error),\r\n stack: error instanceof Error ? error.stack : undefined,\r\n name: error instanceof Error ? 
error.name : undefined,\r\n error: error,\r\n };\r\n logger.error('Failed to load model', errorDetails);\r\n span?.endWithError(error as Error);\r\n throw error;\r\n } finally {\r\n this.isLoading = false;\r\n }\r\n }\r\n\r\n /**\r\n * Transcribe audio to text\r\n *\r\n * @param audio Audio samples (Float32Array, 16kHz mono)\r\n * @param options Transcription options\r\n */\r\n async transcribe(\r\n audio: Float32Array,\r\n options?: {\r\n /** Return timestamps for each chunk */\r\n returnTimestamps?: boolean;\r\n /** Chunk length in seconds (default: 30) */\r\n chunkLengthS?: number;\r\n /** Stride length in seconds for overlapping chunks (default: 5) */\r\n strideLengthS?: number;\r\n /** Language override */\r\n language?: string;\r\n /** Task override */\r\n task?: 'transcribe' | 'translate';\r\n }\r\n ): Promise<TranscriptionResult> {\r\n if (!this.pipeline) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\r\n // During interruptions or concurrent inferences, audio buffer may get detached\r\n // by transformers.js ONNX Runtime before processing. Copy synchronously to preserve data.\r\n const audioCopy = new Float32Array(audio);\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.transcribe', {\r\n 'audio.samples': audioCopy.length,\r\n 'audio.duration_s': audioCopy.length / 16000,\r\n 'whisper.model': this.currentModel!,\r\n });\r\n\r\n try {\r\n const inferStart = performance.now();\r\n\r\n const audioDurationSec = audioCopy.length / 16000;\r\n const isShortAudio = audioDurationSec < 10;\r\n\r\n logger.debug('Starting transcription', {\r\n audioSamples: audioCopy.length,\r\n durationSeconds: audioDurationSec.toFixed(2),\r\n isShortAudio,\r\n });\r\n\r\n // Build transcription options\r\n // PERFORMANCE: For short audio (<10s), disable chunking to avoid overhead\r\n // Chunking is designed for long audio (podcasts, meetings) and adds 0.5-1s\r\n // latency for conversational clips. For 2-4s audio, process in one pass.\r\n const transcribeOptions: any = {\r\n // Decoding strategy\r\n top_k: 0,\r\n do_sample: false,\r\n\r\n // Adaptive chunking: Disable for short audio, enable for long audio\r\n chunk_length_s: options?.chunkLengthS || (isShortAudio ? audioDurationSec : 30),\r\n stride_length_s: options?.strideLengthS || (isShortAudio ? 0 : 5),\r\n\r\n // Timestamps\r\n return_timestamps: options?.returnTimestamps || false,\r\n force_full_sequences: false,\r\n };\r\n\r\n // Only pass language/task for multilingual models\r\n if (this.config.multilingual) {\r\n transcribeOptions.language = options?.language || this.config.language;\r\n transcribeOptions.task = options?.task || this.config.task;\r\n }\r\n\r\n // Run transcription\r\n const rawResult = await this.pipeline(audioCopy, transcribeOptions);\r\n\r\n // v3 can return array or single object, normalize to single\r\n const result = Array.isArray(rawResult) ? 
rawResult[0] : rawResult;\r\n\r\n const inferenceTimeMs = performance.now() - inferStart;\r\n\r\n // Clean non-speech tokens if enabled\r\n let cleanedText = result.text;\r\n if (this.config.suppressNonSpeech) {\r\n cleanedText = this.removeNonSpeechTokens(cleanedText);\r\n }\r\n\r\n const transcription: TranscriptionResult = {\r\n text: cleanedText,\r\n language: this.config.language,\r\n inferenceTimeMs,\r\n chunks: result.chunks,\r\n };\r\n\r\n logger.debug('Transcription complete', {\r\n text: transcription.text,\r\n inferenceTimeMs: Math.round(inferenceTimeMs),\r\n chunksCount: result.chunks?.length || 0,\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.inference_time_ms': inferenceTimeMs,\r\n 'whisper.text_length': transcription.text.length,\r\n });\r\n span?.end();\r\n\r\n return transcription;\r\n } catch (error) {\r\n logger.error('Transcribe error', { error });\r\n span?.endWithError(error as Error);\r\n throw new Error(`Whisper transcription failed: ${error}`);\r\n }\r\n }\r\n\r\n /**\r\n * Transcribe with streaming chunks (progressive results)\r\n *\r\n * @param audio Audio samples\r\n * @param onChunk Called when each chunk is finalized\r\n * @param onUpdate Called after each generation step (optional)\r\n */\r\n async transcribeStreaming(\r\n audio: Float32Array,\r\n onChunk: (chunk: { text: string; timestamp: [number, number | null] }) => void,\r\n onUpdate?: (text: string) => void,\r\n options?: {\r\n chunkLengthS?: number;\r\n strideLengthS?: number;\r\n language?: string;\r\n task?: 'transcribe' | 'translate';\r\n }\r\n ): Promise<TranscriptionResult> {\r\n if (!this.pipeline) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.transcribe_streaming', {\r\n 'audio.samples': audio.length,\r\n 'audio.duration_s': audio.length / 16000,\r\n });\r\n\r\n try {\r\n const inferStart = performance.now();\r\n\r\n logger.debug('Starting streaming transcription', {\r\n audioSamples: audio.length,\r\n durationSeconds: (audio.length / 16000).toFixed(2),\r\n });\r\n\r\n // NOTE: transformers.js v3 removed callback_function support for streaming\r\n // For now, we'll process chunks manually after transcription\r\n // TODO: Re-implement streaming when v3 supports it\r\n\r\n // Build transcription options\r\n const transcribeOptions: any = {\r\n top_k: 0,\r\n do_sample: false,\r\n chunk_length_s: options?.chunkLengthS || 30,\r\n stride_length_s: options?.strideLengthS || 5,\r\n return_timestamps: true,\r\n force_full_sequences: false,\r\n };\r\n\r\n // Only pass language/task for multilingual models\r\n if (this.config.multilingual) {\r\n transcribeOptions.language = options?.language || this.config.language;\r\n transcribeOptions.task = options?.task || this.config.task;\r\n }\r\n\r\n // Run transcription with timestamps to get chunks\r\n const rawResult = await this.pipeline(audio, transcribeOptions);\r\n\r\n // v3 can return array or single object, normalize to single\r\n const result = Array.isArray(rawResult) ? 
rawResult[0] : rawResult;\r\n\r\n const inferenceTimeMs = performance.now() - inferStart;\r\n\r\n // Process chunks after completion\r\n if (result.chunks && onChunk) {\r\n for (const chunk of result.chunks) {\r\n onChunk({\r\n text: chunk.text,\r\n timestamp: chunk.timestamp,\r\n });\r\n }\r\n }\r\n\r\n // Final update with complete text\r\n if (onUpdate) {\r\n onUpdate(result.text);\r\n }\r\n\r\n logger.debug('Streaming transcription complete', {\r\n text: result.text,\r\n inferenceTimeMs: Math.round(inferenceTimeMs),\r\n chunksCount: result.chunks?.length || 0,\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.inference_time_ms': inferenceTimeMs,\r\n 'whisper.chunks_count': result.chunks?.length || 0,\r\n });\r\n span?.end();\r\n\r\n return {\r\n text: result.text,\r\n language: this.config.language,\r\n inferenceTimeMs,\r\n chunks: result.chunks,\r\n };\r\n } catch (error) {\r\n logger.error('Streaming transcribe error', { error });\r\n span?.endWithError(error as Error);\r\n throw new Error(`Whisper streaming transcription failed: ${error}`);\r\n }\r\n }\r\n\r\n /**\r\n * Dispose of the model and free resources\r\n */\r\n async dispose(): Promise<void> {\r\n if (this.pipeline) {\r\n logger.debug('Disposing model', { model: this.currentModel });\r\n await this.pipeline.dispose();\r\n this.pipeline = null;\r\n this.currentModel = null;\r\n }\r\n }\r\n\r\n /**\r\n * Check if model is loaded\r\n */\r\n get isLoaded(): boolean {\r\n return this.pipeline !== null;\r\n }\r\n\r\n /**\r\n * Get the backend being used (webgpu or wasm)\r\n */\r\n get backend(): string {\r\n return this.actualBackend;\r\n }\r\n\r\n /**\r\n * Get the full model name used by transformers.js\r\n */\r\n private getModelName(): string {\r\n // Use local model path if provided\r\n if (this.config.localModelPath) {\r\n return this.config.localModelPath;\r\n }\r\n\r\n // Otherwise construct HuggingFace model path\r\n let modelName = `onnx-community/whisper-${this.config.model}`;\r\n\r\n // Add .en suffix for English-only models (non-multilingual)\r\n if (!this.config.multilingual) {\r\n modelName += '.en';\r\n }\r\n\r\n return modelName;\r\n }\r\n\r\n /**\r\n * Remove non-speech event tokens from transcription\r\n *\r\n * Whisper outputs special tokens for non-speech events like:\r\n * [LAUGHTER], [APPLAUSE], [MUSIC], [BLANK_AUDIO], [CLICKING], etc.\r\n *\r\n * This method strips these tokens and cleans up extra whitespace.\r\n */\r\n private removeNonSpeechTokens(text: string): string {\r\n // Pattern matches anything in square brackets (non-speech tokens)\r\n // Examples: [LAUGHTER], [BLANK_AUDIO], [MUSIC], [APPLAUSE], [CLICKING]\r\n const cleaned = text.replace(/\\[[\\w\\s_]+\\]/g, '');\r\n\r\n // Clean up multiple spaces and trim\r\n return cleaned.replace(/\\s+/g, ' ').trim();\r\n }\r\n}\r\n","/**\n * CPU-optimized lip sync inference using wav2arkit_cpu model\n *\n * A lightweight (1.8MB) alternative to Wav2Vec2Inference (384MB) designed\n * for Safari/iOS where WebGPU crashes due to ONNX Runtime JSEP bugs.\n *\n * Key differences from Wav2Vec2Inference:\n * - WASM-only backend (CPU-optimized, no WebGPU)\n * - 1.8MB model vs 384MB\n * - No identity input (baked to identity 11)\n * - No ASR output (lip sync only)\n * - Dynamic input length (not fixed to 16000 samples)\n * - Different native blendshape ordering (remapped to LAM_BLENDSHAPES)\n *\n * @category Inference\n *\n * @example\n * ```typescript\n * import { Wav2ArkitCpuInference } from '@omote/core';\n *\n * const lam = new Wav2ArkitCpuInference({\n * 
modelUrl: '/models/wav2arkit_cpu.onnx',\n * });\n * await lam.load();\n *\n * const { blendshapes } = await lam.infer(audioSamples);\n * // blendshapes: Float32Array[] in LAM_BLENDSHAPES order, 30fps\n * ```\n */\n\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\nimport { symmetrizeBlendshapes, remapWav2ArkitToLam } from './blendshapeUtils';\nimport type { LipSyncBackend, LipSyncModelInfo, LipSyncResult } from './LipSyncBackend';\n\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\nconst logger = createLogger('Wav2ArkitCpu');\n\nexport interface Wav2ArkitCpuConfig {\n /** Path or URL to the wav2arkit_cpu ONNX model */\n modelUrl: string;\n /** Preferred backend (default: 'wasm' — this model is CPU-optimized) */\n backend?: BackendPreference;\n}\n\nexport class Wav2ArkitCpuInference implements LipSyncBackend {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null;\n private config: Wav2ArkitCpuConfig;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n\n // Inference queue for handling concurrent calls\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n constructor(config: Wav2ArkitCpuConfig) {\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<LipSyncModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. 
Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Wav2ArkitCpu.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend || 'wasm',\n });\n\n try {\n // Default to WASM — this model is CPU-optimized\n const preference = this.config.backend || 'wasm';\n logger.info('Loading ONNX Runtime...', { preference });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(preference);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.debug('Loading model from cache', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n\n if (!modelBuffer) {\n logger.warn('Cache corruption detected, clearing and retrying', { modelUrl });\n await cache.delete(modelUrl);\n modelBuffer = await fetchWithCache(modelUrl);\n }\n } else {\n logger.debug('Fetching and caching model', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n }\n\n if (!modelBuffer) {\n throw new Error(`Failed to load model: ${modelUrl}`);\n }\n\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await this.ort!.InferenceSession.create(modelData, sessionOptions);\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n inputs: this.session.inputNames,\n outputs: this.session.outputNames,\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n\n // Warmup inference\n logger.debug('Running warmup inference');\n const warmupStart = performance.now();\n const silentAudio = new Float32Array(16000);\n await this.infer(silentAudio);\n const warmupTimeMs = performance.now() - warmupStart;\n logger.info('Warmup inference complete', {\n warmupTimeMs: Math.round(warmupTimeMs),\n backend: this._backend,\n });\n telemetry?.recordHistogram('omote.model.warmup_time', warmupTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? 
error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'wav2arkit_cpu',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Run inference on raw audio\n *\n * Accepts variable-length audio (not fixed to 16000 samples).\n * Output frames = ceil(30 * numSamples / 16000).\n *\n * @param audioSamples - Float32Array of raw audio at 16kHz\n * @param _identityIndex - Ignored (identity 11 is baked into the model)\n */\n async infer(\n audioSamples: Float32Array,\n _identityIndex?: number\n ): Promise<LipSyncResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n // Force copy to prevent ArrayBuffer detachment\n const audioCopy = new Float32Array(audioSamples);\n\n const feeds = {\n 'audio_waveform': new this.ort!.Tensor('float32', audioCopy, [1, audioCopy.length]),\n };\n\n return this.queueInference(feeds, audioCopy.length);\n }\n\n /**\n * Queue inference to serialize ONNX session calls\n */\n private queueInference(\n feeds: Record<string, Tensor>,\n inputSamples: number\n ): Promise<LipSyncResult> {\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Wav2ArkitCpu.infer', {\n 'inference.backend': this._backend,\n 'inference.input_samples': inputSamples,\n });\n try {\n const startTime = performance.now();\n const results = await this.session!.run(feeds);\n const inferenceTimeMs = performance.now() - startTime;\n\n const blendshapeOutput = results['blendshapes'];\n\n if (!blendshapeOutput) {\n throw new Error('Missing blendshapes output from model');\n }\n\n const blendshapeData = blendshapeOutput.data as Float32Array;\n const numFrames = blendshapeOutput.dims[1] as number;\n const numBlendshapes = blendshapeOutput.dims[2] as number;\n\n // Split into per-frame arrays, remap to LAM order, symmetrize\n const blendshapes: Float32Array[] = [];\n for (let f = 0; f < numFrames; f++) {\n const rawFrame = blendshapeData.slice(f * numBlendshapes, (f + 1) * numBlendshapes);\n const remapped = remapWav2ArkitToLam(rawFrame);\n blendshapes.push(symmetrizeBlendshapes(remapped));\n }\n\n logger.trace('Inference completed', {\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n numFrames,\n inputSamples,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.frames': numFrames,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n blendshapes,\n numFrames,\n inferenceTimeMs,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? 
err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n }\n}\n","/**\n * Factory function for lip sync with automatic GPU/CPU model selection\n *\n * Provides a unified API that automatically selects the optimal model:\n * - Safari (macOS + iOS): Uses Wav2ArkitCpuInference (1.8MB, WASM)\n * - Chrome/Firefox/Edge: Uses Wav2Vec2Inference (384MB, WebGPU)\n * - Fallback: Gracefully falls back to CPU model if GPU model fails to load\n *\n * @category Inference\n *\n * @example Auto-detect (recommended)\n * ```typescript\n * import { createLipSync } from '@omote/core';\n *\n * const lam = createLipSync({\n * gpuModelUrl: '/models/unified_wav2vec2_asr_a2e.onnx',\n * cpuModelUrl: '/models/wav2arkit_cpu.onnx',\n * });\n *\n * await lam.load();\n * const { blendshapes } = await lam.infer(audioSamples);\n * ```\n *\n * @example Force CPU model\n * ```typescript\n * const lam = createLipSync({\n * gpuModelUrl: '/models/unified_wav2vec2_asr_a2e.onnx',\n * cpuModelUrl: '/models/wav2arkit_cpu.onnx',\n * mode: 'cpu',\n * });\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { shouldUseCpuLipSync, isSafari } from '../utils/runtime';\nimport { Wav2Vec2Inference } from './Wav2Vec2Inference';\nimport { Wav2ArkitCpuInference } from './Wav2ArkitCpuInference';\nimport type { LipSyncBackend, LipSyncModelInfo, LipSyncResult } from './LipSyncBackend';\nimport type { RuntimeBackend, BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('createLipSync');\n\n/**\n * Configuration for the lip sync factory\n */\nexport interface CreateLipSyncConfig {\n /** URL for the GPU model (Wav2Vec2, used on Chrome/Firefox/Edge) */\n gpuModelUrl: string;\n /** URL for the CPU model (wav2arkit_cpu, used on Safari/iOS) */\n cpuModelUrl: string;\n /**\n * Model selection mode:\n * - 'auto': Safari/iOS → CPU, everything else → GPU (default)\n * - 'gpu': Force GPU model (Wav2Vec2Inference)\n * - 'cpu': Force CPU model (Wav2ArkitCpuInference)\n */\n mode?: 'auto' | 'gpu' | 'cpu';\n /** Backend preference for GPU model (default: 'auto') */\n gpuBackend?: BackendPreference;\n /** Number of identity classes for GPU model (default: 12) */\n numIdentityClasses?: number;\n /**\n * Fall back to CPU model if GPU model fails to load (default: true)\n * Only applies when mode is 'auto' or 'gpu'\n */\n fallbackOnError?: boolean;\n}\n\n/**\n * Create a lip sync instance with automatic GPU/CPU model selection\n *\n * @param config - Factory configuration\n * @returns A LipSyncBackend instance (either GPU or CPU model)\n */\nexport function createLipSync(config: CreateLipSyncConfig): LipSyncBackend {\n const mode = config.mode ?? 'auto';\n const fallbackOnError = config.fallbackOnError ?? 
true;\n\n // Determine which model to use\n let useCpu: boolean;\n\n if (mode === 'cpu') {\n useCpu = true;\n logger.info('Forcing CPU lip sync model (wav2arkit_cpu)');\n } else if (mode === 'gpu') {\n useCpu = false;\n logger.info('Forcing GPU lip sync model (Wav2Vec2)');\n } else {\n // Auto-detect: Safari/iOS → CPU, everything else → GPU\n useCpu = shouldUseCpuLipSync();\n logger.info('Auto-detected lip sync model', {\n useCpu,\n isSafari: isSafari(),\n });\n }\n\n if (useCpu) {\n logger.info('Creating Wav2ArkitCpuInference (1.8MB, WASM)');\n return new Wav2ArkitCpuInference({\n modelUrl: config.cpuModelUrl,\n });\n }\n\n // GPU model, optionally with fallback\n const gpuInstance = new Wav2Vec2Inference({\n modelUrl: config.gpuModelUrl,\n backend: config.gpuBackend ?? 'auto',\n numIdentityClasses: config.numIdentityClasses,\n });\n\n if (fallbackOnError) {\n logger.info('Creating Wav2Vec2Inference with CPU fallback');\n return new LipSyncWithFallback(gpuInstance, config);\n }\n\n logger.info('Creating Wav2Vec2Inference (no fallback)');\n return gpuInstance;\n}\n\n/**\n * Wrapper that provides automatic fallback from GPU to CPU model\n *\n * If the GPU model fails during load(), this wrapper automatically\n * creates a Wav2ArkitCpuInference instance instead.\n */\nclass LipSyncWithFallback implements LipSyncBackend {\n private implementation: LipSyncBackend;\n private readonly config: CreateLipSyncConfig;\n private hasFallenBack = false;\n\n constructor(gpuInstance: Wav2Vec2Inference, config: CreateLipSyncConfig) {\n this.implementation = gpuInstance;\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n return this.implementation.backend;\n }\n\n get isLoaded(): boolean {\n return this.implementation.isLoaded;\n }\n\n async load(): Promise<LipSyncModelInfo> {\n try {\n return await this.implementation.load();\n } catch (error) {\n logger.warn('GPU model load failed, falling back to CPU model', {\n error: error instanceof Error ? 
error.message : String(error),\n });\n\n // Clean up failed GPU instance\n try {\n await this.implementation.dispose();\n } catch {\n // Ignore dispose errors\n }\n\n // Create CPU fallback\n this.implementation = new Wav2ArkitCpuInference({\n modelUrl: this.config.cpuModelUrl,\n });\n this.hasFallenBack = true;\n\n logger.info('Fallback to Wav2ArkitCpuInference successful');\n return await this.implementation.load();\n }\n }\n\n async infer(audioSamples: Float32Array, identityIndex?: number): Promise<LipSyncResult> {\n return this.implementation.infer(audioSamples, identityIndex);\n }\n\n async dispose(): Promise<void> {\n return this.implementation.dispose();\n }\n}\n","/**\n * Silero VAD (Voice Activity Detection) inference\n *\n * Neural network-based VAD running in browser via ONNX Runtime Web.\n * Much more accurate than RMS-based energy detection.\n *\n * Uses lazy loading to conditionally load WebGPU or WASM-only bundle:\n * - iOS: Loads WASM-only bundle (WebGPU crashes due to Safari bugs)\n * - Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { SileroVADInference } from '@omote/core';\n *\n * const vad = new SileroVADInference({\n * modelUrl: '/models/silero-vad.onnx'\n * });\n * await vad.load();\n *\n * // Process 32ms chunks (512 samples at 16kHz)\n * const probability = await vad.process(audioChunk);\n * if (probability > 0.5) {\n * console.log('Speech detected!');\n * }\n * ```\n *\n * @example Streaming with state management\n * ```typescript\n * // State is automatically maintained between process() calls\n * // Call reset() when starting a new audio stream\n * vad.reset();\n *\n * for (const chunk of audioChunks) {\n * const prob = await vad.process(chunk);\n * // prob is speech probability [0, 1]\n * }\n * ```\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n isWebGPUAvailable,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('SileroVAD');\n\nexport type VADBackend = BackendPreference;\n\n/**\n * Configuration for Silero VAD\n */\nexport interface SileroVADConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\n backend?: VADBackend;\n /** Sample rate (8000 or 16000, default: 16000) */\n sampleRate?: 8000 | 16000;\n /** Speech probability threshold (default: 0.5) */\n threshold?: number;\n /**\n * Number of audio chunks to keep in pre-speech buffer.\n * When VAD triggers, these chunks are prepended to the speech buffer\n * to capture the beginning of speech that occurred before detection.\n *\n * At 512 samples/chunk and 16kHz:\n * - 10 chunks = 320ms of pre-speech audio\n * - 15 chunks = 480ms of pre-speech audio\n *\n * Default: 10 chunks (320ms)\n */\n preSpeechBufferChunks?: number;\n}\n\n/**\n * VAD model loading 
information\n */\nexport interface VADModelInfo {\n backend: 'webgpu' | 'wasm';\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n chunkSize: number;\n}\n\n/**\n * Result from a single VAD inference\n */\nexport interface VADResult {\n /** Speech probability (0-1) */\n probability: number;\n /** Whether speech is detected (probability > threshold) */\n isSpeech: boolean;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n /**\n * Pre-speech audio chunks (only present on first speech detection).\n * These are the N chunks immediately before VAD triggered, useful for\n * capturing the beginning of speech that occurred before detection.\n *\n * Only populated when transitioning from silence to speech.\n */\n preSpeechChunks?: Float32Array[];\n}\n\n/**\n * Speech segment detected by VAD\n */\nexport interface SpeechSegment {\n /** Start time in seconds */\n start: number;\n /** End time in seconds */\n end: number;\n /** Average probability during segment */\n avgProbability: number;\n}\n\n/**\n * Silero VAD - Neural network voice activity detection\n *\n * Based on snakers4/silero-vad ONNX model.\n * Processes 32ms chunks (512 samples at 16kHz) with LSTM state.\n *\n * @see https://github.com/snakers4/silero-vad\n */\nexport class SileroVADInference {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null; // Lazy-loaded ONNX Runtime module\n private config: Required<SileroVADConfig>;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n\n // LSTM state tensors [2, batch_size, 128]\n private state: Tensor | null = null;\n\n // Context buffer (prepended to each chunk)\n private context: Float32Array;\n\n // Chunk sizes based on sample rate\n private readonly chunkSize: number;\n private readonly contextSize: number;\n\n // Inference queue for serialization\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n // Pre-speech buffer for capturing beginning of speech\n private preSpeechBuffer: Float32Array[] = [];\n private wasSpeaking = false;\n\n constructor(config: SileroVADConfig) {\n const sampleRate = config.sampleRate ?? 16000;\n\n if (sampleRate !== 8000 && sampleRate !== 16000) {\n throw new Error('Silero VAD only supports 8000 or 16000 Hz sample rates');\n }\n\n this.config = {\n modelUrl: config.modelUrl,\n backend: config.backend ?? 'auto',\n sampleRate,\n threshold: config.threshold ?? 0.5,\n preSpeechBufferChunks: config.preSpeechBufferChunks ?? 10,\n };\n\n // Set chunk sizes based on sample rate\n this.chunkSize = sampleRate === 16000 ? 512 : 256;\n this.contextSize = sampleRate === 16000 ? 64 : 32;\n this.context = new Float32Array(this.contextSize);\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? 
this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n get threshold(): number {\n return this.config.threshold;\n }\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number {\n return this.chunkSize;\n }\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number {\n return (this.chunkSize / this.config.sampleRate) * 1000;\n }\n\n /**\n * Check if WebGPU is available and working\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\n */\n static isWebGPUAvailable = isWebGPUAvailable;\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<VADModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVAD.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend,\n 'model.sample_rate': this.config.sampleRate,\n });\n\n try {\n // Lazy load ONNX Runtime with appropriate backend\n // iOS: Loads WASM-only bundle (smaller, no WebGPU code)\n // Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.debug('Loading model from cache', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n } else {\n logger.debug('Fetching and caching model', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n }\n\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n // Create session with optimized settings for the backend\n // Convert ArrayBuffer to Uint8Array for onnxruntime-common types\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await ort.InferenceSession.create(modelData, sessionOptions);\n\n // Initialize state\n this.reset();\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n threshold: this.config.threshold,\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'silero-vad',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? 
error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'silero-vad',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Reset state for new audio stream\n */\n reset(): void {\n if (!this.ort) {\n throw new Error('ONNX Runtime not loaded. Call load() first.');\n }\n // LSTM state: [2, batch_size=1, 128]\n this.state = new this.ort.Tensor('float32', new Float32Array(2 * 1 * 128), [2, 1, 128]);\n // Reset context buffer\n this.context = new Float32Array(this.contextSize);\n // Reset pre-speech buffer\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n }\n\n /**\n * Process a single audio chunk\n *\n * @param audioChunk - Float32Array of exactly chunkSize samples (512 for 16kHz, 256 for 8kHz)\n * @returns VAD result with speech probability\n */\n async process(audioChunk: Float32Array): Promise<VADResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n if (audioChunk.length !== this.chunkSize) {\n throw new Error(\n `Audio chunk must be exactly ${this.chunkSize} samples (got ${audioChunk.length}). ` +\n `Use getChunkSize() to get required size.`\n );\n }\n\n return this.queueInference(audioChunk);\n }\n\n /**\n * Process audio and detect speech segments\n *\n * @param audio - Complete audio buffer\n * @param options - Detection options\n * @returns Array of speech segments\n */\n async detectSpeech(\n audio: Float32Array,\n options: {\n /** Minimum speech duration in ms (default: 250) */\n minSpeechDurationMs?: number;\n /** Minimum silence duration to end segment in ms (default: 300) */\n minSilenceDurationMs?: number;\n /** Padding to add before/after speech in ms (default: 30) */\n speechPadMs?: number;\n } = {}\n ): Promise<SpeechSegment[]> {\n const {\n minSpeechDurationMs = 250,\n minSilenceDurationMs = 300,\n speechPadMs = 30,\n } = options;\n\n this.reset();\n\n const segments: SpeechSegment[] = [];\n const chunkDurationMs = this.getChunkDurationMs();\n const minSpeechChunks = Math.ceil(minSpeechDurationMs / chunkDurationMs);\n const minSilenceChunks = Math.ceil(minSilenceDurationMs / chunkDurationMs);\n const padChunks = Math.ceil(speechPadMs / chunkDurationMs);\n\n let inSpeech = false;\n let speechStart = 0;\n let silenceCount = 0;\n let speechChunks = 0;\n let totalProb = 0;\n\n // Process in chunks\n for (let i = 0; i + this.chunkSize <= audio.length; i += this.chunkSize) {\n const chunk = audio.slice(i, i + this.chunkSize);\n const result = await this.process(chunk);\n const chunkIndex = i / this.chunkSize;\n const timeMs = chunkIndex * chunkDurationMs;\n\n if (result.isSpeech) {\n if (!inSpeech) {\n // Start of speech\n inSpeech = true;\n speechStart = Math.max(0, timeMs - speechPadMs);\n silenceCount = 0;\n speechChunks = 0;\n totalProb = 0;\n }\n silenceCount = 0;\n speechChunks++;\n totalProb += result.probability;\n } else if (inSpeech) {\n silenceCount++;\n if (silenceCount >= minSilenceChunks) {\n // End of speech\n if (speechChunks >= minSpeechChunks) {\n segments.push({\n start: speechStart / 1000,\n end: (timeMs + speechPadMs) / 1000,\n avgProbability: totalProb / speechChunks,\n });\n }\n inSpeech = false;\n }\n }\n }\n\n // Handle trailing speech\n if (inSpeech && speechChunks >= minSpeechChunks) {\n const endMs = (audio.length / this.config.sampleRate) * 1000;\n segments.push({\n start: speechStart / 1000,\n end: endMs / 1000,\n avgProbability: totalProb / speechChunks,\n });\n }\n\n return segments;\n }\n\n 
/**\n * Calculate RMS energy of audio chunk\n */\n private calculateRMS(samples: Float32Array): number {\n let sum = 0;\n for (let i = 0; i < samples.length; i++) {\n sum += samples[i] * samples[i];\n }\n return Math.sqrt(sum / samples.length);\n }\n\n /**\n * Queue inference to serialize ONNX session calls\n */\n private queueInference(audioChunk: Float32Array): Promise<VADResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n // During interruptions, audioChunk's buffer may get detached by ONNX Runtime\n // before we access it in the async queue. Copy synchronously to preserve data.\n const audioChunkCopy = new Float32Array(audioChunk);\n\n // Energy pre-filter: skip inference on very quiet audio\n // This prevents false positives from blank/silent chunks and saves compute\n const MIN_ENERGY_THRESHOLD = 0.001; // Very low threshold - only filters near-silence\n const rms = this.calculateRMS(audioChunkCopy);\n if (rms < MIN_ENERGY_THRESHOLD) {\n // Update pre-speech buffer even for silent chunks (ring buffer)\n if (!this.wasSpeaking) {\n this.preSpeechBuffer.push(new Float32Array(audioChunkCopy));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n }\n\n logger.trace('Skipping VAD inference - audio too quiet', {\n rms: Math.round(rms * 10000) / 10000,\n threshold: MIN_ENERGY_THRESHOLD,\n });\n\n return Promise.resolve({\n probability: 0,\n isSpeech: false,\n inferenceTimeMs: 0,\n });\n }\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVAD.process', {\n 'inference.backend': this._backend,\n 'inference.chunk_size': this.chunkSize,\n });\n try {\n const startTime = performance.now();\n\n // Prepend context to input\n const inputSize = this.contextSize + this.chunkSize;\n const inputBuffer = new Float32Array(inputSize);\n inputBuffer.set(this.context, 0);\n inputBuffer.set(audioChunkCopy, this.contextSize);\n\n // Create tensors\n // CRITICAL: Force copy to prevent ArrayBuffer detachment by ONNX Runtime Web workers\n // Without copy, WASM backend transfers buffers to workers, causing \"memory access out of bounds\" errors\n const inputBufferCopy = new Float32Array(inputBuffer);\n const inputTensor = new this.ort!.Tensor('float32', inputBufferCopy, [1, inputSize]);\n const srTensor = new this.ort!.Tensor('int64', BigInt64Array.from([BigInt(this.config.sampleRate)]), []);\n\n // CRITICAL: Also copy state tensor to prevent detachment\n // State tensor is reused across inferences and gets detached during interruptions\n const stateCopy = new Float32Array(this.state!.data as Float32Array);\n const stateTensor = new this.ort!.Tensor('float32', stateCopy, this.state!.dims as number[]);\n\n const feeds = {\n 'input': inputTensor,\n 'state': stateTensor,\n 'sr': srTensor,\n };\n\n // Run inference\n const results = await this.session!.run(feeds);\n\n // Extract outputs\n const outputTensor = results['output'];\n const newStateTensor = results['stateN'] || results['state'];\n\n if (!outputTensor) {\n throw new Error('Missing output tensor from VAD model');\n }\n\n const probability = (outputTensor.data as Float32Array)[0];\n\n // Update state for next call\n if (newStateTensor) {\n this.state = new this.ort!.Tensor(\n 'float32',\n new Float32Array(newStateTensor.data as Float32Array),\n [2, 1, 128]\n );\n }\n\n // Update context (last contextSize samples of input chunk)\n 
this.context = audioChunk.slice(-this.contextSize);\n\n const inferenceTimeMs = performance.now() - startTime;\n const isSpeech = probability > this.config.threshold;\n\n // Pre-speech buffer logic\n let preSpeechChunks: Float32Array[] | undefined;\n\n if (isSpeech && !this.wasSpeaking) {\n // Silence→Speech transition: populate preSpeechChunks\n preSpeechChunks = [...this.preSpeechBuffer];\n this.preSpeechBuffer = [];\n logger.debug('Speech started with pre-speech buffer', {\n preSpeechChunks: preSpeechChunks.length,\n durationMs: Math.round(preSpeechChunks.length * this.getChunkDurationMs()),\n });\n } else if (!isSpeech && !this.wasSpeaking) {\n // Still in silence: maintain ring buffer\n this.preSpeechBuffer.push(new Float32Array(audioChunk));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n } else if (!isSpeech && this.wasSpeaking) {\n // Speech→Silence transition: clear buffer\n this.preSpeechBuffer = [];\n }\n\n this.wasSpeaking = isSpeech;\n\n logger.trace('VAD inference completed', {\n probability: Math.round(probability * 1000) / 1000,\n isSpeech,\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.probability': probability,\n 'inference.is_speech': isSpeech,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'silero-vad',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n probability,\n isSpeech,\n inferenceTimeMs,\n preSpeechChunks,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? 
err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n this.state = null;\n }\n}\n","/**\n * Silero VAD Web Worker implementation\n *\n * Runs Silero VAD inference in a dedicated Web Worker to prevent main thread blocking.\n * Uses inline worker script (Blob URL pattern) to avoid separate file deployment.\n *\n * Key design decisions:\n * - WASM backend only (WebGPU doesn't work in Workers)\n * - LSTM state serialized as Float32Array (Tensors can't cross worker boundary)\n * - Audio copied (not transferred) to retain main thread access for pre-speech buffer\n * - ONNX Runtime loaded from CDN in worker (no bundler complications)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { SileroVADWorker } from '@omote/core';\n *\n * const vad = new SileroVADWorker({\n * modelUrl: '/models/silero-vad.onnx'\n * });\n * await vad.load();\n *\n * // Process 32ms chunks (512 samples at 16kHz)\n * const result = await vad.process(audioChunk);\n * if (result.isSpeech) {\n * console.log('Speech detected!', result.probability);\n * }\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\n\nconst logger = createLogger('SileroVADWorker');\n\n// ONNX Runtime CDN path (matches onnxLoader.ts)\nconst WASM_CDN_PATH = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.23.2/dist/';\n\n// Worker script timeouts\nconst LOAD_TIMEOUT_MS = 10000; // 10 seconds for model load\nconst INFERENCE_TIMEOUT_MS = 1000; // 1 second for inference\n\n/**\n * Messages sent from main thread to worker\n */\nexport type VADWorkerMessage =\n | { type: 'load'; modelUrl: string; sampleRate: 8000 | 16000; wasmPaths: string }\n | { type: 'process'; audio: Float32Array; state: Float32Array; context: Float32Array }\n | { type: 'reset' }\n | { type: 'dispose' };\n\n/**\n * Messages sent from worker to main thread\n */\nexport type VADWorkerResult =\n | { type: 'loaded'; inputNames: string[]; outputNames: string[]; loadTimeMs: number }\n | { type: 'result'; probability: number; state: Float32Array; inferenceTimeMs: number }\n | { type: 'reset'; state: Float32Array }\n | { type: 'error'; error: string }\n | { type: 'disposed' };\n\n/**\n * Configuration for Silero VAD Worker\n */\nexport interface VADWorkerConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Sample rate (8000 or 16000, default: 16000) */\n sampleRate?: 8000 | 16000;\n /** Speech probability threshold (default: 0.5) */\n threshold?: number;\n /**\n * Number of audio chunks to keep in pre-speech buffer.\n * When VAD triggers, these chunks are prepended to the speech buffer\n * to capture the beginning of speech that occurred before detection.\n *\n * At 512 samples/chunk and 16kHz:\n * - 10 chunks = 320ms of pre-speech audio\n * - 15 chunks = 480ms of pre-speech audio\n *\n * Default: 10 chunks (320ms)\n */\n preSpeechBufferChunks?: number;\n}\n\n/**\n * VAD model loading information from worker\n */\nexport interface VADWorkerModelInfo {\n backend: 'wasm'; // Worker always uses WASM (no WebGPU in workers)\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n chunkSize: number;\n}\n\n/**\n * Result from a single VAD 
inference\n */\nexport interface VADResult {\n /** Speech probability (0-1) */\n probability: number;\n /** Whether speech is detected (probability > threshold) */\n isSpeech: boolean;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n /**\n * Pre-speech audio chunks (only present on first speech detection).\n * These are the N chunks immediately before VAD triggered, useful for\n * capturing the beginning of speech that occurred before detection.\n *\n * Only populated when transitioning from silence to speech.\n */\n preSpeechChunks?: Float32Array[];\n}\n\n/**\n * Inline worker script for VAD inference\n *\n * This script is embedded as a string and loaded via Blob URL.\n * It loads ONNX Runtime from CDN and runs VAD inference.\n */\nconst WORKER_SCRIPT = `\n// Silero VAD Worker Script\n// Loaded via Blob URL - no separate file needed\n\nlet ort = null;\nlet session = null;\nlet sampleRate = 16000;\nlet chunkSize = 512;\nlet contextSize = 64;\n\n/**\n * Load ONNX Runtime from CDN\n */\nasync function loadOrt(wasmPaths) {\n if (ort) return;\n\n // Import ONNX Runtime from CDN\n // Using dynamic import with full CDN URL\n const ortUrl = wasmPaths + 'ort.wasm.min.js';\n\n // Load the script by fetching and executing it\n const response = await fetch(ortUrl);\n const scriptText = await response.text();\n\n // Create a blob URL for the script\n const blob = new Blob([scriptText], { type: 'application/javascript' });\n const blobUrl = URL.createObjectURL(blob);\n\n // Import the module\n importScripts(blobUrl);\n URL.revokeObjectURL(blobUrl);\n\n // ort is now available as global\n ort = self.ort;\n\n // Configure WASM settings\n ort.env.wasm.wasmPaths = wasmPaths;\n ort.env.wasm.numThreads = 1; // Single thread in worker\n ort.env.wasm.simd = true;\n ort.env.wasm.proxy = false; // No proxy in worker\n}\n\n/**\n * Load the VAD model\n */\nasync function loadModel(modelUrl, sr) {\n sampleRate = sr;\n chunkSize = sr === 16000 ? 512 : 256;\n contextSize = sr === 16000 ? 
64 : 32;\n\n // Fetch model data\n const response = await fetch(modelUrl);\n if (!response.ok) {\n throw new Error('Failed to fetch model: ' + response.status + ' ' + response.statusText);\n }\n const modelBuffer = await response.arrayBuffer();\n const modelData = new Uint8Array(modelBuffer);\n\n // Create session with WASM backend\n session = await ort.InferenceSession.create(modelData, {\n executionProviders: ['wasm'],\n graphOptimizationLevel: 'all',\n });\n\n return {\n inputNames: session.inputNames,\n outputNames: session.outputNames,\n };\n}\n\n/**\n * Create initial LSTM state\n */\nfunction createInitialState() {\n return new Float32Array(2 * 1 * 128); // [2, 1, 128]\n}\n\n/**\n * Run VAD inference\n */\nasync function runInference(audio, state, context) {\n const inputSize = contextSize + chunkSize;\n\n // Prepend context to input\n const inputBuffer = new Float32Array(inputSize);\n inputBuffer.set(context, 0);\n inputBuffer.set(audio, contextSize);\n\n // Create tensors\n const inputTensor = new ort.Tensor('float32', new Float32Array(inputBuffer), [1, inputSize]);\n const stateTensor = new ort.Tensor('float32', new Float32Array(state), [2, 1, 128]);\n const srTensor = new ort.Tensor('int64', BigInt64Array.from([BigInt(sampleRate)]), []);\n\n const feeds = {\n 'input': inputTensor,\n 'state': stateTensor,\n 'sr': srTensor,\n };\n\n // Run inference\n const results = await session.run(feeds);\n\n // Extract outputs\n const outputTensor = results['output'];\n const newStateTensor = results['stateN'] || results['state'];\n\n if (!outputTensor) {\n throw new Error('Missing output tensor from VAD model');\n }\n\n const probability = outputTensor.data[0];\n const newState = new Float32Array(newStateTensor.data);\n\n return { probability, newState };\n}\n\n// Message handler\nself.onmessage = async function(e) {\n const msg = e.data;\n\n try {\n switch (msg.type) {\n case 'load': {\n const startTime = performance.now();\n await loadOrt(msg.wasmPaths);\n const { inputNames, outputNames } = await loadModel(msg.modelUrl, msg.sampleRate);\n const loadTimeMs = performance.now() - startTime;\n\n self.postMessage({\n type: 'loaded',\n inputNames,\n outputNames,\n loadTimeMs,\n });\n break;\n }\n\n case 'process': {\n const startTime = performance.now();\n const { probability, newState } = await runInference(msg.audio, msg.state, msg.context);\n const inferenceTimeMs = performance.now() - startTime;\n\n self.postMessage({\n type: 'result',\n probability,\n state: newState,\n inferenceTimeMs,\n });\n break;\n }\n\n case 'reset': {\n const state = createInitialState();\n self.postMessage({\n type: 'reset',\n state,\n });\n break;\n }\n\n case 'dispose': {\n if (session) {\n await session.release();\n session = null;\n }\n ort = null;\n self.postMessage({ type: 'disposed' });\n break;\n }\n\n default:\n self.postMessage({\n type: 'error',\n error: 'Unknown message type: ' + msg.type,\n });\n }\n } catch (err) {\n self.postMessage({\n type: 'error',\n error: err.message || String(err),\n });\n }\n};\n\n// Error handler\nself.onerror = function(err) {\n self.postMessage({\n type: 'error',\n error: 'Worker error: ' + (err.message || String(err)),\n });\n};\n`;\n\n/**\n * Silero VAD Worker - Voice Activity Detection in a Web Worker\n *\n * Runs Silero VAD inference off the main thread to prevent UI blocking.\n * Feature parity with SileroVADInference but runs in dedicated worker.\n *\n * @see SileroVADInference for main-thread version\n */\nexport class SileroVADWorker {\n private worker: Worker | 
null = null;\n private config: Required<VADWorkerConfig>;\n private isLoading = false;\n private _isLoaded = false;\n\n // LSTM state (kept in main thread, sent to worker for each inference)\n private state: Float32Array;\n\n // Context buffer (last 64 samples for 16kHz, 32 for 8kHz)\n private context: Float32Array;\n\n // Chunk sizes based on sample rate\n private readonly chunkSize: number;\n private readonly contextSize: number;\n\n // Inference queue for serialization\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n // Pre-speech buffer for capturing beginning of speech\n private preSpeechBuffer: Float32Array[] = [];\n private wasSpeaking = false;\n\n // Pending message handlers\n private pendingResolvers: Map<string, { resolve: (value: unknown) => void; reject: (error: Error) => void }> = new Map();\n private messageId = 0;\n\n constructor(config: VADWorkerConfig) {\n const sampleRate = config.sampleRate ?? 16000;\n\n if (sampleRate !== 8000 && sampleRate !== 16000) {\n throw new Error('Silero VAD only supports 8000 or 16000 Hz sample rates');\n }\n\n this.config = {\n modelUrl: config.modelUrl,\n sampleRate,\n threshold: config.threshold ?? 0.5,\n preSpeechBufferChunks: config.preSpeechBufferChunks ?? 10,\n };\n\n // Set chunk sizes based on sample rate\n this.chunkSize = sampleRate === 16000 ? 512 : 256;\n this.contextSize = sampleRate === 16000 ? 64 : 32;\n\n // Initialize state and context\n this.state = new Float32Array(2 * 1 * 128); // [2, 1, 128]\n this.context = new Float32Array(this.contextSize);\n }\n\n get isLoaded(): boolean {\n return this._isLoaded;\n }\n\n /**\n * Backend type (always 'wasm' for Worker, WebGPU not supported in Workers)\n */\n get backend(): 'wasm' | null {\n return this._isLoaded ? 'wasm' : null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n get threshold(): number {\n return this.config.threshold;\n }\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number {\n return this.chunkSize;\n }\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number {\n return (this.chunkSize / this.config.sampleRate) * 1000;\n }\n\n /**\n * Create the worker from inline script\n */\n private createWorker(): Worker {\n const blob = new Blob([WORKER_SCRIPT], { type: 'application/javascript' });\n const blobUrl = URL.createObjectURL(blob);\n const worker = new Worker(blobUrl);\n\n // Revoke blob URL after worker is created (worker has its own copy)\n URL.revokeObjectURL(blobUrl);\n\n // Set up message handler\n worker.onmessage = (event: MessageEvent<VADWorkerResult>) => {\n this.handleWorkerMessage(event.data);\n };\n\n // Set up error handler\n worker.onerror = (error) => {\n logger.error('Worker error', { error: error.message });\n // Reject any pending operations\n for (const [, resolver] of this.pendingResolvers) {\n resolver.reject(new Error(`Worker error: ${error.message}`));\n }\n this.pendingResolvers.clear();\n };\n\n return worker;\n }\n\n /**\n * Handle messages from worker\n */\n private handleWorkerMessage(result: VADWorkerResult): void {\n // Route to pending resolver based on result type\n const resolver = this.pendingResolvers.get(result.type);\n if (resolver) {\n this.pendingResolvers.delete(result.type);\n if (result.type === 'error') {\n resolver.reject(new Error(result.error));\n } else {\n resolver.resolve(result);\n }\n }\n }\n\n /**\n * Send message to worker and wait for response\n */\n private sendMessage<T>(message: VADWorkerMessage, expectedType: string, 
timeoutMs: number): Promise<T> {\n return new Promise((resolve, reject) => {\n if (!this.worker) {\n reject(new Error('Worker not initialized'));\n return;\n }\n\n // Set up timeout\n const timeoutId = setTimeout(() => {\n this.pendingResolvers.delete(expectedType);\n reject(new Error(`Worker operation timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n\n // Register resolver\n this.pendingResolvers.set(expectedType, {\n resolve: (value) => {\n clearTimeout(timeoutId);\n resolve(value as T);\n },\n reject: (error) => {\n clearTimeout(timeoutId);\n reject(error);\n },\n });\n\n // Also listen for errors\n this.pendingResolvers.set('error', {\n resolve: () => {}, // Never called for errors\n reject: (error) => {\n clearTimeout(timeoutId);\n this.pendingResolvers.delete(expectedType);\n reject(error);\n },\n });\n\n // Send message\n this.worker.postMessage(message);\n });\n }\n\n /**\n * Load the ONNX model in the worker\n */\n async load(): Promise<VADWorkerModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this._isLoaded) {\n throw new Error('Model already loaded. Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVADWorker.load', {\n 'model.url': this.config.modelUrl,\n 'model.sample_rate': this.config.sampleRate,\n });\n\n try {\n logger.info('Creating VAD worker...');\n\n // Create worker\n this.worker = this.createWorker();\n\n logger.info('Loading model in worker...', {\n modelUrl: this.config.modelUrl,\n sampleRate: this.config.sampleRate,\n });\n\n // Send load message to worker\n const result = await this.sendMessage<{\n type: 'loaded';\n inputNames: string[];\n outputNames: string[];\n loadTimeMs: number;\n }>(\n {\n type: 'load',\n modelUrl: this.config.modelUrl,\n sampleRate: this.config.sampleRate,\n wasmPaths: WASM_CDN_PATH,\n },\n 'loaded',\n LOAD_TIMEOUT_MS\n );\n\n this._isLoaded = true;\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('VAD worker loaded successfully', {\n backend: 'wasm',\n loadTimeMs: Math.round(loadTimeMs),\n workerLoadTimeMs: Math.round(result.loadTimeMs),\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n threshold: this.config.threshold,\n });\n\n span?.setAttributes({\n 'model.backend': 'wasm',\n 'model.load_time_ms': loadTimeMs,\n 'model.worker_load_time_ms': result.loadTimeMs,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n });\n\n return {\n backend: 'wasm',\n loadTimeMs,\n inputNames: result.inputNames,\n outputNames: result.outputNames,\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'silero-vad-worker',\n error_type: 'load_failed',\n });\n\n // Clean up on failure\n if (this.worker) {\n this.worker.terminate();\n this.worker = null;\n }\n\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Reset state for new audio stream\n */\n async reset(): Promise<void> {\n if (!this._isLoaded || !this.worker) {\n throw new Error('Worker not loaded. 
Call load() first.');\n }\n\n // Request reset from worker to get fresh state\n const result = await this.sendMessage<{ type: 'reset'; state: Float32Array }>(\n { type: 'reset' },\n 'reset',\n INFERENCE_TIMEOUT_MS\n );\n\n // Update local state\n this.state = result.state;\n this.context = new Float32Array(this.contextSize);\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n }\n\n /**\n * Process a single audio chunk\n *\n * @param audioChunk - Float32Array of exactly chunkSize samples (512 for 16kHz, 256 for 8kHz)\n * @returns VAD result with speech probability\n */\n async process(audioChunk: Float32Array): Promise<VADResult> {\n if (!this._isLoaded || !this.worker) {\n throw new Error('Worker not loaded. Call load() first.');\n }\n\n if (audioChunk.length !== this.chunkSize) {\n throw new Error(\n `Audio chunk must be exactly ${this.chunkSize} samples (got ${audioChunk.length}). ` +\n `Use getChunkSize() to get required size.`\n );\n }\n\n return this.queueInference(audioChunk);\n }\n\n /**\n * Queue inference to serialize worker calls\n */\n private queueInference(audioChunk: Float32Array): Promise<VADResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n const audioChunkCopy = new Float32Array(audioChunk);\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVADWorker.process', {\n 'inference.backend': 'wasm',\n 'inference.chunk_size': this.chunkSize,\n });\n\n try {\n const startTime = performance.now();\n\n // Send process message to worker\n const result = await this.sendMessage<{\n type: 'result';\n probability: number;\n state: Float32Array;\n inferenceTimeMs: number;\n }>(\n {\n type: 'process',\n audio: audioChunkCopy,\n state: this.state,\n context: this.context,\n },\n 'result',\n INFERENCE_TIMEOUT_MS\n );\n\n // Update local state from worker result\n this.state = result.state;\n\n // Update context (last contextSize samples of input chunk)\n this.context = audioChunkCopy.slice(-this.contextSize);\n\n const inferenceTimeMs = performance.now() - startTime;\n const isSpeech = result.probability > this.config.threshold;\n\n // Pre-speech buffer logic (same as SileroVADInference)\n let preSpeechChunks: Float32Array[] | undefined;\n\n if (isSpeech && !this.wasSpeaking) {\n // Silence→Speech transition: populate preSpeechChunks\n preSpeechChunks = [...this.preSpeechBuffer];\n this.preSpeechBuffer = [];\n logger.debug('Speech started with pre-speech buffer', {\n preSpeechChunks: preSpeechChunks.length,\n durationMs: Math.round(preSpeechChunks.length * this.getChunkDurationMs()),\n });\n } else if (!isSpeech && !this.wasSpeaking) {\n // Still in silence: maintain ring buffer\n this.preSpeechBuffer.push(new Float32Array(audioChunkCopy));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n } else if (!isSpeech && this.wasSpeaking) {\n // Speech→Silence transition: clear buffer\n this.preSpeechBuffer = [];\n }\n\n this.wasSpeaking = isSpeech;\n\n logger.trace('VAD worker inference completed', {\n probability: Math.round(result.probability * 1000) / 1000,\n isSpeech,\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n workerTimeMs: Math.round(result.inferenceTimeMs * 100) / 100,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.worker_duration_ms': result.inferenceTimeMs,\n 'inference.probability': 
result.probability,\n 'inference.is_speech': isSpeech,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n status: 'success',\n });\n\n resolve({\n probability: result.probability,\n isSpeech,\n inferenceTimeMs,\n preSpeechChunks,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the worker and free resources\n */\n async dispose(): Promise<void> {\n if (this.worker) {\n try {\n // Ask worker to clean up\n await this.sendMessage({ type: 'dispose' }, 'disposed', INFERENCE_TIMEOUT_MS);\n } catch {\n // Ignore errors during dispose\n }\n\n // Terminate worker\n this.worker.terminate();\n this.worker = null;\n }\n\n this._isLoaded = false;\n this.state = new Float32Array(2 * 1 * 128);\n this.context = new Float32Array(this.contextSize);\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n this.pendingResolvers.clear();\n }\n\n /**\n * Check if Web Workers are supported\n */\n static isSupported(): boolean {\n return typeof Worker !== 'undefined';\n }\n}\n","/**\n * Factory function for Silero VAD with automatic Worker vs main thread selection\n *\n * Provides a unified API that automatically selects the optimal implementation:\n * - Desktop browsers: Uses SileroVADWorker (off-main-thread inference)\n * - Mobile devices: Uses SileroVADInference (main thread, avoids memory overhead)\n * - Fallback: Gracefully falls back to main thread if Worker fails\n *\n * @category Inference\n *\n * @example Basic usage (auto-detect)\n * ```typescript\n * import { createSileroVAD } from '@omote/core';\n *\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * threshold: 0.5,\n * });\n *\n * await vad.load();\n * const result = await vad.process(audioChunk);\n * if (result.isSpeech) {\n * console.log('Speech detected!', result.probability);\n * }\n * ```\n *\n * @example Force worker usage\n * ```typescript\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * useWorker: true, // Force Worker even on mobile\n * });\n * ```\n *\n * @example Force main thread\n * ```typescript\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * useWorker: false, // Force main thread\n * });\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { isMobile } from '../utils/runtime';\nimport { SileroVADInference } from './SileroVADInference';\nimport type { SileroVADConfig, VADModelInfo, VADResult } from './SileroVADInference';\nimport { SileroVADWorker } from './SileroVADWorker';\nimport type { VADWorkerModelInfo } from './SileroVADWorker';\nimport type { RuntimeBackend } from '../utils/runtime';\n\nconst logger = createLogger('createSileroVAD');\n\n/**\n * Common interface for both SileroVADInference and SileroVADWorker\n *\n * This interface defines the shared API that both implementations provide,\n * allowing consumers to use either interchangeably.\n */\nexport interface SileroVADBackend {\n /** Current backend type (webgpu, wasm, or null if not loaded) */\n readonly backend: RuntimeBackend | null;\n\n /** Whether the model is loaded and ready for inference */\n readonly isLoaded: 
boolean;\n\n /** Audio sample rate (8000 or 16000 Hz) */\n readonly sampleRate: number;\n\n /** Speech detection threshold (0-1) */\n readonly threshold: number;\n\n /**\n * Load the ONNX model\n * @returns Model loading information\n */\n load(): Promise<VADModelInfo | VADWorkerModelInfo>;\n\n /**\n * Process a single audio chunk\n * @param audioChunk - Float32Array of exactly chunkSize samples\n * @returns VAD result with speech probability\n */\n process(audioChunk: Float32Array): Promise<VADResult>;\n\n /**\n * Reset state for new audio stream\n */\n reset(): void | Promise<void>;\n\n /**\n * Dispose of the model and free resources\n */\n dispose(): Promise<void>;\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number;\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number;\n}\n\n/**\n * Configuration for the Silero VAD factory\n *\n * Extends SileroVADConfig with worker-specific options.\n */\nexport interface SileroVADFactoryConfig extends SileroVADConfig {\n /**\n * Force worker usage (true), main thread (false), or auto-detect (undefined).\n *\n * Auto-detection behavior:\n * - Desktop: Uses Worker (better responsiveness, off-main-thread)\n * - Mobile: Uses main thread (avoids 5MB memory overhead)\n *\n * You can override this to:\n * - `true`: Force Worker even on mobile (if you have memory headroom)\n * - `false`: Force main thread even on desktop (for debugging)\n *\n * Default: undefined (auto-detect)\n */\n useWorker?: boolean;\n\n /**\n * Fallback to main thread on worker errors.\n *\n * When true (default), if the Worker fails to load or encounters an error,\n * the factory will automatically create a main thread instance instead.\n *\n * When false, worker errors will propagate as exceptions.\n *\n * Default: true\n */\n fallbackOnError?: boolean;\n}\n\n/**\n * Check if the current environment supports VAD Web Workers\n *\n * Requirements:\n * - Worker constructor must exist\n * - Blob URL support (for inline worker script)\n *\n * @returns true if VAD Worker is supported\n */\nexport function supportsVADWorker(): boolean {\n // Check Worker constructor exists\n if (typeof Worker === 'undefined') {\n logger.debug('Worker not supported: Worker constructor undefined');\n return false;\n }\n\n // Check Blob URL support (needed for inline worker script)\n if (typeof URL === 'undefined' || typeof URL.createObjectURL === 'undefined') {\n logger.debug('Worker not supported: URL.createObjectURL unavailable');\n return false;\n }\n\n // Check Blob support\n if (typeof Blob === 'undefined') {\n logger.debug('Worker not supported: Blob constructor unavailable');\n return false;\n }\n\n return true;\n}\n\n/**\n * Create a Silero VAD instance with automatic implementation selection\n *\n * This factory function automatically selects between:\n * - **SileroVADWorker**: Off-main-thread inference (better for desktop)\n * - **SileroVADInference**: Main thread inference (better for mobile)\n *\n * The selection is based on:\n * 1. Explicit `useWorker` config (if provided)\n * 2. Platform detection (mobile vs desktop)\n * 3. 
Worker API availability\n *\n * Both implementations share the same interface (SileroVADBackend),\n * so consumers can use either interchangeably.\n *\n * @param config - Factory configuration\n * @returns A SileroVAD instance (either Worker or main thread)\n *\n * @example\n * ```typescript\n * // Auto-detect (recommended)\n * const vad = createSileroVAD({ modelUrl: '/models/silero-vad.onnx' });\n *\n * // Force Worker\n * const vadWorker = createSileroVAD({ modelUrl: '/models/silero-vad.onnx', useWorker: true });\n *\n * // Force main thread\n * const vadMain = createSileroVAD({ modelUrl: '/models/silero-vad.onnx', useWorker: false });\n * ```\n */\nexport function createSileroVAD(config: SileroVADFactoryConfig): SileroVADBackend {\n const fallbackOnError = config.fallbackOnError ?? true;\n\n // Determine whether to use Worker\n let useWorker: boolean;\n\n if (config.useWorker !== undefined) {\n // Explicit preference\n useWorker = config.useWorker;\n logger.debug('Worker preference explicitly set', { useWorker });\n } else {\n // Auto-detect based on platform and support\n const workerSupported = supportsVADWorker();\n const onMobile = isMobile();\n\n // Desktop with Worker support: use Worker\n // Mobile: use main thread (memory overhead concern)\n useWorker = workerSupported && !onMobile;\n\n logger.debug('Auto-detected Worker preference', {\n useWorker,\n workerSupported,\n onMobile,\n });\n }\n\n // Create the appropriate implementation\n if (useWorker) {\n logger.info('Creating SileroVADWorker (off-main-thread)');\n const worker = new SileroVADWorker({\n modelUrl: config.modelUrl,\n sampleRate: config.sampleRate,\n threshold: config.threshold,\n preSpeechBufferChunks: config.preSpeechBufferChunks,\n });\n\n if (fallbackOnError) {\n // Wrap with fallback behavior\n return new VADWorkerWithFallback(worker, config);\n }\n\n return worker as SileroVADBackend;\n }\n\n logger.info('Creating SileroVADInference (main thread)');\n return new SileroVADInference(config) as SileroVADBackend;\n}\n\n/**\n * Wrapper that provides automatic fallback from Worker to main thread\n *\n * If the Worker fails during load(), this wrapper will automatically\n * create a main thread SileroVADInference instance instead.\n */\nclass VADWorkerWithFallback implements SileroVADBackend {\n private implementation: SileroVADBackend;\n private readonly config: SileroVADFactoryConfig;\n private hasFallenBack = false;\n\n constructor(worker: SileroVADWorker, config: SileroVADFactoryConfig) {\n this.implementation = worker as SileroVADBackend;\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n // Worker always uses WASM, but hasn't loaded yet\n if (!this.isLoaded) return null;\n return this.hasFallenBack ? (this.implementation as SileroVADInference).backend : 'wasm';\n }\n\n get isLoaded(): boolean {\n return this.implementation.isLoaded;\n }\n\n get sampleRate(): number {\n return this.implementation.sampleRate;\n }\n\n get threshold(): number {\n return this.implementation.threshold;\n }\n\n async load(): Promise<VADModelInfo | VADWorkerModelInfo> {\n try {\n return await this.implementation.load();\n } catch (error) {\n logger.warn('Worker load failed, falling back to main thread', {\n error: error instanceof Error ? 
error.message : String(error),\n });\n\n // Clean up failed worker\n try {\n await this.implementation.dispose();\n } catch {\n // Ignore dispose errors\n }\n\n // Create main thread fallback\n this.implementation = new SileroVADInference(this.config) as SileroVADBackend;\n this.hasFallenBack = true;\n\n logger.info('Fallback to SileroVADInference successful');\n return await this.implementation.load();\n }\n }\n\n async process(audioChunk: Float32Array): Promise<VADResult> {\n return this.implementation.process(audioChunk);\n }\n\n reset(): void | Promise<void> {\n return this.implementation.reset();\n }\n\n async dispose(): Promise<void> {\n return this.implementation.dispose();\n }\n\n getChunkSize(): number {\n return this.implementation.getChunkSize();\n }\n\n getChunkDurationMs(): number {\n return this.implementation.getChunkDurationMs();\n }\n}\n","/**\n * Speech emotion inference\n *\n * Frame-level speech emotion recognition running in browser via ONNX Runtime Web.\n * Classifies audio into 4 emotion categories: neutral, happy, angry, sad.\n *\n * Uses SUPERB wav2vec2-base-superb-er model (~360MB ONNX, Apache 2.0 license).\n * Outputs frame-level embeddings at 50Hz (matching LAM lip sync) plus utterance-level\n * emotion classification.\n *\n * Uses lazy loading to conditionally load WebGPU or WASM-only bundle:\n * - iOS: Loads WASM-only bundle (WebGPU crashes due to Safari bugs)\n * - Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { Emotion2VecInference } from '@omote/core';\n *\n * const emotion = new Emotion2VecInference({\n * modelUrl: '/models/emotion/emotion_superb.onnx'\n * });\n * await emotion.load();\n *\n * // Process audio chunk (1 second at 16kHz)\n * const result = await emotion.infer(audioSamples);\n * console.log('Emotion:', result.dominant.emotion, 'Confidence:', result.dominant.confidence);\n * console.log('Frames:', result.frames.length, 'at 50Hz');\n * ```\n *\n * @example Access per-frame emotions\n * ```typescript\n * const result = await emotion.infer(audioSamples);\n * // Each frame has the same emotion (utterance-level applied per-frame)\n * result.frames.forEach((frame, i) => {\n * const timeMs = i * 20; // 50Hz = 20ms per frame\n * console.log(`${timeMs}ms: ${frame.emotion} (${frame.confidence.toFixed(2)})`);\n * });\n * ```\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n isWebGPUAvailable,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('Emotion2Vec');\n\n/**\n * Emotion labels supported by the SUPERB emotion model\n *\n * These are the 4 emotion categories that the model can classify:\n * - neutral: Neutral, calm state\n * - happy: Joy, happiness, excitement\n * - angry: Anger, frustration\n * - sad: Sadness, grief\n *\n * Labels are in model output order (indices 0-3).\n */\nexport 
const EMOTION2VEC_LABELS = ['neutral', 'happy', 'angry', 'sad'] as const;\n\nexport type Emotion2VecLabel = (typeof EMOTION2VEC_LABELS)[number];\n\nexport type EmotionBackend = BackendPreference;\n\n/**\n * Configuration for Emotion2Vec inference\n */\nexport interface Emotion2VecConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\n backend?: EmotionBackend;\n /** Sample rate (default: 16000) */\n sampleRate?: number;\n}\n\n/**\n * Model loading information\n */\nexport interface Emotion2VecModelInfo {\n backend: 'webgpu' | 'wasm';\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n}\n\n/**\n * Frame-level emotion result\n */\nexport interface EmotionFrame {\n /** Primary emotion label */\n emotion: Emotion2VecLabel;\n /** Confidence for primary emotion (0-1) */\n confidence: number;\n /** All emotion probabilities */\n probabilities: Record<Emotion2VecLabel, number>;\n}\n\n/**\n * Result from emotion inference\n */\nexport interface Emotion2VecResult {\n /** Frame-level emotion results at 50Hz (constant emotion per frame from utterance classification) */\n frames: EmotionFrame[];\n /** Overall dominant emotion for the audio (utterance-level classification) */\n dominant: EmotionFrame;\n /** Frame-level embeddings (768-dim per frame at 50Hz) for advanced use */\n embeddings: Float32Array[];\n /** Raw logits before softmax (4-dim for 4 emotions) */\n logits: Float32Array;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n}\n\n/**\n * Emotion2VecInference - Frame-level speech emotion recognition\n *\n * Uses SUPERB wav2vec2-base-superb-er model.\n * Processes audio and outputs:\n * - Frame-level embeddings at 50Hz (matching LAM lip sync)\n * - Utterance-level emotion classification\n *\n * @see https://huggingface.co/superb/wav2vec2-base-superb-er\n */\nexport class Emotion2VecInference {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null;\n private config: Required<Emotion2VecConfig>;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n constructor(config: Emotion2VecConfig) {\n this.config = {\n modelUrl: config.modelUrl,\n backend: config.backend ?? 'auto',\n sampleRate: config.sampleRate ?? 16000,\n };\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n /**\n * Check if WebGPU is available and working\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\n */\n static isWebGPUAvailable = isWebGPUAvailable;\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<Emotion2VecModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. 
Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Emotion2Vec.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend,\n });\n\n try {\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n logger.info('Checking model cache...');\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n logger.info('Cache check complete', { modelUrl, isCached });\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.info('Loading model from cache...', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n logger.info('Model loaded from cache', { size: formatBytes(modelBuffer.byteLength) });\n } else {\n logger.info('Fetching model (not cached)...', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n logger.info('Model fetched and cached', { size: formatBytes(modelBuffer.byteLength) });\n }\n\n logger.info('Creating ONNX session (this may take a while for large models)...');\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await ort.InferenceSession.create(modelData, sessionOptions);\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n sampleRate: this.config.sampleRate,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'emotion2vec',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n sampleRate: this.config.sampleRate,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'emotion2vec',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Run emotion inference on audio samples\n *\n * @param audio - Float32Array of 16kHz audio samples\n * @returns Frame-level emotion results at 50Hz\n */\n async infer(audio: Float32Array): Promise<Emotion2VecResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n return this.queueInference(audio);\n }\n\n private queueInference(audio: Float32Array): Promise<Emotion2VecResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n // During interruptions, audio's buffer may get detached by ONNX Runtime\n // before we access it in the async queue. 
Copy synchronously to preserve data.\n const audioCopy = new Float32Array(audio);\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Emotion2Vec.infer', {\n 'inference.backend': this._backend,\n 'inference.audio_samples': audioCopy.length,\n });\n\n try {\n const startTime = performance.now();\n\n // Create input tensor\n // SUPERB model expects: [batch, audio_samples] with input name 'audio'\n const inputTensor = new this.ort!.Tensor('float32', audioCopy, [1, audioCopy.length]);\n\n // Run inference - SUPERB model input name is 'audio'\n const results = await this.session!.run({ audio: inputTensor });\n\n // Extract outputs:\n // - logits: [1, 4] utterance-level emotion classification\n // - layer_norm_25: [1, frames, 768] frame-level embeddings\n const logitsTensor = results['logits'];\n const embeddingsTensor = results['layer_norm_25'];\n\n if (!logitsTensor) {\n throw new Error(\n `Missing logits tensor from SUPERB model. Got outputs: ${Object.keys(results).join(', ')}`\n );\n }\n\n // Process utterance-level logits\n const logitsData = logitsTensor.data as Float32Array;\n const logits = new Float32Array(logitsData);\n\n // Apply softmax to get probabilities\n const probs = this.softmax(logits);\n\n // Create probabilities record\n const probabilities: Record<Emotion2VecLabel, number> = {\n neutral: probs[0],\n happy: probs[1],\n angry: probs[2],\n sad: probs[3],\n };\n\n // Find dominant emotion\n let maxIdx = 0;\n let maxProb = probs[0];\n for (let i = 1; i < probs.length; i++) {\n if (probs[i] > maxProb) {\n maxProb = probs[i];\n maxIdx = i;\n }\n }\n\n const dominant: EmotionFrame = {\n emotion: EMOTION2VEC_LABELS[maxIdx],\n confidence: maxProb,\n probabilities,\n };\n\n // Process frame-level embeddings\n let embeddings: Float32Array[] = [];\n let numFrames = 1;\n\n if (embeddingsTensor) {\n const embeddingData = embeddingsTensor.data as Float32Array;\n const dims = embeddingsTensor.dims as number[];\n\n if (dims.length === 3) {\n // [batch, frames, embedding_dim]\n numFrames = dims[1];\n const embeddingDim = dims[2];\n\n for (let i = 0; i < numFrames; i++) {\n const start = i * embeddingDim;\n embeddings.push(new Float32Array(embeddingData.slice(start, start + embeddingDim)));\n }\n }\n }\n\n // Create per-frame emotion results\n // Currently we apply the utterance-level emotion to all frames\n // (Future: could train a per-frame classifier on the embeddings)\n const frames: EmotionFrame[] = [];\n for (let i = 0; i < numFrames; i++) {\n frames.push({\n emotion: dominant.emotion,\n confidence: dominant.confidence,\n probabilities: { ...probabilities },\n });\n }\n\n const inferenceTimeMs = performance.now() - startTime;\n\n logger.debug('Emotion inference completed', {\n numFrames,\n dominant: dominant.emotion,\n confidence: Math.round(dominant.confidence * 100),\n inferenceTimeMs: Math.round(inferenceTimeMs),\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.num_frames': numFrames,\n 'inference.dominant_emotion': dominant.emotion,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'emotion2vec',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'emotion2vec',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n frames,\n dominant,\n embeddings,\n logits,\n inferenceTimeMs,\n });\n } catch (err) 
{\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'emotion2vec',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Apply softmax to convert logits to probabilities\n */\n private softmax(logits: Float32Array): Float32Array {\n // Find max for numerical stability\n let max = logits[0];\n for (let i = 1; i < logits.length; i++) {\n if (logits[i] > max) max = logits[i];\n }\n\n // Compute exp and sum\n const exp = new Float32Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n\n // Normalize\n const probs = new Float32Array(logits.length);\n for (let i = 0; i < logits.length; i++) {\n probs[i] = exp[i] / sum;\n }\n\n return probs;\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n }\n}\n","/**\r\n * Safari Web Speech API wrapper for iOS speech recognition\r\n *\r\n * Provides a similar interface to WhisperInference for easy substitution on iOS.\r\n * Uses the native Web Speech API which is significantly faster than Whisper WASM on iOS.\r\n *\r\n * Key differences from WhisperInference:\r\n * - Real-time streaming (not batch processing)\r\n * - No audio buffer input (microphone handled by browser)\r\n * - transcribe() throws error (use start/stop pattern instead)\r\n *\r\n * @category Inference\r\n *\r\n * @example Basic usage\r\n * ```typescript\r\n * import { SafariSpeechRecognition, shouldUseNativeASR } from '@omote/core';\r\n *\r\n * // Use native ASR on iOS, Whisper elsewhere\r\n * if (shouldUseNativeASR()) {\r\n * const speech = new SafariSpeechRecognition({ language: 'en-US' });\r\n *\r\n * speech.onResult((result) => {\r\n * console.log('Transcript:', result.text);\r\n * });\r\n *\r\n * await speech.start();\r\n * // ... user speaks ...\r\n * const finalResult = await speech.stop();\r\n * }\r\n * ```\r\n *\r\n * @example Platform-aware initialization\r\n * ```typescript\r\n * const asr = shouldUseNativeASR()\r\n * ? 
new SafariSpeechRecognition({ language: 'en-US' })\r\n * : new WhisperInference({ model: 'tiny' });\r\n * ```\r\n */\r\n\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\nimport { isSpeechRecognitionAvailable } from '../utils/runtime';\r\n\r\nconst logger = createLogger('SafariSpeech');\r\n\r\n/**\r\n * Configuration for Safari Speech Recognition\r\n */\r\nexport interface SafariSpeechConfig {\r\n /** Language code (default: 'en-US') */\r\n language?: string;\r\n /** Continuous mode for ongoing conversation (default: true) */\r\n continuous?: boolean;\r\n /** Interim results before speech ends (default: true) */\r\n interimResults?: boolean;\r\n /** Max alternatives (default: 1) */\r\n maxAlternatives?: number;\r\n}\r\n\r\n/**\r\n * Result from speech recognition (matches WhisperInference TranscriptionResult)\r\n */\r\nexport interface SpeechRecognitionResult {\r\n /** Transcribed text */\r\n text: string;\r\n /** Detected/used language */\r\n language: string;\r\n /** Time since start in ms (not inference time - native API) */\r\n inferenceTimeMs: number;\r\n /** Whether this is a final result or interim */\r\n isFinal: boolean;\r\n /** Confidence score (0-1) if available */\r\n confidence?: number;\r\n}\r\n\r\n/**\r\n * Callback for receiving recognition results\r\n */\r\nexport type SpeechResultCallback = (result: SpeechRecognitionResult) => void;\r\n\r\n/**\r\n * Callback for receiving recognition errors\r\n */\r\nexport type SpeechErrorCallback = (error: Error) => void;\r\n\r\n// Type declarations for Web Speech API (not in lib.dom.d.ts by default)\r\ninterface SpeechRecognitionEvent extends Event {\r\n resultIndex: number;\r\n results: SpeechRecognitionResultList;\r\n}\r\n\r\ninterface SpeechRecognitionResultList {\r\n length: number;\r\n item(index: number): SpeechRecognitionResult;\r\n [index: number]: SpeechRecognitionResultItem;\r\n}\r\n\r\ninterface SpeechRecognitionResultItem {\r\n isFinal: boolean;\r\n length: number;\r\n item(index: number): SpeechRecognitionAlternative;\r\n [index: number]: SpeechRecognitionAlternative;\r\n}\r\n\r\ninterface SpeechRecognitionAlternative {\r\n transcript: string;\r\n confidence: number;\r\n}\r\n\r\ninterface SpeechRecognitionErrorEvent extends Event {\r\n error: string;\r\n message: string;\r\n}\r\n\r\ninterface SpeechRecognitionInterface extends EventTarget {\r\n continuous: boolean;\r\n interimResults: boolean;\r\n lang: string;\r\n maxAlternatives: number;\r\n start(): void;\r\n stop(): void;\r\n abort(): void;\r\n onresult: ((event: SpeechRecognitionEvent) => void) | null;\r\n onerror: ((event: SpeechRecognitionErrorEvent) => void) | null;\r\n onend: (() => void) | null;\r\n onstart: (() => void) | null;\r\n onaudiostart: (() => void) | null;\r\n onaudioend: (() => void) | null;\r\n onspeechstart: (() => void) | null;\r\n onspeechend: (() => void) | null;\r\n}\r\n\r\ndeclare global {\r\n interface Window {\r\n SpeechRecognition?: new () => SpeechRecognitionInterface;\r\n webkitSpeechRecognition?: new () => SpeechRecognitionInterface;\r\n }\r\n}\r\n\r\n/**\r\n * Safari Web Speech API wrapper\r\n *\r\n * Provides native speech recognition on iOS Safari.\r\n * Much faster than Whisper WASM and more battery-efficient.\r\n */\r\nexport class SafariSpeechRecognition {\r\n private config: Required<SafariSpeechConfig>;\r\n private recognition: SpeechRecognitionInterface | null = null;\r\n private isListening = false;\r\n private startTime = 0;\r\n private accumulatedText = '';\r\n\r\n // 
Callbacks\r\n private resultCallbacks: SpeechResultCallback[] = [];\r\n private errorCallbacks: SpeechErrorCallback[] = [];\r\n\r\n // Promise resolvers for stop()\r\n private stopResolver: ((result: SpeechRecognitionResult) => void) | null = null;\r\n private stopRejecter: ((error: Error) => void) | null = null;\r\n\r\n constructor(config: SafariSpeechConfig = {}) {\r\n this.config = {\r\n language: config.language ?? 'en-US',\r\n continuous: config.continuous ?? true,\r\n interimResults: config.interimResults ?? true,\r\n maxAlternatives: config.maxAlternatives ?? 1,\r\n };\r\n\r\n logger.debug('SafariSpeechRecognition created', {\r\n language: this.config.language,\r\n continuous: this.config.continuous,\r\n });\r\n }\r\n\r\n /**\r\n * Check if Web Speech API is available\r\n */\r\n static isAvailable(): boolean {\r\n return isSpeechRecognitionAvailable();\r\n }\r\n\r\n /**\r\n * Check if currently listening\r\n */\r\n get listening(): boolean {\r\n return this.isListening;\r\n }\r\n\r\n /**\r\n * Get the language being used\r\n */\r\n get language(): string {\r\n return this.config.language;\r\n }\r\n\r\n /**\r\n * Register a callback for receiving results\r\n */\r\n onResult(callback: SpeechResultCallback): void {\r\n this.resultCallbacks.push(callback);\r\n }\r\n\r\n /**\r\n * Register a callback for receiving errors\r\n */\r\n onError(callback: SpeechErrorCallback): void {\r\n this.errorCallbacks.push(callback);\r\n }\r\n\r\n /**\r\n * Remove a result callback\r\n */\r\n offResult(callback: SpeechResultCallback): void {\r\n const index = this.resultCallbacks.indexOf(callback);\r\n if (index !== -1) {\r\n this.resultCallbacks.splice(index, 1);\r\n }\r\n }\r\n\r\n /**\r\n * Remove an error callback\r\n */\r\n offError(callback: SpeechErrorCallback): void {\r\n const index = this.errorCallbacks.indexOf(callback);\r\n if (index !== -1) {\r\n this.errorCallbacks.splice(index, 1);\r\n }\r\n }\r\n\r\n /**\r\n * Start listening for speech\r\n *\r\n * On iOS Safari, this will trigger the microphone permission prompt\r\n * if not already granted.\r\n */\r\n async start(): Promise<void> {\r\n if (this.isListening) {\r\n logger.warn('Already listening');\r\n return;\r\n }\r\n\r\n if (!SafariSpeechRecognition.isAvailable()) {\r\n const error = new Error(\r\n 'Web Speech API not available. ' +\r\n 'This API is supported in Safari (iOS/macOS) and Chrome. 
' +\r\n 'On iOS, use Safari for native speech recognition.'\r\n );\r\n this.emitError(error);\r\n throw error;\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.start', {\r\n 'speech.language': this.config.language,\r\n 'speech.continuous': this.config.continuous,\r\n });\r\n\r\n try {\r\n // Create recognition instance\r\n const SpeechRecognitionClass = window.SpeechRecognition || window.webkitSpeechRecognition;\r\n if (!SpeechRecognitionClass) {\r\n throw new Error('SpeechRecognition constructor not found');\r\n }\r\n\r\n this.recognition = new SpeechRecognitionClass();\r\n this.recognition.continuous = this.config.continuous;\r\n this.recognition.interimResults = this.config.interimResults;\r\n this.recognition.lang = this.config.language;\r\n this.recognition.maxAlternatives = this.config.maxAlternatives;\r\n\r\n // Set up event handlers\r\n this.setupEventHandlers();\r\n\r\n // Start recognition\r\n this.recognition.start();\r\n this.isListening = true;\r\n this.startTime = performance.now();\r\n this.accumulatedText = '';\r\n\r\n logger.info('Speech recognition started', {\r\n language: this.config.language,\r\n });\r\n\r\n span?.end();\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n this.emitError(error instanceof Error ? error : new Error(String(error)));\r\n throw error;\r\n }\r\n }\r\n\r\n /**\r\n * Stop listening and return the final transcript\r\n */\r\n async stop(): Promise<SpeechRecognitionResult> {\r\n if (!this.isListening || !this.recognition) {\r\n logger.warn('Not currently listening');\r\n return {\r\n text: this.accumulatedText,\r\n language: this.config.language,\r\n inferenceTimeMs: 0,\r\n isFinal: true,\r\n };\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.stop');\r\n\r\n return new Promise((resolve, reject) => {\r\n this.stopResolver = resolve;\r\n this.stopRejecter = reject;\r\n\r\n try {\r\n this.recognition!.stop();\r\n // onend handler will resolve the promise\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n this.isListening = false;\r\n reject(error);\r\n }\r\n });\r\n }\r\n\r\n /**\r\n * Abort recognition without waiting for final result\r\n */\r\n abort(): void {\r\n if (this.recognition && this.isListening) {\r\n this.recognition.abort();\r\n this.isListening = false;\r\n logger.info('Speech recognition aborted');\r\n }\r\n }\r\n\r\n /**\r\n * NOT SUPPORTED: Transcribe audio buffer\r\n *\r\n * Safari Speech API does not support transcribing pre-recorded audio.\r\n * It only works with live microphone input.\r\n *\r\n * For batch transcription on iOS, use server-side Whisper or a cloud ASR service.\r\n *\r\n * @throws Error always - this method is not supported\r\n */\r\n async transcribe(_audio: Float32Array): Promise<SpeechRecognitionResult> {\r\n throw new Error(\r\n 'SafariSpeechRecognition does not support transcribe() with audio buffers. ' +\r\n 'The Web Speech API only works with live microphone input. 
' +\r\n 'Use start() and stop() for real-time recognition, or use WhisperInference/cloud ASR for batch transcription.'\r\n );\r\n }\r\n\r\n /**\r\n * Dispose of recognition resources\r\n */\r\n dispose(): void {\r\n if (this.recognition) {\r\n if (this.isListening) {\r\n this.recognition.abort();\r\n }\r\n this.recognition = null;\r\n }\r\n this.isListening = false;\r\n this.resultCallbacks = [];\r\n this.errorCallbacks = [];\r\n logger.debug('SafariSpeechRecognition disposed');\r\n }\r\n\r\n /**\r\n * Set up event handlers for the recognition instance\r\n */\r\n private setupEventHandlers(): void {\r\n if (!this.recognition) return;\r\n\r\n this.recognition.onresult = (event: SpeechRecognitionEvent) => {\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.onresult');\r\n\r\n try {\r\n // Process all new results\r\n for (let i = event.resultIndex; i < event.results.length; i++) {\r\n const result = event.results[i];\r\n const alternative = result[0];\r\n\r\n if (alternative) {\r\n const text = alternative.transcript;\r\n const isFinal = result.isFinal;\r\n\r\n // Accumulate final text\r\n if (isFinal) {\r\n this.accumulatedText += text + ' ';\r\n }\r\n\r\n const speechResult: SpeechRecognitionResult = {\r\n text: isFinal ? this.accumulatedText.trim() : text,\r\n language: this.config.language,\r\n inferenceTimeMs: performance.now() - this.startTime,\r\n isFinal,\r\n confidence: alternative.confidence,\r\n };\r\n\r\n // Emit to callbacks\r\n this.emitResult(speechResult);\r\n\r\n logger.trace('Speech result', {\r\n text: text.substring(0, 50),\r\n isFinal,\r\n confidence: alternative.confidence,\r\n });\r\n }\r\n }\r\n\r\n span?.end();\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n logger.error('Error processing speech result', { error });\r\n }\r\n };\r\n\r\n this.recognition.onerror = (event: SpeechRecognitionErrorEvent) => {\r\n const error = new Error(`Speech recognition error: ${event.error} - ${event.message}`);\r\n logger.error('Speech recognition error', { error: event.error, message: event.message });\r\n this.emitError(error);\r\n\r\n if (this.stopRejecter) {\r\n this.stopRejecter(error);\r\n this.stopResolver = null;\r\n this.stopRejecter = null;\r\n }\r\n };\r\n\r\n this.recognition.onend = () => {\r\n this.isListening = false;\r\n logger.info('Speech recognition ended', {\r\n totalText: this.accumulatedText.length,\r\n durationMs: performance.now() - this.startTime,\r\n });\r\n\r\n // Resolve stop() promise if pending\r\n if (this.stopResolver) {\r\n const result: SpeechRecognitionResult = {\r\n text: this.accumulatedText.trim(),\r\n language: this.config.language,\r\n inferenceTimeMs: performance.now() - this.startTime,\r\n isFinal: true,\r\n };\r\n this.stopResolver(result);\r\n this.stopResolver = null;\r\n this.stopRejecter = null;\r\n }\r\n };\r\n\r\n this.recognition.onstart = () => {\r\n logger.debug('Speech recognition started by browser');\r\n };\r\n\r\n this.recognition.onspeechstart = () => {\r\n logger.debug('Speech detected');\r\n };\r\n\r\n this.recognition.onspeechend = () => {\r\n logger.debug('Speech ended');\r\n };\r\n }\r\n\r\n /**\r\n * Emit result to all registered callbacks\r\n */\r\n private emitResult(result: SpeechRecognitionResult): void {\r\n for (const callback of this.resultCallbacks) {\r\n try {\r\n callback(result);\r\n } catch (error) {\r\n logger.error('Error in result callback', { error });\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Emit error to all 
registered callbacks\r\n */\r\n private emitError(error: Error): void {\r\n for (const callback of this.errorCallbacks) {\r\n try {\r\n callback(error);\r\n } catch (callbackError) {\r\n logger.error('Error in error callback', { error: callbackError });\r\n }\r\n }\r\n }\r\n}\r\n","/**\n * Emotion - Helper for creating emotion vectors for avatar animation\n *\n * Provides 10 explicit emotion channels that can be used to control\n * avatar expressions and emotional states.\n *\n * @category Emotion\n *\n * @example Creating emotion vectors\n * ```typescript\n * import { createEmotionVector, EmotionPresets } from '@omote/core';\n *\n * // Named weights\n * const happy = createEmotionVector({ joy: 0.8, amazement: 0.2 });\n *\n * // Use preset\n * const surprised = EmotionPresets.surprised;\n * ```\n *\n * @example Smooth transitions\n * ```typescript\n * import { EmotionController } from '@omote/core';\n *\n * const controller = new EmotionController();\n * controller.setPreset('happy');\n * controller.transitionTo({ sadness: 0.7 }, 500);\n *\n * // In animation loop\n * controller.update();\n * const emotion = controller.emotion;\n * ```\n */\n\n/** The 10 explicit emotion channels */\nexport const EMOTION_NAMES = [\n 'amazement',\n 'anger',\n 'cheekiness',\n 'disgust',\n 'fear',\n 'grief',\n 'joy',\n 'outofbreath',\n 'pain',\n 'sadness',\n] as const;\n\nexport type EmotionName = typeof EMOTION_NAMES[number];\n\n/** Emotion weights by name */\nexport type EmotionWeights = Partial<Record<EmotionName, number>>;\n\n/** Total emotion vector size */\nexport const EMOTION_VECTOR_SIZE = 26;\n\n/** Number of explicit emotion channels */\nexport const EXPLICIT_EMOTION_COUNT = 10;\n\n/**\n * Create an emotion vector from named weights\n *\n * @param weights - Named emotion weights (0-1)\n * @returns Float32Array of emotion values\n *\n * @example\n * ```ts\n * const emotion = createEmotionVector({ joy: 0.8, amazement: 0.3 });\n * ```\n */\nexport function createEmotionVector(weights: EmotionWeights = {}): Float32Array {\n const vector = new Float32Array(EMOTION_VECTOR_SIZE);\n\n for (const [name, value] of Object.entries(weights)) {\n const idx = EMOTION_NAMES.indexOf(name as EmotionName);\n if (idx >= 0) {\n vector[idx] = Math.max(0, Math.min(1, value));\n }\n }\n\n return vector;\n}\n\n/**\n * Pre-built emotion presets for common expressions\n */\nexport const EmotionPresets = {\n /** Neutral/default - no emotional expression */\n neutral: createEmotionVector({}),\n\n /** Happy - joy with slight amazement */\n happy: createEmotionVector({ joy: 0.7, amazement: 0.2 }),\n\n /** Sad - grief and sadness */\n sad: createEmotionVector({ sadness: 0.7, grief: 0.4 }),\n\n /** Angry - anger with disgust */\n angry: createEmotionVector({ anger: 0.8, disgust: 0.3 }),\n\n /** Surprised - high amazement */\n surprised: createEmotionVector({ amazement: 0.9, fear: 0.2 }),\n\n /** Scared - fear with pain */\n scared: createEmotionVector({ fear: 0.8, pain: 0.3 }),\n\n /** Disgusted - disgust with anger */\n disgusted: createEmotionVector({ disgust: 0.8, anger: 0.2 }),\n\n /** Excited - joy with amazement and cheekiness */\n excited: createEmotionVector({ joy: 0.6, amazement: 0.5, cheekiness: 0.4 }),\n\n /** Tired - out of breath with sadness */\n tired: createEmotionVector({ outofbreath: 0.6, sadness: 0.3 }),\n\n /** Playful - cheekiness with joy */\n playful: createEmotionVector({ cheekiness: 0.7, joy: 0.5 }),\n\n /** Pained - pain with grief */\n pained: createEmotionVector({ pain: 0.8, grief: 0.4 }),\n\n /** 
Contemplative - slight sadness, calm */\n contemplative: createEmotionVector({ sadness: 0.2, grief: 0.1 }),\n} as const;\n\nexport type EmotionPresetName = keyof typeof EmotionPresets;\n\n/**\n * Get an emotion preset by name\n */\nexport function getEmotionPreset(name: EmotionPresetName): Float32Array {\n return EmotionPresets[name].slice();\n}\n\n/**\n * Blend multiple emotion vectors together\n *\n * @param emotions - Array of { vector, weight } pairs\n * @returns Blended emotion vector\n *\n * @example\n * ```ts\n * const blended = blendEmotions([\n * { vector: EmotionPresets.happy, weight: 0.7 },\n * { vector: EmotionPresets.surprised, weight: 0.3 },\n * ]);\n * ```\n */\nexport function blendEmotions(\n emotions: Array<{ vector: Float32Array; weight: number }>\n): Float32Array {\n const result = new Float32Array(EMOTION_VECTOR_SIZE);\n let totalWeight = 0;\n\n for (const { vector, weight } of emotions) {\n totalWeight += weight;\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] += (vector[i] || 0) * weight;\n }\n }\n\n // Normalize if total weight > 0\n if (totalWeight > 0) {\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] /= totalWeight;\n }\n }\n\n return result;\n}\n\n/**\n * Interpolate between two emotion vectors\n *\n * @param from - Starting emotion\n * @param to - Target emotion\n * @param t - Interpolation factor (0-1)\n * @returns Interpolated emotion vector\n */\nexport function lerpEmotion(\n from: Float32Array,\n to: Float32Array,\n t: number\n): Float32Array {\n const result = new Float32Array(EMOTION_VECTOR_SIZE);\n const clampedT = Math.max(0, Math.min(1, t));\n\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] = (from[i] || 0) * (1 - clampedT) + (to[i] || 0) * clampedT;\n }\n\n return result;\n}\n\n/**\n * EmotionController - Manages emotion state with smooth transitions\n */\nexport class EmotionController {\n private currentEmotion = new Float32Array(EMOTION_VECTOR_SIZE);\n private targetEmotion = new Float32Array(EMOTION_VECTOR_SIZE);\n private transitionProgress = 1.0;\n private transitionDuration = 0;\n private transitionStartTime = 0;\n\n /**\n * Get the current emotion vector\n */\n get emotion(): Float32Array {\n if (this.transitionProgress >= 1.0) {\n return this.targetEmotion;\n }\n\n // Interpolate during transition\n return lerpEmotion(this.currentEmotion, this.targetEmotion, this.transitionProgress);\n }\n\n /**\n * Set emotion immediately (no transition)\n */\n set(weights: EmotionWeights): void {\n const newEmotion = createEmotionVector(weights);\n this.targetEmotion.set(newEmotion);\n this.currentEmotion.set(newEmotion);\n this.transitionProgress = 1.0;\n }\n\n /**\n * Set emotion from preset immediately\n */\n setPreset(preset: EmotionPresetName): void {\n const newEmotion = getEmotionPreset(preset);\n this.targetEmotion.set(newEmotion);\n this.currentEmotion.set(newEmotion);\n this.transitionProgress = 1.0;\n }\n\n /**\n * Transition to new emotion over time\n *\n * @param weights - Target emotion weights\n * @param durationMs - Transition duration in milliseconds\n */\n transitionTo(weights: EmotionWeights, durationMs: number): void {\n this.currentEmotion.set(this.emotion);\n this.targetEmotion.set(createEmotionVector(weights));\n this.transitionDuration = durationMs;\n this.transitionStartTime = performance.now();\n this.transitionProgress = 0;\n }\n\n /**\n * Transition to preset over time\n */\n transitionToPreset(preset: EmotionPresetName, durationMs: number): void {\n 
this.currentEmotion.set(this.emotion);\n this.targetEmotion.set(getEmotionPreset(preset));\n this.transitionDuration = durationMs;\n this.transitionStartTime = performance.now();\n this.transitionProgress = 0;\n }\n\n /**\n * Update transition progress (call each frame)\n */\n update(): void {\n if (this.transitionProgress >= 1.0) return;\n\n const elapsed = performance.now() - this.transitionStartTime;\n this.transitionProgress = Math.min(1.0, elapsed / this.transitionDuration);\n }\n\n /**\n * Check if currently transitioning\n */\n get isTransitioning(): boolean {\n return this.transitionProgress < 1.0;\n }\n\n /**\n * Reset to neutral\n */\n reset(): void {\n this.currentEmotion.fill(0);\n this.targetEmotion.fill(0);\n this.transitionProgress = 1.0;\n }\n}\n","/**\r\n * AWS AgentCore Adapter\r\n *\r\n * Primary AI adapter for the Omote Platform.\r\n *\r\n * Pipeline:\r\n * User Audio -> Whisper ASR (local) -> Text\r\n * Text -> AgentCore (WebSocket) -> Response Text + Audio chunks (TTS handled backend-side)\r\n * Audio chunks -> LAM (local) -> Blendshapes -> Render\r\n *\r\n * @category AI\r\n */\r\n\r\nimport { EventEmitter } from '../../events/EventEmitter';\r\nimport type {\r\n AIAdapter,\r\n AIAdapterEvents,\r\n SessionConfig,\r\n AISessionState,\r\n ConversationMessage,\r\n TenantConfig,\r\n} from '../interfaces/AIAdapter';\r\nimport { WhisperInference } from '../../inference/WhisperInference';\r\nimport { Wav2Vec2Inference, LAM_BLENDSHAPES } from '../../inference/Wav2Vec2Inference';\r\nimport { SileroVADInference } from '../../inference/SileroVADInference';\r\nimport { EmotionController } from '../../emotion/Emotion';\r\nimport { SyncedAudioPipeline } from '../../audio/SyncedAudioPipeline';\r\n\r\n/**\r\n * AgentCore-specific configuration\r\n */\r\nexport interface AgentCoreConfig {\r\n /** AgentCore WebSocket endpoint */\r\n endpoint: string;\r\n /** AWS region */\r\n region?: string;\r\n /** Model URLs */\r\n models?: {\r\n lamUrl?: string;\r\n };\r\n /** Enable observability */\r\n observability?: {\r\n tracing?: boolean;\r\n metrics?: boolean;\r\n };\r\n}\r\n\r\n/**\r\n * AWS AgentCore Adapter\r\n */\r\nexport class AgentCoreAdapter extends EventEmitter<AIAdapterEvents> implements AIAdapter {\r\n readonly name = 'AgentCore';\r\n\r\n private _state: AISessionState = 'disconnected';\r\n private _sessionId: string | null = null;\r\n private _isConnected = false;\r\n\r\n // Sub-components\r\n private whisper: WhisperInference | null = null;\r\n private vad: SileroVADInference | null = null;\r\n private lam: Wav2Vec2Inference | null = null;\r\n private emotionController: EmotionController;\r\n private pipeline: SyncedAudioPipeline | null = null;\r\n\r\n // WebSocket connection to AgentCore\r\n private ws: WebSocket | null = null;\r\n private wsReconnectAttempts = 0;\r\n private readonly maxReconnectAttempts = 5;\r\n\r\n // Audio buffers\r\n private audioBuffer: Float32Array[] = [];\r\n\r\n // Conversation state\r\n private history: ConversationMessage[] = [];\r\n private currentConfig: SessionConfig | null = null;\r\n private agentCoreConfig: AgentCoreConfig;\r\n\r\n // Interruption handling\r\n private isSpeaking = false;\r\n private currentTtsAbortController: AbortController | null = null;\r\n\r\n // Auth token cache per tenant\r\n private tokenCache = new Map<string, { token: string; expiresAt: number }>();\r\n\r\n constructor(config: AgentCoreConfig) {\r\n super();\r\n this.agentCoreConfig = config;\r\n this.emotionController = new EmotionController();\r\n }\r\n\r\n get 
state(): AISessionState {\r\n return this._state;\r\n }\r\n\r\n get sessionId(): string | null {\r\n return this._sessionId;\r\n }\r\n\r\n get isConnected(): boolean {\r\n return this._isConnected;\r\n }\r\n\r\n /**\r\n * Connect to AgentCore with session configuration\r\n */\r\n async connect(config: SessionConfig): Promise<void> {\r\n this.currentConfig = config;\r\n this._sessionId = config.sessionId;\r\n\r\n try {\r\n // 1. Get/refresh auth token for tenant\r\n const authToken = await this.getAuthToken(config.tenant);\r\n\r\n // 2. Initialize local inference components in parallel\r\n await Promise.all([\r\n this.initWhisper(),\r\n this.initLAM(),\r\n ]);\r\n\r\n // 3. Connect to AgentCore WebSocket\r\n await this.connectWebSocket(authToken, config);\r\n\r\n this._isConnected = true;\r\n this.setState('idle');\r\n\r\n this.emit('connection.opened', { sessionId: this._sessionId, adapter: this.name });\r\n } catch (error) {\r\n this.setState('error');\r\n this.emit('connection.error', {\r\n error: error as Error,\r\n recoverable: true,\r\n });\r\n throw error;\r\n }\r\n }\r\n\r\n /**\r\n * Disconnect and cleanup\r\n */\r\n async disconnect(): Promise<void> {\r\n // Cancel any ongoing TTS\r\n this.currentTtsAbortController?.abort();\r\n\r\n // Stop pipeline\r\n if (this.pipeline) {\r\n this.pipeline.dispose();\r\n this.pipeline = null;\r\n }\r\n\r\n // Close WebSocket\r\n if (this.ws) {\r\n this.ws.close(1000, 'Client disconnect');\r\n this.ws = null;\r\n }\r\n\r\n // Cleanup local components\r\n await Promise.all([\r\n this.whisper?.dispose(),\r\n this.vad?.dispose(),\r\n this.lam?.dispose(),\r\n ]);\r\n\r\n this._isConnected = false;\r\n this.setState('disconnected');\r\n\r\n this.emit('connection.closed', { reason: 'Client disconnect' });\r\n }\r\n\r\n /**\r\n * Push user audio for processing\r\n */\r\n pushAudio(audio: Int16Array | Float32Array): void {\r\n if (!this._isConnected) return;\r\n\r\n // Handle interruption detection (async but fire-and-forget)\r\n if (this.isSpeaking) {\r\n this.detectVoiceActivity(audio).then((hasVoiceActivity) => {\r\n if (hasVoiceActivity) {\r\n this.interrupt();\r\n }\r\n }).catch((error) => {\r\n console.error('[AgentCore] VAD error during interruption detection:', error);\r\n });\r\n // Don't return - still buffer the audio for transcription after interruption\r\n }\r\n\r\n // Convert to Float32 if needed\r\n const float32 = audio instanceof Float32Array\r\n ? 
audio\r\n : this.int16ToFloat32(audio);\r\n\r\n // Buffer audio chunks\r\n this.audioBuffer.push(float32);\r\n\r\n // Debounce and send to Whisper when we have enough\r\n this.scheduleTranscription();\r\n }\r\n\r\n /**\r\n * Send text directly to AgentCore\r\n */\r\n async sendText(text: string): Promise<void> {\r\n if (!this._isConnected || !this.ws) {\r\n throw new Error('Not connected to AgentCore');\r\n }\r\n\r\n // Add to history\r\n this.addToHistory({\r\n role: 'user',\r\n content: text,\r\n timestamp: Date.now(),\r\n });\r\n\r\n this.setState('thinking');\r\n this.emit('ai.thinking.start', { timestamp: Date.now() });\r\n\r\n // Send to AgentCore\r\n this.ws.send(JSON.stringify({\r\n type: 'user_message',\r\n sessionId: this._sessionId,\r\n content: text,\r\n context: {\r\n history: this.history.slice(-10), // Last 10 messages\r\n emotion: Array.from(this.emotionController.emotion),\r\n },\r\n }));\r\n }\r\n\r\n /**\r\n * Interrupt current AI response\r\n */\r\n interrupt(): void {\r\n if (!this.isSpeaking) return;\r\n\r\n this.emit('interruption.detected', { timestamp: Date.now() });\r\n\r\n // Cancel any pending operations\r\n this.currentTtsAbortController?.abort();\r\n this.currentTtsAbortController = null;\r\n\r\n // Notify AgentCore to stop TTS streaming\r\n if (this.ws?.readyState === WebSocket.OPEN) {\r\n this.ws.send(JSON.stringify({\r\n type: 'interrupt',\r\n sessionId: this._sessionId,\r\n timestamp: Date.now(),\r\n }));\r\n }\r\n\r\n this.isSpeaking = false;\r\n this.setState('listening');\r\n\r\n this.emit('interruption.handled', { timestamp: Date.now(), action: 'stop' });\r\n }\r\n\r\n getHistory(): ConversationMessage[] {\r\n return [...this.history];\r\n }\r\n\r\n clearHistory(): void {\r\n this.history = [];\r\n this.emit('memory.updated', { messageCount: 0 });\r\n }\r\n\r\n async healthCheck(): Promise<boolean> {\r\n if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {\r\n return false;\r\n }\r\n\r\n return new Promise((resolve) => {\r\n const timeout = setTimeout(() => resolve(false), 5000);\r\n\r\n const handler = (event: MessageEvent) => {\r\n const data = JSON.parse(event.data);\r\n if (data.type === 'pong') {\r\n clearTimeout(timeout);\r\n this.ws?.removeEventListener('message', handler);\r\n resolve(true);\r\n }\r\n };\r\n\r\n this.ws?.addEventListener('message', handler);\r\n this.ws?.send(JSON.stringify({ type: 'ping' }));\r\n });\r\n }\r\n\r\n // ==================== Private Methods ====================\r\n\r\n private setState(state: AISessionState): void {\r\n const previousState = this._state;\r\n this._state = state;\r\n this.emit('state.change', { state, previousState });\r\n }\r\n\r\n private async getAuthToken(tenant: TenantConfig): Promise<string> {\r\n const cached = this.tokenCache.get(tenant.tenantId);\r\n if (cached && cached.expiresAt > Date.now() + 60000) {\r\n return cached.token;\r\n }\r\n\r\n // If we have an auth token already, use it\r\n if (tenant.credentials.authToken) {\r\n return tenant.credentials.authToken;\r\n }\r\n\r\n // Skip auth for local dev (ws:// endpoints or localhost)\r\n // The simple voice-agent doesn't have an auth endpoint\r\n const endpoint = this.agentCoreConfig.endpoint;\r\n if (endpoint.startsWith('ws://') || endpoint.includes('localhost')) {\r\n return 'local-dev-token';\r\n }\r\n\r\n // Exchange credentials for token (production)\r\n const httpEndpoint = endpoint.replace('wss://', 'https://').replace('ws://', 'http://');\r\n const response = await fetch(`${httpEndpoint}/auth/token`, {\r\n method: 'POST',\r\n 
headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({\r\n tenantId: tenant.tenantId,\r\n apiKey: tenant.credentials.apiKey,\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n throw new Error(`Auth failed: ${response.statusText}`);\r\n }\r\n\r\n const { token, expiresIn } = await response.json();\r\n\r\n this.tokenCache.set(tenant.tenantId, {\r\n token,\r\n expiresAt: Date.now() + expiresIn * 1000,\r\n });\r\n\r\n return token;\r\n }\r\n\r\n private async initWhisper(): Promise<void> {\r\n // Initialize Whisper and Silero VAD in parallel\r\n await Promise.all([\r\n // Whisper ASR\r\n (async () => {\r\n this.whisper = new WhisperInference({\r\n model: 'tiny',\r\n device: 'auto',\r\n language: 'en',\r\n });\r\n await this.whisper.load();\r\n })(),\r\n // Silero VAD for accurate voice activity detection\r\n (async () => {\r\n this.vad = new SileroVADInference({\r\n modelUrl: '/models/silero-vad.onnx',\r\n backend: 'webgpu',\r\n sampleRate: 16000,\r\n threshold: 0.5,\r\n });\r\n await this.vad.load();\r\n })(),\r\n ]);\r\n }\r\n\r\n private async initLAM(): Promise<void> {\r\n // LAM (Lip Animation Model) based on wav2vec2\r\n // Outputs 52 ARKit blendshapes directly at 30fps - no PCA solver needed\r\n const lamUrl = this.agentCoreConfig.models?.lamUrl || '/models/unified_wav2vec2_asr_a2e.onnx';\r\n\r\n this.lam = new Wav2Vec2Inference({\r\n modelUrl: lamUrl,\r\n backend: 'auto',\r\n });\r\n\r\n await this.lam.load();\r\n\r\n // Initialize SyncedAudioPipeline for synchronized audio playback + LAM\r\n await this.initPipeline();\r\n }\r\n\r\n private async initPipeline(): Promise<void> {\r\n if (!this.lam) {\r\n throw new Error('LAM must be initialized before pipeline');\r\n }\r\n\r\n this.pipeline = new SyncedAudioPipeline({\r\n lam: this.lam,\r\n sampleRate: 16000,\r\n chunkTargetMs: 200,\r\n });\r\n\r\n await this.pipeline.initialize();\r\n\r\n // Subscribe to pipeline events\r\n this.pipeline.on('frame_ready', (frame: Float32Array) => {\r\n // Emit animation event with synchronized frame\r\n this.emit('animation', {\r\n blendshapes: frame,\r\n get: (name: string) => {\r\n const idx = (LAM_BLENDSHAPES as readonly string[]).indexOf(name);\r\n return idx >= 0 ? 
frame[idx] : 0;\r\n },\r\n timestamp: Date.now(), // Wall clock for client-side logging only\r\n inferenceMs: 0, // Pipeline handles LAM inference asynchronously\r\n });\r\n });\r\n\r\n this.pipeline.on('playback_complete', () => {\r\n this.isSpeaking = false;\r\n this.setState('idle');\r\n this.emit('audio.output.end', { durationMs: 0 });\r\n });\r\n\r\n this.pipeline.on('error', (error: Error) => {\r\n console.error('[AgentCore] Pipeline error:', error);\r\n this.emit('connection.error', {\r\n error,\r\n recoverable: true,\r\n });\r\n });\r\n }\r\n\r\n private async connectWebSocket(authToken: string, config: SessionConfig): Promise<void> {\r\n return new Promise((resolve, reject) => {\r\n const wsUrl = new URL(`${this.agentCoreConfig.endpoint.replace('http', 'ws')}/ws`);\r\n wsUrl.searchParams.set('sessionId', config.sessionId);\r\n wsUrl.searchParams.set('characterId', config.tenant.characterId);\r\n\r\n this.ws = new WebSocket(wsUrl.toString());\r\n\r\n this.ws.onopen = () => {\r\n // Send auth\r\n this.ws?.send(JSON.stringify({\r\n type: 'auth',\r\n token: authToken,\r\n tenantId: config.tenant.tenantId,\r\n systemPrompt: config.systemPrompt,\r\n }));\r\n };\r\n\r\n this.ws.onmessage = (event) => {\r\n this.handleAgentCoreMessage(JSON.parse(event.data));\r\n };\r\n\r\n this.ws.onerror = () => {\r\n reject(new Error('WebSocket connection failed'));\r\n };\r\n\r\n this.ws.onclose = (event) => {\r\n this.handleDisconnect(event);\r\n };\r\n\r\n // Wait for auth confirmation\r\n const authTimeout = setTimeout(() => {\r\n reject(new Error('Auth timeout'));\r\n }, 10000);\r\n\r\n const authHandler = (event: MessageEvent) => {\r\n const data = JSON.parse(event.data);\r\n if (data.type === 'auth_success') {\r\n clearTimeout(authTimeout);\r\n this.ws?.removeEventListener('message', authHandler);\r\n resolve();\r\n } else if (data.type === 'auth_failed') {\r\n clearTimeout(authTimeout);\r\n reject(new Error(data.message));\r\n }\r\n };\r\n\r\n this.ws.addEventListener('message', authHandler);\r\n });\r\n }\r\n\r\n private handleAgentCoreMessage(data: Record<string, unknown>): void {\r\n switch (data.type) {\r\n case 'response_start':\r\n this.setState('speaking');\r\n this.isSpeaking = true;\r\n this.emit('ai.response.start', {\r\n text: data.text as string | undefined,\r\n emotion: data.emotion as string | undefined,\r\n });\r\n // Update emotion state\r\n if (data.emotion) {\r\n this.emotionController.transitionTo(\r\n { [data.emotion as string]: 0.7 },\r\n 300\r\n );\r\n }\r\n // Start pipeline for synchronized playback\r\n if (this.pipeline) {\r\n this.pipeline.start();\r\n }\r\n break;\r\n\r\n case 'response_chunk':\r\n this.emit('ai.response.chunk', {\r\n text: data.text as string,\r\n isLast: data.isLast as boolean,\r\n });\r\n break;\r\n\r\n case 'audio_chunk':\r\n // TTS audio streamed from backend - feed to synchronized pipeline\r\n if (data.audio && this.pipeline) {\r\n const audioData = this.base64ToArrayBuffer(data.audio as string);\r\n const uint8 = new Uint8Array(audioData);\r\n this.pipeline.onAudioChunk(uint8).catch((error) => {\r\n console.error('[AgentCore] Pipeline chunk error:', error);\r\n });\r\n }\r\n break;\r\n\r\n case 'audio_end':\r\n // Signal end of audio stream to pipeline\r\n if (this.pipeline) {\r\n this.pipeline.end().catch((error) => {\r\n console.error('[AgentCore] Pipeline end error:', error);\r\n });\r\n }\r\n // Note: isSpeaking and state will be set to idle by pipeline.playback_complete event\r\n break;\r\n\r\n case 'response_end':\r\n 
this.addToHistory({\r\n role: 'assistant',\r\n content: data.fullText as string,\r\n timestamp: Date.now(),\r\n emotion: data.emotion as string | undefined,\r\n });\r\n this.emit('ai.response.end', {\r\n fullText: data.fullText as string,\r\n durationMs: data.durationMs as number || 0,\r\n });\r\n break;\r\n\r\n case 'memory_updated':\r\n this.emit('memory.updated', {\r\n messageCount: data.messageCount as number,\r\n tokenCount: data.tokenCount as number | undefined,\r\n });\r\n break;\r\n\r\n case 'error':\r\n this.emit('connection.error', {\r\n error: new Error(data.message as string),\r\n recoverable: (data.recoverable as boolean) ?? false,\r\n });\r\n break;\r\n }\r\n }\r\n\r\n private scheduleTranscription(): void {\r\n // No debounce - transcribe immediately when we have enough audio\r\n // This reduces latency significantly (was adding 100ms delay)\r\n\r\n if (this.audioBuffer.length === 0) return;\r\n\r\n // Concatenate buffered audio\r\n const totalLength = this.audioBuffer.reduce((sum, buf) => sum + buf.length, 0);\r\n\r\n // Need minimum samples for Whisper (250ms instead of 1 sec)\r\n // Shorter buffer = faster response time\r\n if (totalLength < 4000) return; // 250ms at 16kHz (was 16000 = 1sec)\r\n\r\n const audio = new Float32Array(totalLength);\r\n let offset = 0;\r\n for (const buf of this.audioBuffer) {\r\n audio.set(buf, offset);\r\n offset += buf.length;\r\n }\r\n this.audioBuffer = [];\r\n\r\n // Check for actual audio content (not silence/blank audio)\r\n // This prevents [BLANK_AUDIO] transcriptions\r\n let sum = 0;\r\n for (let i = 0; i < audio.length; i++) {\r\n sum += audio[i] * audio[i];\r\n }\r\n const rms = Math.sqrt(sum / audio.length);\r\n\r\n // Skip silent audio (too low energy)\r\n if (rms < 0.01) {\r\n console.debug('[AgentCore] Skipping silent audio', { rms, samples: audio.length });\r\n return;\r\n }\r\n\r\n // Transcribe with Whisper\r\n if (this.whisper) {\r\n this.setState('listening');\r\n this.emit('user.speech.start', { timestamp: Date.now() });\r\n\r\n this.whisper.transcribe(audio).then((result) => {\r\n this.emit('user.transcript.final', {\r\n text: result.text,\r\n confidence: 1.0,\r\n });\r\n this.emit('user.speech.end', { timestamp: Date.now(), durationMs: result.inferenceTimeMs });\r\n\r\n // Send to AgentCore (skip [BLANK_AUDIO] or empty transcriptions)\r\n const cleanText = result.text.trim();\r\n if (cleanText && !cleanText.includes('[BLANK_AUDIO]')) {\r\n this.sendText(cleanText).catch((error) => {\r\n console.error('[AgentCore] Send text error:', error);\r\n });\r\n }\r\n }).catch((error) => {\r\n console.error('[AgentCore] Transcription error:', error);\r\n });\r\n }\r\n }\r\n\r\n // REMOVED: processAudioForAnimation() - now handled by SyncedAudioPipeline\r\n // The pipeline manages audio scheduling, LAM inference, and frame synchronization\r\n // Frames are emitted via pipeline.on('frame_ready') event (see initPipeline())\r\n\r\n /**\r\n * Detect voice activity using Silero VAD\r\n * Falls back to simple RMS if VAD not available\r\n */\r\n private async detectVoiceActivity(audio: Int16Array | Float32Array): Promise<boolean> {\r\n // Convert to Float32 if needed\r\n const float32 = audio instanceof Float32Array\r\n ? 
audio\r\n : this.int16ToFloat32(audio);\r\n\r\n // Use Silero VAD if available (much more accurate)\r\n if (this.vad) {\r\n // Silero VAD requires 512-sample chunks (32ms at 16kHz)\r\n const chunkSize = this.vad.getChunkSize();\r\n\r\n // Process available chunks\r\n for (let i = 0; i + chunkSize <= float32.length; i += chunkSize) {\r\n const chunk = float32.slice(i, i + chunkSize);\r\n const result = await this.vad.process(chunk);\r\n\r\n // If any chunk has speech, return true\r\n if (result.isSpeech) {\r\n return true;\r\n }\r\n }\r\n\r\n return false;\r\n }\r\n\r\n // Fallback: Simple RMS-based detection (less accurate)\r\n let sum = 0;\r\n for (let i = 0; i < float32.length; i++) {\r\n sum += float32[i] * float32[i];\r\n }\r\n const rms = Math.sqrt(sum / float32.length);\r\n return rms > 0.02;\r\n }\r\n\r\n private int16ToFloat32(int16: Int16Array): Float32Array {\r\n const float32 = new Float32Array(int16.length);\r\n for (let i = 0; i < int16.length; i++) {\r\n float32[i] = int16[i] / 32768;\r\n }\r\n return float32;\r\n }\r\n\r\n private base64ToArrayBuffer(base64: string): ArrayBuffer {\r\n const binaryString = atob(base64);\r\n const bytes = new Uint8Array(binaryString.length);\r\n for (let i = 0; i < binaryString.length; i++) {\r\n bytes[i] = binaryString.charCodeAt(i);\r\n }\r\n return bytes.buffer;\r\n }\r\n\r\n private addToHistory(message: ConversationMessage): void {\r\n this.history.push(message);\r\n this.emit('memory.updated', { messageCount: this.history.length });\r\n }\r\n\r\n private handleDisconnect(event: CloseEvent): void {\r\n this._isConnected = false;\r\n\r\n if (event.code !== 1000) {\r\n // Abnormal close - attempt reconnect\r\n if (this.wsReconnectAttempts < this.maxReconnectAttempts) {\r\n this.wsReconnectAttempts++;\r\n setTimeout(() => {\r\n if (this.currentConfig) {\r\n this.connect(this.currentConfig).catch(() => {\r\n // Will retry if fails\r\n });\r\n }\r\n }, Math.pow(2, this.wsReconnectAttempts) * 1000);\r\n } else {\r\n this.setState('error');\r\n this.emit('connection.error', {\r\n error: new Error('Max reconnection attempts reached'),\r\n recoverable: false,\r\n });\r\n }\r\n }\r\n\r\n this.emit('connection.closed', { reason: event.reason || 'Connection closed' });\r\n }\r\n}\r\n","/**\n * Conversation Orchestrator\n *\n * Manages the conversation pipeline with AgentCore:\n * - Handles session lifecycle and tenant isolation\n * - Manages adapter events and state\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\nimport type {\n AIAdapter,\n AIAdapterEvents,\n SessionConfig,\n TenantConfig,\n ConversationMessage,\n AISessionState,\n} from '../interfaces/AIAdapter';\nimport type { ConversationSession, SessionSnapshot } from '../interfaces/ConversationSession';\nimport { AgentCoreAdapter, type AgentCoreConfig } from '../adapters/AgentCoreAdapter';\nimport { EmotionController, type EmotionWeights } from '../../emotion/Emotion';\n\n/**\n * Orchestrator configuration\n */\nexport interface OrchestratorConfig {\n /** AgentCore adapter config */\n adapter: AgentCoreConfig;\n /** Connection timeout in ms */\n connectionTimeoutMs?: number;\n /** Max retry attempts */\n maxRetries?: number;\n}\n\n/**\n * Orchestrator events (extends AI adapter events)\n */\nexport interface OrchestratorEvents extends AIAdapterEvents {\n 'session.created': { sessionId: string; tenantId: string };\n 'session.ended': { sessionId: string; reason: string };\n}\n\n/**\n * Internal session implementation\n */\nclass ConversationSessionImpl 
implements ConversationSession {\n readonly sessionId: string;\n readonly createdAt: number;\n\n private _adapter: AIAdapter;\n private _config: SessionConfig;\n private _history: ConversationMessage[] = [];\n private _context = new Map<string, string>();\n private _emotionController: EmotionController;\n private _lastActivityAt: number;\n\n constructor(\n config: SessionConfig,\n adapter: AIAdapter,\n ) {\n this.sessionId = config.sessionId;\n this._config = config;\n this._adapter = adapter;\n this.createdAt = Date.now();\n this._lastActivityAt = Date.now();\n this._emotionController = new EmotionController();\n\n if (config.emotion) {\n this._emotionController.setPreset(config.emotion as Parameters<typeof this._emotionController.setPreset>[0]);\n }\n }\n\n get adapter(): AIAdapter {\n return this._adapter;\n }\n\n get config(): SessionConfig {\n return this._config;\n }\n\n get state(): AISessionState {\n return this._adapter.state;\n }\n\n get history(): ConversationMessage[] {\n return [...this._history];\n }\n\n get emotion(): EmotionWeights {\n return {};\n }\n\n get lastActivityAt(): number {\n return this._lastActivityAt;\n }\n\n async start(): Promise<void> {\n await this._adapter.connect(this._config);\n this._lastActivityAt = Date.now();\n }\n\n async end(): Promise<void> {\n await this._adapter.disconnect();\n }\n\n pushAudio(audio: Int16Array | Float32Array): void {\n this._adapter.pushAudio(audio);\n this._lastActivityAt = Date.now();\n }\n\n async sendText(text: string): Promise<void> {\n await this._adapter.sendText(text);\n this._lastActivityAt = Date.now();\n }\n\n interrupt(): void {\n this._adapter.interrupt();\n this._lastActivityAt = Date.now();\n }\n\n setEmotion(emotion: EmotionWeights): void {\n this._emotionController.set(emotion);\n }\n\n addContext(key: string, value: string): void {\n this._context.set(key, value);\n }\n\n removeContext(key: string): void {\n this._context.delete(key);\n }\n\n getContext(): Record<string, string> {\n return Object.fromEntries(this._context);\n }\n\n export(): SessionSnapshot {\n return {\n sessionId: this.sessionId,\n tenantId: this._config.tenant.tenantId,\n characterId: this._config.tenant.characterId,\n history: this._history,\n context: Object.fromEntries(this._context),\n emotion: this.emotion,\n createdAt: this.createdAt,\n lastActivityAt: this._lastActivityAt,\n };\n }\n\n import(snapshot: SessionSnapshot): void {\n this._history = [...snapshot.history];\n this._context = new Map(Object.entries(snapshot.context));\n this._lastActivityAt = snapshot.lastActivityAt;\n }\n\n syncHistory(): void {\n this._history = this._adapter.getHistory();\n }\n}\n\n/**\n * Conversation Orchestrator\n */\nexport class ConversationOrchestrator extends EventEmitter<OrchestratorEvents> {\n private config: Required<OrchestratorConfig>;\n\n // Adapter\n private adapter: AgentCoreAdapter;\n\n // Sessions per tenant\n private sessions = new Map<string, ConversationSessionImpl>();\n\n // Tenant configurations\n private tenants = new Map<string, TenantConfig>();\n\n // Health monitoring\n private healthCheckInterval: ReturnType<typeof setInterval> | null = null;\n private readonly HEALTH_CHECK_INTERVAL_MS = 30000;\n\n constructor(config: OrchestratorConfig) {\n super();\n this.config = {\n connectionTimeoutMs: 5000,\n maxRetries: 3,\n ...config,\n };\n\n // Initialize adapter\n this.adapter = new AgentCoreAdapter(config.adapter);\n }\n\n /**\n * Register a tenant\n */\n registerTenant(tenant: TenantConfig): void {\n 
this.tenants.set(tenant.tenantId, tenant);\n }\n\n /**\n * Unregister a tenant\n */\n unregisterTenant(tenantId: string): void {\n this.tenants.delete(tenantId);\n }\n\n /**\n * Get tenant config\n */\n getTenant(tenantId: string): TenantConfig | undefined {\n return this.tenants.get(tenantId);\n }\n\n /**\n * Create a new conversation session for a tenant\n */\n async createSession(\n tenantId: string,\n options: Partial<SessionConfig> = {}\n ): Promise<ConversationSession> {\n const tenant = this.tenants.get(tenantId);\n if (!tenant) {\n throw new Error(`Tenant not found: ${tenantId}`);\n }\n\n const sessionId = options.sessionId || this.generateSessionId();\n\n const sessionConfig: SessionConfig = {\n sessionId,\n tenant,\n systemPrompt: options.systemPrompt,\n voice: options.voice,\n emotion: options.emotion,\n language: options.language,\n };\n\n const session = new ConversationSessionImpl(sessionConfig, this.adapter);\n\n this.sessions.set(sessionId, session);\n\n // Forward adapter events\n this.forwardAdapterEvents(this.adapter, sessionId);\n\n // Connect the session\n await session.start();\n\n this.emit('session.created', { sessionId, tenantId });\n\n return session;\n }\n\n /**\n * End a session\n */\n async endSession(sessionId: string): Promise<void> {\n const session = this.sessions.get(sessionId);\n if (session) {\n await session.end();\n this.sessions.delete(sessionId);\n this.emit('session.ended', { sessionId, reason: 'Client requested' });\n }\n }\n\n /**\n * Get session by ID\n */\n getSession(sessionId: string): ConversationSession | undefined {\n return this.sessions.get(sessionId);\n }\n\n /**\n * Get all sessions for a tenant\n */\n getTenantSessions(tenantId: string): ConversationSession[] {\n return Array.from(this.sessions.values())\n .filter(s => s.config.tenant.tenantId === tenantId);\n }\n\n /**\n * Start health monitoring\n */\n startHealthMonitoring(): void {\n if (this.healthCheckInterval) return;\n\n this.healthCheckInterval = setInterval(async () => {\n await this.performHealthCheck();\n }, this.HEALTH_CHECK_INTERVAL_MS);\n }\n\n /**\n * Stop health monitoring\n */\n stopHealthMonitoring(): void {\n if (this.healthCheckInterval) {\n clearInterval(this.healthCheckInterval);\n this.healthCheckInterval = null;\n }\n }\n\n /**\n * Dispose all resources\n */\n async dispose(): Promise<void> {\n this.stopHealthMonitoring();\n\n // End all sessions\n const endPromises = Array.from(this.sessions.values()).map(s => s.end());\n await Promise.all(endPromises);\n this.sessions.clear();\n\n // Disconnect adapter\n await this.adapter.disconnect();\n }\n\n // ==================== Private Methods ====================\n\n private generateSessionId(): string {\n return `sess_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;\n }\n\n private forwardAdapterEvents(adapter: AIAdapter, sessionId: string): void {\n // Forward key events with session context\n const events: (keyof AIAdapterEvents)[] = [\n 'state.change',\n 'user.speech.start',\n 'user.speech.end',\n 'user.transcript.partial',\n 'user.transcript.final',\n 'ai.thinking.start',\n 'ai.response.start',\n 'ai.response.chunk',\n 'ai.response.end',\n 'audio.output.chunk',\n 'audio.output.end',\n 'animation',\n 'memory.updated',\n 'connection.error',\n 'interruption.detected',\n 'interruption.handled',\n ];\n\n for (const event of events) {\n adapter.on(event, (data) => {\n const eventData = data as Record<string, unknown>;\n this.emit(event, { ...eventData, sessionId } as AIAdapterEvents[typeof event]);\n });\n 
}\n }\n\n private async performHealthCheck(): Promise<void> {\n try {\n await this.adapter.healthCheck();\n } catch {\n // Adapter health check failed\n }\n }\n}\n","/**\n * Tenant Manager\n *\n * Handles multi-tenant isolation for the Omote Platform:\n * - Credential isolation per tenant\n * - Session scoping per tenant\n * - Quota management\n * - Token refresh\n *\n * @category AI\n */\n\nimport type { TenantConfig } from '../interfaces/AIAdapter';\n\n/**\n * Tenant quota configuration\n */\nexport interface TenantQuota {\n /** Max concurrent sessions */\n maxSessions: number;\n /** Requests per minute */\n requestsPerMinute: number;\n /** Max tokens per conversation */\n maxTokensPerConversation: number;\n /** Max audio minutes per day */\n maxAudioMinutesPerDay: number;\n}\n\n/**\n * Tenant usage tracking\n */\nexport interface TenantUsage {\n /** Current active sessions */\n currentSessions: number;\n /** Requests in current minute */\n requestsThisMinute: number;\n /** Total tokens used */\n tokensUsed: number;\n /** Audio minutes used today */\n audioMinutesToday: number;\n /** Last reset timestamp */\n lastMinuteReset: number;\n /** Last daily reset timestamp */\n lastDailyReset: number;\n}\n\n/**\n * Token refresh callback\n */\nexport type TokenRefreshCallback = () => Promise<string>;\n\n/**\n * Tenant Manager\n */\nexport class TenantManager {\n private tenants = new Map<string, TenantConfig>();\n private quotas = new Map<string, TenantQuota>();\n private usage = new Map<string, TenantUsage>();\n private tokenRefreshCallbacks = new Map<string, TokenRefreshCallback>();\n\n /**\n * Default quota for new tenants\n */\n static readonly DEFAULT_QUOTA: TenantQuota = {\n maxSessions: 10,\n requestsPerMinute: 60,\n maxTokensPerConversation: 100000,\n maxAudioMinutesPerDay: 60,\n };\n\n /**\n * Register a tenant with quota\n */\n register(\n tenant: TenantConfig,\n quota: TenantQuota = TenantManager.DEFAULT_QUOTA,\n tokenRefreshCallback?: TokenRefreshCallback\n ): void {\n this.tenants.set(tenant.tenantId, tenant);\n this.quotas.set(tenant.tenantId, quota);\n this.usage.set(tenant.tenantId, {\n currentSessions: 0,\n requestsThisMinute: 0,\n tokensUsed: 0,\n audioMinutesToday: 0,\n lastMinuteReset: Date.now(),\n lastDailyReset: Date.now(),\n });\n\n if (tokenRefreshCallback) {\n this.tokenRefreshCallbacks.set(tenant.tenantId, tokenRefreshCallback);\n }\n }\n\n /**\n * Unregister a tenant\n */\n unregister(tenantId: string): void {\n this.tenants.delete(tenantId);\n this.quotas.delete(tenantId);\n this.usage.delete(tenantId);\n this.tokenRefreshCallbacks.delete(tenantId);\n }\n\n /**\n * Get tenant config\n */\n get(tenantId: string): TenantConfig | undefined {\n return this.tenants.get(tenantId);\n }\n\n /**\n * Check if tenant exists\n */\n has(tenantId: string): boolean {\n return this.tenants.has(tenantId);\n }\n\n /**\n * Get all tenant IDs\n */\n getTenantIds(): string[] {\n return Array.from(this.tenants.keys());\n }\n\n /**\n * Check if tenant can create new session\n */\n canCreateSession(tenantId: string): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n return usage.currentSessions < quota.maxSessions;\n }\n\n /**\n * Check if tenant can make request\n */\n canMakeRequest(tenantId: string): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n // Auto-reset minute counter if needed\n 
this.checkMinuteReset(tenantId);\n\n return usage.requestsThisMinute < quota.requestsPerMinute;\n }\n\n /**\n * Check if tenant can use audio\n */\n canUseAudio(tenantId: string, minutes: number): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n // Auto-reset daily counter if needed\n this.checkDailyReset(tenantId);\n\n return usage.audioMinutesToday + minutes <= quota.maxAudioMinutesPerDay;\n }\n\n /**\n * Increment session count\n */\n incrementSessions(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.currentSessions++;\n }\n }\n\n /**\n * Decrement session count\n */\n decrementSessions(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage && usage.currentSessions > 0) {\n usage.currentSessions--;\n }\n }\n\n /**\n * Record a request\n */\n recordRequest(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n this.checkMinuteReset(tenantId);\n usage.requestsThisMinute++;\n }\n }\n\n /**\n * Record token usage\n */\n recordTokens(tenantId: string, tokens: number): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.tokensUsed += tokens;\n }\n }\n\n /**\n * Record audio usage\n */\n recordAudioMinutes(tenantId: string, minutes: number): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n this.checkDailyReset(tenantId);\n usage.audioMinutesToday += minutes;\n }\n }\n\n /**\n * Get fresh auth token for tenant\n */\n async getAuthToken(tenantId: string): Promise<string> {\n const tenant = this.tenants.get(tenantId);\n if (!tenant) {\n throw new Error(`Tenant not found: ${tenantId}`);\n }\n\n // Check if we have a refresh callback\n const callback = this.tokenRefreshCallbacks.get(tenantId);\n if (callback) {\n const token = await callback();\n tenant.credentials.authToken = token;\n return token;\n }\n\n // Return existing token\n if (tenant.credentials.authToken) {\n return tenant.credentials.authToken;\n }\n\n throw new Error(`No auth token available for tenant: ${tenantId}`);\n }\n\n /**\n * Update tenant credentials\n */\n updateCredentials(tenantId: string, credentials: Partial<TenantConfig['credentials']>): void {\n const tenant = this.tenants.get(tenantId);\n if (tenant) {\n tenant.credentials = { ...tenant.credentials, ...credentials };\n }\n }\n\n /**\n * Get usage stats for tenant\n */\n getUsage(tenantId: string): TenantUsage | undefined {\n return this.usage.get(tenantId);\n }\n\n /**\n * Get quota for tenant\n */\n getQuota(tenantId: string): TenantQuota | undefined {\n return this.quotas.get(tenantId);\n }\n\n /**\n * Update quota for tenant\n */\n updateQuota(tenantId: string, quota: Partial<TenantQuota>): void {\n const existing = this.quotas.get(tenantId);\n if (existing) {\n this.quotas.set(tenantId, { ...existing, ...quota });\n }\n }\n\n /**\n * Reset all usage stats for a tenant\n */\n resetUsage(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.requestsThisMinute = 0;\n usage.tokensUsed = 0;\n usage.audioMinutesToday = 0;\n usage.lastMinuteReset = Date.now();\n usage.lastDailyReset = Date.now();\n }\n }\n\n // ==================== Private Methods ====================\n\n private checkMinuteReset(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (!usage) return;\n\n const now = Date.now();\n if (now - usage.lastMinuteReset >= 60000) {\n usage.requestsThisMinute = 0;\n usage.lastMinuteReset = 
now;\n }\n }\n\n private checkDailyReset(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (!usage) return;\n\n const now = Date.now();\n const MS_PER_DAY = 24 * 60 * 60 * 1000;\n if (now - usage.lastDailyReset >= MS_PER_DAY) {\n usage.audioMinutesToday = 0;\n usage.lastDailyReset = now;\n }\n }\n}\n","/**\n * Audio Sync Manager\n *\n * Synchronizes TTS audio playback with lip sync animation:\n * - Buffers audio for inference\n * - Manages playback timing\n * - Handles audio queue for streaming\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\n\n/**\n * Audio sync events\n */\nexport interface AudioSyncEvents {\n [key: string]: unknown;\n 'buffer.ready': { audio: Float32Array };\n 'playback.start': Record<string, never>;\n 'playback.end': Record<string, never>;\n 'sync.drift': { driftMs: number };\n}\n\n/**\n * Audio sync configuration\n */\nexport interface AudioSyncConfig {\n /** Target sample rate (default: 16000) */\n sampleRate?: number;\n /** Buffer size for inference (default: 16640) */\n bufferSize?: number;\n /** Overlap between buffers (default: 4160) */\n overlapSize?: number;\n /** Max drift before correction (default: 100ms) */\n maxDriftMs?: number;\n}\n\n/**\n * Audio Sync Manager\n */\nexport class AudioSyncManager extends EventEmitter<AudioSyncEvents> {\n private config: Required<AudioSyncConfig>;\n private audioBuffer: Float32Array;\n private bufferPosition = 0;\n private playbackQueue: Float32Array[] = [];\n private isPlaying = false;\n private audioContext: AudioContext | null = null;\n private playbackStartTime = 0;\n private samplesPlayed = 0;\n\n constructor(config: AudioSyncConfig = {}) {\n super();\n this.config = {\n sampleRate: 16000,\n bufferSize: 16640,\n overlapSize: 4160,\n maxDriftMs: 100,\n ...config,\n };\n\n this.audioBuffer = new Float32Array(this.config.bufferSize);\n }\n\n /**\n * Initialize audio context\n */\n async initialize(): Promise<void> {\n if (!this.audioContext) {\n this.audioContext = new AudioContext({ sampleRate: this.config.sampleRate });\n }\n\n if (this.audioContext.state === 'suspended') {\n await this.audioContext.resume();\n }\n }\n\n /**\n * Push audio chunk for processing and playback\n */\n pushAudio(audio: Float32Array): void {\n // Add to playback queue\n this.playbackQueue.push(audio);\n\n // Buffer for inference\n this.bufferForInference(audio);\n\n // Start playback if not playing\n if (!this.isPlaying && this.playbackQueue.length > 0) {\n this.startPlayback();\n }\n }\n\n /**\n * Buffer audio for inference\n */\n private bufferForInference(audio: Float32Array): void {\n let offset = 0;\n\n while (offset < audio.length) {\n const remaining = this.config.bufferSize - this.bufferPosition;\n const toCopy = Math.min(remaining, audio.length - offset);\n\n this.audioBuffer.set(audio.subarray(offset, offset + toCopy), this.bufferPosition);\n this.bufferPosition += toCopy;\n offset += toCopy;\n\n // Buffer full - emit for processing\n if (this.bufferPosition >= this.config.bufferSize) {\n this.emit('buffer.ready', { audio: new Float32Array(this.audioBuffer) });\n\n // Shift buffer with overlap for continuity\n const overlapStart = this.config.bufferSize - this.config.overlapSize;\n this.audioBuffer.copyWithin(0, overlapStart);\n this.bufferPosition = this.config.overlapSize;\n }\n }\n }\n\n /**\n * Start audio playback\n */\n private async startPlayback(): Promise<void> {\n if (!this.audioContext || this.isPlaying) return;\n\n this.isPlaying = true;\n 
this.playbackStartTime = this.audioContext.currentTime;\n this.samplesPlayed = 0;\n\n this.emit('playback.start', {});\n\n await this.processPlaybackQueue();\n }\n\n /**\n * Process playback queue\n */\n private async processPlaybackQueue(): Promise<void> {\n if (!this.audioContext) return;\n\n while (this.playbackQueue.length > 0) {\n const audio = this.playbackQueue.shift()!;\n\n // Create buffer and source\n const buffer = this.audioContext.createBuffer(1, audio.length, this.config.sampleRate);\n buffer.copyToChannel(audio, 0);\n\n const source = this.audioContext.createBufferSource();\n source.buffer = buffer;\n source.connect(this.audioContext.destination);\n\n // Calculate when to play\n const playTime = this.playbackStartTime + this.samplesPlayed / this.config.sampleRate;\n source.start(playTime);\n\n this.samplesPlayed += audio.length;\n\n // Check for drift\n this.checkDrift();\n\n // Wait for chunk to finish before processing next\n await new Promise(resolve => {\n source.onended = resolve;\n });\n }\n\n this.isPlaying = false;\n this.emit('playback.end', {});\n }\n\n /**\n * Check for audio/animation drift\n */\n private checkDrift(): void {\n if (!this.audioContext) return;\n\n const expectedTime = this.playbackStartTime + this.samplesPlayed / this.config.sampleRate;\n const actualTime = this.audioContext.currentTime;\n const driftMs = (actualTime - expectedTime) * 1000;\n\n if (Math.abs(driftMs) > this.config.maxDriftMs) {\n this.emit('sync.drift', { driftMs });\n }\n }\n\n /**\n * Clear playback queue\n */\n clearQueue(): void {\n this.playbackQueue = [];\n this.bufferPosition = 0;\n this.audioBuffer.fill(0);\n }\n\n /**\n * Stop playback\n */\n stop(): void {\n this.clearQueue();\n this.isPlaying = false;\n }\n\n /**\n * Get current playback position in seconds\n */\n getPlaybackPosition(): number {\n if (!this.audioContext) return 0;\n return this.audioContext.currentTime - this.playbackStartTime;\n }\n\n /**\n * Check if currently playing\n */\n getIsPlaying(): boolean {\n return this.isPlaying;\n }\n\n /**\n * Dispose resources\n */\n dispose(): void {\n this.stop();\n this.audioContext?.close();\n this.audioContext = null;\n }\n}\n","/**\n * Interruption Handler\n *\n * VAD-based interruption detection for AI conversations:\n * - Monitors user audio for speech\n * - Detects when user interrupts AI response\n * - Triggers interruption callbacks\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\n\n/**\n * Interruption events\n */\nexport interface InterruptionEvents {\n [key: string]: unknown;\n 'speech.detected': { rms: number };\n 'speech.ended': { durationMs: number };\n 'interruption.triggered': { rms: number; durationMs: number };\n}\n\n/**\n * Interruption handler configuration\n *\n * Industry standards applied:\n * - vadThreshold: 0.5 (Silero VAD default)\n * - minSpeechDurationMs: 200ms (Google/Amazon barge-in standard)\n * - silenceTimeoutMs: 500ms (OpenAI Realtime API standard)\n */\nexport interface InterruptionConfig {\n /** VAD probability threshold for speech detection (default: 0.5, Silero standard) */\n vadThreshold?: number;\n /** Minimum speech duration to trigger interruption (default: 200ms, Google/Amazon standard) */\n minSpeechDurationMs?: number;\n /** Silence duration to end speech (default: 500ms, OpenAI standard) */\n silenceTimeoutMs?: number;\n /** Enable interruption detection (default: true) */\n enabled?: boolean;\n}\n\n/**\n * Interruption Handler\n */\nexport class InterruptionHandler extends 
EventEmitter<InterruptionEvents> {\n private config: Required<InterruptionConfig>;\n private isSpeaking = false;\n private speechStartTime = 0;\n private lastSpeechTime = 0;\n private silenceTimer: ReturnType<typeof setTimeout> | null = null;\n private aiIsSpeaking = false;\n\n // Debouncing: only emit one interruption per speech session\n private interruptionTriggeredThisSession = false;\n\n constructor(config: InterruptionConfig = {}) {\n super();\n this.config = {\n vadThreshold: 0.5, // Silero VAD default\n minSpeechDurationMs: 200, // Google/Amazon barge-in standard\n silenceTimeoutMs: 500, // OpenAI Realtime API standard\n enabled: true,\n ...config,\n };\n }\n\n /**\n * Process VAD result for interruption detection\n * @param vadProbability - Speech probability from VAD (0-1)\n * @param audioEnergy - Optional RMS energy for logging (default: 0)\n */\n processVADResult(vadProbability: number, audioEnergy: number = 0): void {\n if (!this.config.enabled) return;\n\n if (vadProbability > this.config.vadThreshold) {\n this.onSpeechDetected(audioEnergy || vadProbability);\n } else {\n this.onSilenceDetected();\n }\n }\n\n /**\n * @deprecated Use processVADResult() instead. This method uses naive RMS detection.\n * Process audio samples for VAD (legacy - uses simple RMS)\n */\n processAudio(samples: Float32Array | Int16Array): void {\n if (!this.config.enabled) return;\n\n const rms = this.calculateRMS(samples);\n\n // Use RMS as proxy for VAD probability (less accurate)\n // RMS > 0.02 roughly maps to speech probability > 0.5\n const vadProbability = Math.min(rms / 0.02, 1.0);\n\n if (vadProbability > this.config.vadThreshold) {\n this.onSpeechDetected(rms);\n } else {\n this.onSilenceDetected();\n }\n }\n\n /**\n * Notify that AI started speaking\n */\n setAISpeaking(speaking: boolean): void {\n this.aiIsSpeaking = speaking;\n }\n\n /**\n * Enable/disable interruption detection\n */\n setEnabled(enabled: boolean): void {\n this.config.enabled = enabled;\n if (!enabled) {\n this.reset();\n }\n }\n\n /**\n * Update configuration\n */\n updateConfig(config: Partial<InterruptionConfig>): void {\n this.config = { ...this.config, ...config };\n }\n\n /**\n * Reset state\n */\n reset(): void {\n this.isSpeaking = false;\n this.speechStartTime = 0;\n this.lastSpeechTime = 0;\n this.interruptionTriggeredThisSession = false;\n if (this.silenceTimer) {\n clearTimeout(this.silenceTimer);\n this.silenceTimer = null;\n }\n }\n\n /**\n * Get current state\n */\n getState(): { isSpeaking: boolean; speechDurationMs: number } {\n return {\n isSpeaking: this.isSpeaking,\n speechDurationMs: this.isSpeaking ? Date.now() - this.speechStartTime : 0,\n };\n }\n\n // ==================== Private Methods ====================\n\n private calculateRMS(samples: Float32Array | Int16Array): number {\n let sum = 0;\n const scale = samples instanceof Int16Array ? 
32768 : 1;\n\n for (let i = 0; i < samples.length; i++) {\n const sample = samples[i] / scale;\n sum += sample * sample;\n }\n\n return Math.sqrt(sum / samples.length);\n }\n\n private onSpeechDetected(rms: number): void {\n const now = Date.now();\n this.lastSpeechTime = now;\n\n // Clear silence timer\n if (this.silenceTimer) {\n clearTimeout(this.silenceTimer);\n this.silenceTimer = null;\n }\n\n // Start of speech\n if (!this.isSpeaking) {\n this.isSpeaking = true;\n this.speechStartTime = now;\n this.emit('speech.detected', { rms });\n }\n\n // Check for interruption (only emit ONCE per speech session)\n if (this.aiIsSpeaking && !this.interruptionTriggeredThisSession) {\n const speechDuration = now - this.speechStartTime;\n if (speechDuration >= this.config.minSpeechDurationMs) {\n this.interruptionTriggeredThisSession = true;\n this.emit('interruption.triggered', { rms, durationMs: speechDuration });\n }\n }\n }\n\n private onSilenceDetected(): void {\n if (!this.isSpeaking) return;\n\n // Start silence timer\n if (!this.silenceTimer) {\n this.silenceTimer = setTimeout(() => {\n const durationMs = this.lastSpeechTime - this.speechStartTime;\n this.isSpeaking = false;\n this.silenceTimer = null;\n // Reset interruption flag for next speech session\n this.interruptionTriggeredThisSession = false;\n this.emit('speech.ended', { durationMs });\n }, this.config.silenceTimeoutMs);\n }\n }\n}\n","/**\n * HuggingFace CDN Utilities\n *\n * Helper functions for working with HuggingFace CDN URLs.\n * Used by transformers.js models (Whisper, etc.) for model downloads.\n *\n * @category Cache\n */\n\n/**\n * Test URL for HuggingFace CDN reachability check.\n * Uses a small, stable file from a well-known public model.\n */\nexport const HF_CDN_TEST_URL =\n 'https://huggingface.co/Xenova/whisper-tiny/resolve/main/config.json';\n\n/**\n * Parsed HuggingFace URL components\n */\nexport interface HuggingFaceUrlInfo {\n /** Organization or username */\n org: string;\n /** Model name */\n model: string;\n /** Branch, tag, or commit */\n branch: string;\n /** File path within the repository */\n file: string;\n}\n\n/**\n * Parse a HuggingFace CDN URL into its components\n *\n * @param url - The HuggingFace URL to parse\n * @returns Parsed URL info or null if not a valid HF URL\n *\n * @example\n * ```typescript\n * const info = parseHuggingFaceUrl(\n * 'https://huggingface.co/openai/whisper-tiny/resolve/main/model.onnx'\n * );\n * // Returns: { org: 'openai', model: 'whisper-tiny', branch: 'main', file: 'model.onnx' }\n * ```\n */\nexport function parseHuggingFaceUrl(url: string): HuggingFaceUrlInfo | null {\n // Pattern: https://huggingface.co/{org}/{model}/resolve/{branch}/{file...}\n const pattern = /^https:\\/\\/huggingface\\.co\\/([^/]+)\\/([^/]+)\\/resolve\\/([^/]+)\\/(.+)$/;\n const match = url.match(pattern);\n\n if (!match) {\n return null;\n }\n\n return {\n org: match[1],\n model: match[2],\n branch: match[3],\n file: match[4],\n };\n}\n\n/**\n * Check if HuggingFace CDN is reachable\n *\n * Performs a HEAD request to a known HuggingFace model file to verify\n * connectivity. 
Useful for offline detection or network diagnostics.\n *\n * @param testUrl - Optional custom URL to test (defaults to HF_CDN_TEST_URL)\n * @returns True if CDN is reachable, false otherwise\n *\n * @example\n * ```typescript\n * import { isHuggingFaceCDNReachable } from '@omote/core';\n *\n * const reachable = await isHuggingFaceCDNReachable();\n * if (!reachable) {\n * console.log('HuggingFace CDN unreachable - running offline?');\n * // Fall back to cached models or show error\n * }\n * ```\n */\nexport async function isHuggingFaceCDNReachable(testUrl: string = HF_CDN_TEST_URL): Promise<boolean> {\n try {\n const response = await fetch(testUrl, {\n method: 'HEAD',\n cache: 'no-store', // Don't use cached response for reachability check\n });\n\n return response.ok;\n } catch {\n // Network error, timeout, or CORS issue\n return false;\n }\n}\n","/**\n * Utility to clear transformers.js Cache API storage\n *\n * Problem: transformers.js v4 uses Browser Cache API which persists across hard refreshes.\n * If an HTML error page gets cached (due to network errors, CDN issues, or dev server restarts),\n * it will be served instead of JSON files, causing JSON.parse() errors.\n *\n * Solution: Manually clear Cache API storage before loading models.\n *\n * @module utils/transformersCacheClear\n */\n\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('TransformersCache');\n\n/**\n * Clear all transformers.js and HuggingFace caches from Browser Cache API\n *\n * This clears:\n * - transformers-cache (default cache key)\n * - Any caches with 'transformers' or 'huggingface' in the name\n *\n * @param options Configuration options\n * @returns Promise resolving to array of deleted cache names\n */\nexport async function clearTransformersCache(options?: {\n /** Whether to log deletion details (default: true) */\n verbose?: boolean;\n /** Additional cache name patterns to clear (e.g., ['my-custom-cache']) */\n additionalPatterns?: string[];\n}): Promise<string[]> {\n const verbose = options?.verbose ?? true;\n const additionalPatterns = options?.additionalPatterns ?? 
[];\n\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return [];\n }\n\n try {\n const cacheNames = await caches.keys();\n const deletedCaches: string[] = [];\n\n const patterns = [\n 'transformers',\n 'huggingface',\n 'onnx',\n ...additionalPatterns,\n ];\n\n for (const cacheName of cacheNames) {\n const shouldDelete = patterns.some(pattern =>\n cacheName.toLowerCase().includes(pattern.toLowerCase())\n );\n\n if (shouldDelete) {\n if (verbose) {\n logger.info('Deleting cache', { cacheName });\n }\n const deleted = await caches.delete(cacheName);\n if (deleted) {\n deletedCaches.push(cacheName);\n } else if (verbose) {\n logger.warn('Failed to delete cache', { cacheName });\n }\n }\n }\n\n if (verbose) {\n logger.info('Cache clearing complete', {\n totalCaches: cacheNames.length,\n deletedCount: deletedCaches.length,\n deletedCaches,\n });\n }\n\n return deletedCaches;\n } catch (error) {\n logger.error('Error clearing caches', { error });\n throw error;\n }\n}\n\n/**\n * Clear a specific cache by exact name\n *\n * @param cacheName Exact cache name to delete\n * @returns Promise resolving to true if deleted, false otherwise\n */\nexport async function clearSpecificCache(cacheName: string): Promise<boolean> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return false;\n }\n\n try {\n const deleted = await caches.delete(cacheName);\n logger.info('Cache deletion attempt', { cacheName, deleted });\n return deleted;\n } catch (error) {\n logger.error('Error deleting cache', { cacheName, error });\n return false;\n }\n}\n\n/**\n * List all cache names currently stored\n *\n * @returns Promise resolving to array of cache names\n */\nexport async function listCaches(): Promise<string[]> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return [];\n }\n\n try {\n const cacheNames = await caches.keys();\n logger.debug('Available caches', { cacheNames });\n return cacheNames;\n } catch (error) {\n logger.error('Error listing caches', { error });\n return [];\n }\n}\n\n/**\n * Check if a specific cached response is valid JSON/binary (not HTML error page)\n *\n * @param cacheName Cache name to check\n * @param requestUrl URL/key to check\n * @returns Promise resolving to validation result\n */\nexport async function validateCachedResponse(\n cacheName: string,\n requestUrl: string\n): Promise<{\n exists: boolean;\n valid: boolean;\n contentType: string | null;\n isHtml: boolean;\n reason?: string;\n}> {\n if (!('caches' in window)) {\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: 'Cache API not available',\n };\n }\n\n try {\n const cache = await caches.open(cacheName);\n const response = await cache.match(requestUrl);\n\n if (!response) {\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: 'Not in cache',\n };\n }\n\n const contentType = response.headers.get('content-type');\n const isHtml =\n contentType?.includes('text/html') ||\n contentType?.includes('text/plain'); // Some servers return plain text HTML\n\n // For validation, we need to check the content\n const clonedResponse = response.clone();\n const text = await clonedResponse.text();\n const looksLikeHtml = text.trim().startsWith('<') || text.includes('<!DOCTYPE');\n\n const valid = Boolean(\n response.status === 200 &&\n !isHtml &&\n !looksLikeHtml &&\n contentType &&\n (contentType.includes('application/json') ||\n 
contentType.includes('application/octet-stream') ||\n contentType.includes('binary'))\n );\n\n return {\n exists: true,\n valid,\n contentType,\n isHtml: isHtml || looksLikeHtml,\n reason: valid\n ? 'Valid response'\n : `Invalid: status=${response.status}, contentType=${contentType}, isHtml=${isHtml || looksLikeHtml}`,\n };\n } catch (error) {\n logger.error('Error validating cached response', { cacheName, requestUrl, error });\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: `Error: ${error}`,\n };\n }\n}\n\n/**\n * Scan all caches for potentially invalid cached responses\n *\n * @returns Promise resolving to report of invalid entries\n */\nexport async function scanForInvalidCaches(): Promise<{\n totalCaches: number;\n scannedEntries: number;\n invalidEntries: Array<{\n cacheName: string;\n url: string;\n reason: string;\n }>;\n}> {\n if (!('caches' in window)) {\n return { totalCaches: 0, scannedEntries: 0, invalidEntries: [] };\n }\n\n const invalidEntries: Array<{ cacheName: string; url: string; reason: string }> = [];\n let scannedEntries = 0;\n\n try {\n const cacheNames = await caches.keys();\n\n for (const cacheName of cacheNames) {\n if (!cacheName.toLowerCase().includes('transformers')) {\n continue; // Skip non-transformers caches\n }\n\n const cache = await caches.open(cacheName);\n const requests = await cache.keys();\n\n for (const request of requests) {\n scannedEntries++;\n const url = request.url;\n\n const validation = await validateCachedResponse(cacheName, url);\n\n if (validation.exists && !validation.valid) {\n invalidEntries.push({\n cacheName,\n url,\n reason: validation.reason || 'Unknown',\n });\n }\n }\n }\n\n logger.info('Cache scan complete', {\n totalCaches: cacheNames.length,\n scannedEntries,\n invalidCount: invalidEntries.length,\n });\n\n return {\n totalCaches: cacheNames.length,\n scannedEntries,\n invalidEntries,\n };\n } catch (error) {\n logger.error('Error scanning caches', { error });\n throw error;\n }\n}\n\n/**\n * Clear all caches and optionally prevent re-creation (development mode)\n *\n * WARNING: This is aggressive and should only be used in development.\n * It clears ALL browser caches, not just transformers.js.\n *\n * @param preventRecreation If true, sets env.useBrowserCache = false\n * @returns Promise resolving to number of deleted caches\n */\nexport async function nukeBrowserCaches(preventRecreation = false): Promise<number> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return 0;\n }\n\n try {\n const cacheNames = await caches.keys();\n let deletedCount = 0;\n\n for (const cacheName of cacheNames) {\n const deleted = await caches.delete(cacheName);\n if (deleted) {\n deletedCount++;\n }\n }\n\n logger.info('All browser caches cleared', {\n totalDeleted: deletedCount,\n });\n\n if (preventRecreation) {\n // Import dynamically to avoid circular dependencies\n const { env } = await import('@huggingface/transformers');\n env.useBrowserCache = false;\n logger.warn('Browser cache creation disabled (env.useBrowserCache = false)');\n }\n\n return deletedCount;\n } catch (error) {\n logger.error('Error nuking caches', { error });\n throw error;\n }\n}\n","/**\n * Animation Graph Types\n *\n * Renderer-agnostic animation state machine with emotion and audio-driven blending.\n *\n * @module animation\n */\n\n/**\n * Emotion labels for animation blending\n * Note: These are the 8 emotion categories used for animation, separate from the\n * internal 
EmotionName type used by EmotionController.\n */\nexport type EmotionLabel =\n | 'angry'\n | 'calm'\n | 'disgust'\n | 'fearful'\n | 'happy'\n | 'neutral'\n | 'sad'\n | 'surprised';\n\n/**\n * High-level animation states\n */\nexport type AnimationStateName = 'idle' | 'listening' | 'thinking' | 'speaking';\n\n/**\n * Events that trigger state transitions\n */\nexport type AnimationTrigger =\n | 'user_speech_start'\n | 'user_speech_end'\n | 'transcript_ready'\n | 'ai_response_start'\n | 'ai_audio_start'\n | 'ai_response_end'\n | 'timeout'\n | 'interrupt';\n\n/**\n * Animation layer types for blending\n */\nexport type AnimationLayer = 'base' | 'emotion' | 'gesture' | 'additive';\n\n/**\n * A single animation clip reference\n */\nexport interface AnimationClip {\n /** Unique identifier for the clip */\n name: string;\n /** Animation layer this clip belongs to */\n layer: AnimationLayer;\n /** Whether this clip loops */\n loop: boolean;\n /** Default duration in seconds (can be overridden by actual clip) */\n duration?: number;\n}\n\n/**\n * Blend weight for an animation clip\n */\nexport interface BlendWeight {\n /** Clip name */\n clip: string;\n /** Weight 0-1 */\n weight: number;\n /** Playback speed multiplier */\n speed: number;\n /** Current time in the animation (0-1 normalized) */\n time: number;\n}\n\n/**\n * Animation state definition\n */\nexport interface AnimationState {\n /** State name */\n name: AnimationStateName;\n /** Base animation clips for this state */\n baseClips: string[];\n /** Blend weights for base clips */\n baseWeights: number[];\n /** Whether emotion overlay is enabled in this state */\n emotionBlendEnabled: boolean;\n /** Whether gesture layer is enabled in this state */\n gestureBlendEnabled: boolean;\n /** Timeout in ms to auto-transition (0 = no timeout) */\n timeout: number;\n /** State to transition to on timeout */\n timeoutTarget?: AnimationStateName;\n}\n\n/**\n * Transition between states\n */\nexport interface Transition {\n /** Source state */\n from: AnimationStateName;\n /** Target state */\n to: AnimationStateName;\n /** Event that triggers this transition */\n trigger: AnimationTrigger;\n /** Blend duration in ms */\n duration: number;\n /** Optional condition function */\n condition?: () => boolean;\n}\n\n/**\n * Emotion to animation mapping\n */\nexport interface EmotionAnimationMap {\n /** Emotion label */\n emotion: EmotionLabel;\n /** Animation clip to blend */\n clip: string;\n /** Maximum blend weight for this emotion */\n maxWeight: number;\n /** Blend speed (weight change per second) */\n blendSpeed: number;\n}\n\n/**\n * Configuration for AnimationGraph\n */\nexport interface AnimationGraphConfig {\n /** Available animation states */\n states: AnimationState[];\n /** Transitions between states */\n transitions: Transition[];\n /** Emotion to animation mappings */\n emotionMappings: EmotionAnimationMap[];\n /** Gesture clips for audio-driven animation */\n gestureClips: string[];\n /** Initial state */\n initialState: AnimationStateName;\n /** Global blend speed for state transitions (weight/sec) */\n transitionBlendSpeed: number;\n /** Minimum audio energy to trigger gestures (0-1) */\n gestureThreshold: number;\n /** Gesture intensity multiplier */\n gestureIntensity: number;\n}\n\n/**\n * Current output of the animation graph\n */\nexport interface AnimationOutput {\n /** Current state name */\n state: AnimationStateName;\n /** All blend weights to apply */\n blendWeights: BlendWeight[];\n /** Active emotion (if any) */\n 
activeEmotion: EmotionLabel | null;\n /** Current gesture intensity (0-1) */\n gestureIntensity: number;\n /** Whether currently transitioning between states */\n isTransitioning: boolean;\n /** Transition progress (0-1) if transitioning */\n transitionProgress: number;\n}\n\n/**\n * Events emitted by AnimationGraph\n */\nexport type AnimationGraphEvents = {\n /** State changed */\n 'state.change': {\n from: AnimationStateName;\n to: AnimationStateName;\n trigger: AnimationTrigger;\n };\n /** Transition started */\n 'transition.start': {\n from: AnimationStateName;\n to: AnimationStateName;\n duration: number;\n };\n /** Transition completed */\n 'transition.end': {\n state: AnimationStateName;\n };\n /** Emotion changed */\n 'emotion.change': {\n emotion: EmotionLabel | null;\n confidence: number;\n };\n /** Animation output updated (every frame) */\n 'output.update': AnimationOutput;\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown;\n};\n\n/**\n * Default animation graph configuration\n */\nexport const DEFAULT_ANIMATION_CONFIG: AnimationGraphConfig = {\n initialState: 'idle',\n transitionBlendSpeed: 4.0, // Full blend in 250ms\n gestureThreshold: 0.1,\n gestureIntensity: 1.0,\n\n states: [\n {\n name: 'idle',\n baseClips: ['idle_breathe'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: false,\n timeout: 0,\n },\n {\n name: 'listening',\n baseClips: ['idle_attentive'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: false,\n timeout: 10000, // 10s timeout back to idle\n timeoutTarget: 'idle',\n },\n {\n name: 'thinking',\n baseClips: ['thinking_look_up', 'thinking_hand_chin'],\n baseWeights: [0.6, 0.4],\n emotionBlendEnabled: false,\n gestureBlendEnabled: false,\n timeout: 5000, // 5s max thinking\n timeoutTarget: 'idle',\n },\n {\n name: 'speaking',\n baseClips: ['talking_idle'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: true,\n timeout: 0,\n },\n ],\n\n transitions: [\n // User starts speaking\n { from: 'idle', to: 'listening', trigger: 'user_speech_start', duration: 300 },\n { from: 'speaking', to: 'listening', trigger: 'user_speech_start', duration: 200 }, // Interrupt\n\n // User stops speaking, processing\n { from: 'listening', to: 'thinking', trigger: 'transcript_ready', duration: 400 },\n\n // AI starts responding\n { from: 'thinking', to: 'speaking', trigger: 'ai_audio_start', duration: 300 },\n { from: 'idle', to: 'speaking', trigger: 'ai_audio_start', duration: 400 },\n\n // AI done\n { from: 'speaking', to: 'idle', trigger: 'ai_response_end', duration: 500 },\n\n // Timeouts\n { from: 'listening', to: 'idle', trigger: 'timeout', duration: 600 },\n { from: 'thinking', to: 'idle', trigger: 'timeout', duration: 400 },\n\n // Interrupts\n { from: 'speaking', to: 'listening', trigger: 'interrupt', duration: 150 },\n ],\n\n emotionMappings: [\n { emotion: 'happy', clip: 'emotion_happy', maxWeight: 0.7, blendSpeed: 2.0 },\n { emotion: 'sad', clip: 'emotion_sad', maxWeight: 0.6, blendSpeed: 1.5 },\n { emotion: 'angry', clip: 'emotion_angry', maxWeight: 0.5, blendSpeed: 2.5 },\n { emotion: 'fearful', clip: 'emotion_fear', maxWeight: 0.5, blendSpeed: 2.0 },\n { emotion: 'surprised', clip: 'emotion_surprised', maxWeight: 0.6, blendSpeed: 2.5 },\n { emotion: 'calm', clip: 'emotion_calm', maxWeight: 0.3, blendSpeed: 1.0 },\n { emotion: 'disgust', clip: 'emotion_disgust', maxWeight: 0.4, blendSpeed: 2.0 },\n { emotion: 'neutral', clip: 'emotion_neutral', maxWeight: 0.0, 
blendSpeed: 1.0 },\n ],\n\n gestureClips: ['gesture_hand_small', 'gesture_hand_medium', 'gesture_hand_large'],\n};\n","/**\n * Animation Graph\n *\n * State machine for character animation with emotion and audio-driven blending.\n * Renderer-agnostic - outputs blend weights that any 3D engine can consume.\n *\n * @example\n * ```typescript\n * import { AnimationGraph, DEFAULT_ANIMATION_CONFIG } from '@omote/core';\n *\n * const graph = new AnimationGraph(DEFAULT_ANIMATION_CONFIG);\n *\n * // Connect to voice pipeline\n * graph.on('output.update', (output) => {\n * // Apply blend weights to your 3D character\n * for (const { clip, weight } of output.blendWeights) {\n * mixer.getAction(clip).setEffectiveWeight(weight);\n * }\n * });\n *\n * // Drive from voice state\n * voiceState.on('listening', () => graph.trigger('user_speech_start'));\n * voiceState.on('thinking', () => graph.trigger('transcript_ready'));\n * voiceState.on('speaking', () => graph.trigger('ai_audio_start'));\n *\n * // Drive from emotion detection\n * emotion.on('result', ({ emotion, confidence }) => {\n * graph.setEmotion(emotion, confidence);\n * });\n *\n * // Update every frame\n * function animate(deltaTime: number) {\n * graph.update(deltaTime);\n * }\n * ```\n *\n * @module animation\n */\n\nimport { EventEmitter } from '../events';\nimport type {\n EmotionLabel,\n AnimationGraphConfig,\n AnimationGraphEvents,\n AnimationTrigger,\n AnimationOutput,\n AnimationState,\n AnimationStateName,\n BlendWeight,\n Transition,\n} from './types';\nimport { DEFAULT_ANIMATION_CONFIG } from './types';\n\n/**\n * Animation state machine with smooth blending\n */\nexport class AnimationGraph extends EventEmitter<AnimationGraphEvents> {\n private config: AnimationGraphConfig;\n private currentState: AnimationState;\n private previousState: AnimationState | null = null;\n\n // Transition state\n private isTransitioning: boolean = false;\n private transitionProgress: number = 0;\n private transitionDuration: number = 0;\n private transitionStartTime: number = 0;\n\n // Emotion state\n private currentEmotion: EmotionLabel | null = null;\n private emotionConfidence: number = 0;\n private emotionBlendWeight: number = 0;\n private targetEmotionWeight: number = 0;\n\n // Gesture state (audio-driven)\n private audioEnergy: number = 0;\n private gestureWeight: number = 0;\n private currentGestureClip: number = 0;\n\n // Timing\n private stateEnterTime: number = 0;\n private lastUpdateTime: number = 0;\n\n // Blend weights cache\n private cachedOutput: AnimationOutput;\n\n constructor(config: Partial<AnimationGraphConfig> = {}) {\n super();\n this.config = { ...DEFAULT_ANIMATION_CONFIG, ...config };\n\n // Find initial state\n const initialState = this.config.states.find(\n (s) => s.name === this.config.initialState\n );\n if (!initialState) {\n throw new Error(`Initial state '${this.config.initialState}' not found`);\n }\n this.currentState = initialState;\n this.stateEnterTime = Date.now();\n this.lastUpdateTime = Date.now();\n\n // Initialize cached output\n this.cachedOutput = this.computeOutput();\n }\n\n /**\n * Get current state name\n */\n get state(): AnimationStateName {\n return this.currentState.name;\n }\n\n /**\n * Get current animation output\n */\n get output(): AnimationOutput {\n return this.cachedOutput;\n }\n\n /**\n * Trigger an animation event (may cause state transition)\n */\n trigger(event: AnimationTrigger): boolean {\n // Find matching transition\n const transition = this.config.transitions.find(\n (t) =>\n t.from 
=== this.currentState.name &&\n t.trigger === event &&\n (!t.condition || t.condition())\n );\n\n if (!transition) {\n return false;\n }\n\n this.startTransition(transition, event);\n return true;\n }\n\n /**\n * Set current emotion (from DistilHuBERT or manual)\n */\n setEmotion(emotion: EmotionLabel, confidence: number): void {\n const prevEmotion = this.currentEmotion;\n\n this.currentEmotion = emotion;\n this.emotionConfidence = Math.max(0, Math.min(1, confidence));\n\n // Find emotion mapping\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === emotion\n );\n if (mapping && this.currentState.emotionBlendEnabled) {\n this.targetEmotionWeight = mapping.maxWeight * this.emotionConfidence;\n } else {\n this.targetEmotionWeight = 0;\n }\n\n if (prevEmotion !== emotion) {\n this.emit('emotion.change', { emotion, confidence });\n }\n }\n\n /**\n * Clear current emotion\n */\n clearEmotion(): void {\n this.currentEmotion = null;\n this.emotionConfidence = 0;\n this.targetEmotionWeight = 0;\n this.emit('emotion.change', { emotion: null, confidence: 0 });\n }\n\n /**\n * Set audio energy for gesture animation (0-1)\n */\n setAudioEnergy(energy: number): void {\n this.audioEnergy = Math.max(0, Math.min(1, energy));\n }\n\n /**\n * Force transition to a specific state\n */\n setState(stateName: AnimationStateName, blendDuration: number = 300): void {\n const targetState = this.config.states.find((s) => s.name === stateName);\n if (!targetState) {\n console.warn(`[AnimationGraph] State '${stateName}' not found`);\n return;\n }\n\n if (targetState.name === this.currentState.name && !this.isTransitioning) {\n return;\n }\n\n // Create a manual transition\n const manualTransition: Transition = {\n from: this.currentState.name,\n to: stateName,\n trigger: 'timeout', // Arbitrary, not used for manual\n duration: blendDuration,\n };\n\n this.startTransition(manualTransition, 'timeout');\n }\n\n /**\n * Update animation graph (call every frame)\n * @param deltaMs Time since last update in milliseconds\n */\n update(deltaMs?: number): AnimationOutput {\n const now = Date.now();\n const dt = deltaMs ?? 
now - this.lastUpdateTime;\n this.lastUpdateTime = now;\n\n const dtSeconds = dt / 1000;\n\n // Update transition\n if (this.isTransitioning) {\n this.updateTransition(dtSeconds);\n }\n\n // Check timeout\n this.checkTimeout(now);\n\n // Update emotion blend\n this.updateEmotionBlend(dtSeconds);\n\n // Update gesture\n this.updateGesture(dtSeconds);\n\n // Compute and cache output\n this.cachedOutput = this.computeOutput();\n this.emit('output.update', this.cachedOutput);\n\n return this.cachedOutput;\n }\n\n /**\n * Reset to initial state\n */\n reset(): void {\n const initialState = this.config.states.find(\n (s) => s.name === this.config.initialState\n );\n if (initialState) {\n this.currentState = initialState;\n this.previousState = null;\n this.isTransitioning = false;\n this.transitionProgress = 0;\n this.stateEnterTime = Date.now();\n this.emotionBlendWeight = 0;\n this.gestureWeight = 0;\n this.cachedOutput = this.computeOutput();\n }\n }\n\n /**\n * Get all clip names used by this graph\n */\n getRequiredClips(): string[] {\n const clips = new Set<string>();\n\n // Base clips from all states\n for (const state of this.config.states) {\n for (const clip of state.baseClips) {\n clips.add(clip);\n }\n }\n\n // Emotion clips\n for (const mapping of this.config.emotionMappings) {\n clips.add(mapping.clip);\n }\n\n // Gesture clips\n for (const clip of this.config.gestureClips) {\n clips.add(clip);\n }\n\n return Array.from(clips);\n }\n\n // ─────────────────────────────────────────────────────────────────\n // Private methods\n // ─────────────────────────────────────────────────────────────────\n\n private startTransition(transition: Transition, event: AnimationTrigger): void {\n const targetState = this.config.states.find(\n (s) => s.name === transition.to\n );\n if (!targetState) {\n console.warn(`[AnimationGraph] Target state '${transition.to}' not found`);\n return;\n }\n\n const fromState = this.currentState.name;\n\n this.previousState = this.currentState;\n this.currentState = targetState;\n this.isTransitioning = true;\n this.transitionProgress = 0;\n this.transitionDuration = transition.duration;\n this.transitionStartTime = Date.now();\n this.stateEnterTime = Date.now();\n\n // Update emotion target based on new state\n if (!this.currentState.emotionBlendEnabled) {\n this.targetEmotionWeight = 0;\n }\n\n this.emit('state.change', {\n from: fromState,\n to: targetState.name,\n trigger: event,\n });\n\n this.emit('transition.start', {\n from: fromState,\n to: targetState.name,\n duration: transition.duration,\n });\n }\n\n private updateTransition(dtSeconds: number): void {\n if (!this.isTransitioning || this.transitionDuration <= 0) {\n this.isTransitioning = false;\n this.transitionProgress = 1;\n return;\n }\n\n // Linear progress based on time\n const elapsed = Date.now() - this.transitionStartTime;\n this.transitionProgress = Math.min(1, elapsed / this.transitionDuration);\n\n if (this.transitionProgress >= 1) {\n this.isTransitioning = false;\n this.transitionProgress = 1;\n this.previousState = null;\n this.emit('transition.end', { state: this.currentState.name });\n }\n }\n\n private checkTimeout(now: number): void {\n if (this.isTransitioning) return;\n if (this.currentState.timeout <= 0) return;\n\n const elapsed = now - this.stateEnterTime;\n if (elapsed >= this.currentState.timeout) {\n this.trigger('timeout');\n }\n }\n\n private updateEmotionBlend(dtSeconds: number): void {\n if (!this.currentEmotion) {\n // Decay emotion weight\n this.emotionBlendWeight = 
Math.max(\n 0,\n this.emotionBlendWeight - dtSeconds * 2.0\n );\n return;\n }\n\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === this.currentEmotion\n );\n const blendSpeed = mapping?.blendSpeed ?? 2.0;\n\n // Smoothly interpolate to target\n const diff = this.targetEmotionWeight - this.emotionBlendWeight;\n const maxChange = blendSpeed * dtSeconds;\n\n if (Math.abs(diff) <= maxChange) {\n this.emotionBlendWeight = this.targetEmotionWeight;\n } else {\n this.emotionBlendWeight += Math.sign(diff) * maxChange;\n }\n }\n\n private updateGesture(dtSeconds: number): void {\n if (!this.currentState.gestureBlendEnabled) {\n this.gestureWeight = Math.max(0, this.gestureWeight - dtSeconds * 4.0);\n return;\n }\n\n // Map audio energy to gesture weight\n const targetGesture =\n this.audioEnergy > this.config.gestureThreshold\n ? this.audioEnergy * this.config.gestureIntensity\n : 0;\n\n // Smooth the gesture weight\n const diff = targetGesture - this.gestureWeight;\n const blendSpeed = 8.0; // Fast response\n const maxChange = blendSpeed * dtSeconds;\n\n if (Math.abs(diff) <= maxChange) {\n this.gestureWeight = targetGesture;\n } else {\n this.gestureWeight += Math.sign(diff) * maxChange;\n }\n\n // Select gesture clip based on intensity\n const clipCount = this.config.gestureClips.length;\n if (clipCount > 0) {\n this.currentGestureClip = Math.min(\n clipCount - 1,\n Math.floor(this.gestureWeight * clipCount)\n );\n }\n }\n\n private computeOutput(): AnimationOutput {\n const blendWeights: BlendWeight[] = [];\n\n // Smooth transition weight (ease in-out)\n const t = this.transitionProgress;\n const transitionWeight = t * t * (3 - 2 * t); // smoothstep\n\n // Previous state clips (fading out)\n if (this.previousState && this.isTransitioning) {\n const fadeOut = 1 - transitionWeight;\n for (let i = 0; i < this.previousState.baseClips.length; i++) {\n const clip = this.previousState.baseClips[i];\n const baseWeight = this.previousState.baseWeights[i] ?? 1.0;\n blendWeights.push({\n clip,\n weight: baseWeight * fadeOut,\n speed: 1.0,\n time: 0,\n });\n }\n }\n\n // Current state clips (fading in or full)\n const fadeIn = this.isTransitioning ? transitionWeight : 1.0;\n for (let i = 0; i < this.currentState.baseClips.length; i++) {\n const clip = this.currentState.baseClips[i];\n const baseWeight = this.currentState.baseWeights[i] ?? 1.0;\n blendWeights.push({\n clip,\n weight: baseWeight * fadeIn,\n speed: 1.0,\n time: 0,\n });\n }\n\n // Emotion overlay\n if (this.currentEmotion && this.emotionBlendWeight > 0.01) {\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === this.currentEmotion\n );\n if (mapping) {\n blendWeights.push({\n clip: mapping.clip,\n weight: this.emotionBlendWeight,\n speed: 1.0,\n time: 0,\n });\n }\n }\n\n // Gesture layer\n if (this.gestureWeight > 0.01 && this.config.gestureClips.length > 0) {\n const gestureClip = this.config.gestureClips[this.currentGestureClip];\n blendWeights.push({\n clip: gestureClip,\n weight: this.gestureWeight,\n speed: 1.0 + this.audioEnergy * 0.5, // Faster with more energy\n time: 0,\n });\n }\n\n return {\n state: this.currentState.name,\n blendWeights,\n activeEmotion: this.emotionBlendWeight > 0.01 ? 
this.currentEmotion : null,\n gestureIntensity: this.gestureWeight,\n isTransitioning: this.isTransitioning,\n transitionProgress: this.transitionProgress,\n };\n }\n}\n","/**\n * Audio Energy Analysis\n *\n * Utilities for extracting energy/loudness from audio for gesture animation.\n *\n * @module animation\n */\n\n/**\n * Calculate RMS (Root Mean Square) energy from audio samples\n * @param samples Audio samples (Float32Array, normalized -1 to 1)\n * @returns RMS energy value (0 to 1)\n */\nexport function calculateRMS(samples: Float32Array): number {\n if (samples.length === 0) return 0;\n\n let sumSquares = 0;\n for (let i = 0; i < samples.length; i++) {\n sumSquares += samples[i] * samples[i];\n }\n\n return Math.sqrt(sumSquares / samples.length);\n}\n\n/**\n * Calculate peak amplitude from audio samples\n * @param samples Audio samples (Float32Array, normalized -1 to 1)\n * @returns Peak amplitude (0 to 1)\n */\nexport function calculatePeak(samples: Float32Array): number {\n let peak = 0;\n for (let i = 0; i < samples.length; i++) {\n const abs = Math.abs(samples[i]);\n if (abs > peak) peak = abs;\n }\n return peak;\n}\n\n/**\n * Smoothed energy analyzer for gesture animation\n */\nexport class AudioEnergyAnalyzer {\n private smoothedRMS: number = 0;\n private smoothedPeak: number = 0;\n private readonly smoothingFactor: number;\n private readonly noiseFloor: number;\n\n /**\n * @param smoothingFactor How much to smooth (0 = no smoothing, 1 = infinite smoothing). Default 0.85\n * @param noiseFloor Minimum energy threshold to consider as signal. Default 0.01\n */\n constructor(smoothingFactor: number = 0.85, noiseFloor: number = 0.01) {\n this.smoothingFactor = Math.max(0, Math.min(0.99, smoothingFactor));\n this.noiseFloor = noiseFloor;\n }\n\n /**\n * Process audio samples and return smoothed energy values\n * @param samples Audio samples (Float32Array)\n * @returns Object with rms and peak values\n */\n process(samples: Float32Array): { rms: number; peak: number; energy: number } {\n const instantRMS = calculateRMS(samples);\n const instantPeak = calculatePeak(samples);\n\n // Apply noise gate\n const gatedRMS = instantRMS > this.noiseFloor ? instantRMS : 0;\n const gatedPeak = instantPeak > this.noiseFloor ? 
instantPeak : 0;\n\n // Smooth the values (exponential moving average)\n // Attack fast (when getting louder), release slower\n if (gatedRMS > this.smoothedRMS) {\n // Fast attack\n this.smoothedRMS =\n this.smoothedRMS * 0.5 + gatedRMS * 0.5;\n } else {\n // Slow release\n this.smoothedRMS =\n this.smoothedRMS * this.smoothingFactor +\n gatedRMS * (1 - this.smoothingFactor);\n }\n\n if (gatedPeak > this.smoothedPeak) {\n this.smoothedPeak = this.smoothedPeak * 0.3 + gatedPeak * 0.7;\n } else {\n this.smoothedPeak =\n this.smoothedPeak * this.smoothingFactor +\n gatedPeak * (1 - this.smoothingFactor);\n }\n\n // Combined energy (weighted average of RMS and peak)\n // RMS is more stable, peak catches transients\n const energy = this.smoothedRMS * 0.7 + this.smoothedPeak * 0.3;\n\n return {\n rms: this.smoothedRMS,\n peak: this.smoothedPeak,\n energy: Math.min(1, energy * 2), // Scale up and clamp\n };\n }\n\n /**\n * Reset analyzer state\n */\n reset(): void {\n this.smoothedRMS = 0;\n this.smoothedPeak = 0;\n }\n\n /**\n * Get current smoothed RMS value\n */\n get rms(): number {\n return this.smoothedRMS;\n }\n\n /**\n * Get current smoothed peak value\n */\n get peak(): number {\n return this.smoothedPeak;\n }\n}\n\n/**\n * Extract emphasis points from audio (for gesture timing)\n *\n * Detects sudden increases in energy that correspond to speech emphasis.\n */\nexport class EmphasisDetector {\n private energyHistory: number[] = [];\n private readonly historySize: number;\n private readonly emphasisThreshold: number;\n\n /**\n * @param historySize Number of frames to track. Default 10\n * @param emphasisThreshold Minimum energy increase to count as emphasis. Default 0.15\n */\n constructor(historySize: number = 10, emphasisThreshold: number = 0.15) {\n this.historySize = historySize;\n this.emphasisThreshold = emphasisThreshold;\n }\n\n /**\n * Process energy value and detect emphasis\n * @param energy Current energy value (0-1)\n * @returns Object with isEmphasis flag and emphasisStrength\n */\n process(energy: number): { isEmphasis: boolean; emphasisStrength: number } {\n this.energyHistory.push(energy);\n if (this.energyHistory.length > this.historySize) {\n this.energyHistory.shift();\n }\n\n if (this.energyHistory.length < 3) {\n return { isEmphasis: false, emphasisStrength: 0 };\n }\n\n // Calculate average of previous frames (excluding current)\n const prevFrames = this.energyHistory.slice(0, -1);\n const avgPrev = prevFrames.reduce((a, b) => a + b, 0) / prevFrames.length;\n\n // Compare current to average\n const increase = energy - avgPrev;\n const isEmphasis = increase > this.emphasisThreshold;\n\n return {\n isEmphasis,\n emphasisStrength: isEmphasis ? 
Math.min(1, increase / 0.3) : 0,\n };\n }\n\n /**\n * Reset detector state\n */\n reset(): void {\n this.energyHistory = [];\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAkBO,IAAM,oBAAN,MAAwB;AAAA,EAS7B,YACU,QACR,SAAkC,CAAC,GACnC;AAFQ;AARV,SAAQ,SAA6B;AACrC,SAAQ,UAA+B;AACvC,SAAQ,YAAwC;AAChD,SAAQ,SAAuB,IAAI,aAAa,CAAC;AACjD,SAAQ,eAAe;AACvB,SAAQ,oBAAoB;AAM1B,SAAK,SAAS;AAAA,MACZ,YAAY,OAAO,cAAc;AAAA,MACjC,WAAW,OAAO,aAAa;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,OAAO,cAAc,eAAe,CAAC,CAAC,UAAU,cAAc;AAAA,EACvE;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,aAAa;AACrB,WAAK,OAAO,KAAK,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AACD;AAAA,IACF;AAEA,QAAI,KAAK,aAAc;AAEvB,QAAI;AACF,WAAK,SAAS,MAAM,UAAU,aAAa,aAAa;AAAA,QACtD,OAAO;AAAA,UACL,YAAY,EAAE,OAAO,KAAK,OAAO,WAAW;AAAA,UAC5C,cAAc;AAAA,UACd,kBAAkB;AAAA,UAClB,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,QACnB;AAAA,MACF,CAAC;AAED,WAAK,UAAU,IAAI,aAAa,EAAE,YAAY,KAAK,OAAO,WAAW,CAAC;AAGtE,UAAI,KAAK,QAAQ,UAAU,aAAa;AACtC,cAAM,KAAK,QAAQ,OAAO;AAAA,MAC5B;AAEA,YAAM,SAAS,KAAK,QAAQ,wBAAwB,KAAK,MAAM;AAG/D,WAAK,YAAY,KAAK,QAAQ,sBAAsB,MAAM,GAAG,CAAC;AAE9D,WAAK,UAAU,iBAAiB,CAAC,MAAM;AACrC,cAAM,QAAQ,EAAE,YAAY,eAAe,CAAC;AAG5C,YAAI,MAAM;AACV,YAAI,OAAO;AACX,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAM,MAAM,KAAK,IAAI,MAAM,CAAC,CAAC;AAC7B,iBAAO,MAAM,CAAC,IAAI,MAAM,CAAC;AACzB,cAAI,MAAM,KAAM,QAAO;AAAA,QACzB;AACA,cAAM,KAAK,KAAK,MAAM,MAAM,MAAM;AAElC,aAAK,OAAO,KAAK,eAAe,EAAE,KAAK,KAAK,CAAC;AAG7C,cAAM,YAAY,IAAI,aAAa,KAAK,OAAO,SAAS,MAAM,MAAM;AACpE,kBAAU,IAAI,KAAK,MAAM;AACzB,kBAAU,IAAI,OAAO,KAAK,OAAO,MAAM;AACvC,aAAK,SAAS;AAGd,YAAI,aAAa;AACjB,eAAO,KAAK,OAAO,UAAU,KAAK,OAAO,WAAW;AAClD,gBAAM,QAAQ,KAAK,OAAO,MAAM,GAAG,KAAK,OAAO,SAAS;AACxD,eAAK,SAAS,KAAK,OAAO,MAAM,KAAK,OAAO,SAAS;AAErD,gBAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAK,OAAO,KAAK,eAAe;AAAA,YAC9B;AAAA,YACA,WAAW,YAAY,IAAI;AAAA,UAC7B,CAAC;AACD;AAAA,QACF;AAEA,YAAI,aAAa,KAAK,CAAC,KAAK,mBAAmB;AAC7C,kBAAQ,IAAI,8CAA8C,UAAU;AACpE,eAAK,oBAAoB;AAAA,QAC3B;AAAA,MACF;AAEA,aAAO,QAAQ,KAAK,SAAS;AAC7B,WAAK,UAAU,QAAQ,KAAK,QAAQ,WAAW;AAE/C,WAAK,eAAe;AACpB,cAAQ,IAAI,yDAAyD,KAAK,QAAQ,KAAK;AAAA,IACzF,SAAS,KAAK;AACZ,WAAK,OAAO,KAAK,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,SAAU,IAAc;AAAA,QACxB,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,OAAa;AACX,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,WAAW;AAC1B,WAAK,YAAY;AAAA,IACnB;AAEA,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAM;AACnB,WAAK,UAAU;AAAA,IACjB;AAEA,QAAI,KAAK,QAAQ;AACf,WAAK,OAAO,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AAC/C,WAAK,SAAS;AAAA,IAChB;AAEA,SAAK,SAAS,IAAI,aAAa,CAAC;AAChC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEQ,aAAa,SAAmC;AACtD,UAAM,MAAM,IAAI,WAAW,QAAQ,MAAM;AACzC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC9C,UAAI,CAAC,IAAI,IAAI,IAAI,IAAI,QAAS,IAAI;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AACF;;;ACzJO,IAAM,aAAN,MAAiB;AAAA,EAKtB,YAA6B,MAAc;AAAd;AAH7B,SAAQ,aAAa;AACrB,SAAQ,SAAS;AAGf,SAAK,SAAS,IAAI,aAAa,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAAuB;AAC3B,aAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,WAAK,OAAO,KAAK,UAAU,IAAI,IAAI,CAAC,IAAI;AACxC,WAAK,cAAc,KAAK,aAAa,KAAK,KAAK;AAE/C,UAAI,KAAK,eAAe,GAAG;AACzB,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAA6B;AACtC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,WAAK,OAAO,KAAK,UAAU,IAAI,QAAQ,CAAC;AACxC,WAAK,cAAc,KAAK,aAAa,KAAK,KAAK;AAE/C,UAAI,KAAK,eAAe,GAAG;AACzB,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAA4B;AAC1B,QAAI,CAAC,KAAK,OAAQ,QAAO;AAEzB,UAAM,SAAS,IAAI,aAAa,KAAK,IAAI;AAGzC,UAAM,YAAY,KAAK,OAAO,SAAS,KAAK,UAAU;AACtD,WAAO,IAAI,WAAW,CAAC;AAGvB,UAAM,a
AAa,KAAK,OAAO,SAAS,GAAG,KAAK,UAAU;AAC1D,WAAO,IAAI,YAAY,UAAU,MAAM;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAmB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,QAAI,KAAK,OAAQ,QAAO;AACxB,WAAO,KAAK,aAAa,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,OAAO,KAAK,CAAC;AAClB,SAAK,aAAa;AAClB,SAAK,SAAS;AAAA,EAChB;AACF;;;ACnEO,IAAM,iBAAN,MAAqB;AAAA,EAM1B,YAA6B,UAAiC,CAAC,GAAG;AAArC;AAL7B,SAAQ,UAA+B;AACvC,SAAQ,eAAe;AACvB,SAAQ,mBAAiF,CAAC;AAC1F,SAAQ,YAAY;AAAA,EAE+C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQnE,MAAM,aAA4B;AAEhC,YAAQ,IAAI,gDAAgD;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,SAAwB;AAC5B,UAAM,KAAK,cAAc;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAAuC;AACnD,QAAI,KAAK,WAAW,KAAK,QAAQ,UAAU,UAAU;AACnD,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,aAAa,KAAK,QAAQ,cAAc;AAC9C,SAAK,UAAU,IAAI,aAAa,EAAE,WAAW,CAAC;AAG9C,QAAI,KAAK,QAAQ,UAAU,aAAa;AACtC,YAAM,KAAK,QAAQ,OAAO;AAAA,IAC5B;AAEA,YAAQ,IAAI,gDAAgD,UAAU,IAAI;AAC1E,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,SAAS,WAA0C;AAEvD,UAAM,MAAM,MAAM,KAAK,cAAc;AACrC,UAAM,WAAW,KAAK,QAAQ,YAAY;AAK1C,QAAI,CAAC,KAAK,WAAW;AACnB,WAAK,eAAe,IAAI,cAAc;AACtC,WAAK,YAAY;AAAA,IACnB;AAGA,UAAM,cAAc,IAAI,aAAa,UAAU,UAAU,QAAQ,IAAI,UAAU;AAC/E,gBAAY,eAAe,CAAC,EAAE,IAAI,SAAS;AAG3C,UAAM,WAAW,IAAI,WAAW;AAChC,aAAS,KAAK,QAAQ;AACtB,aAAS,QAAQ,IAAI,WAAW;AAGhC,UAAM,SAAS,IAAI,mBAAmB;AACtC,WAAO,SAAS;AAChB,WAAO,QAAQ,QAAQ;AAGvB,UAAM,eAAe,KAAK;AAC1B,WAAO,MAAM,YAAY;AAGzB,SAAK,iBAAiB,KAAK,EAAE,QAAQ,SAAS,CAAC;AAG/C,UAAM,WAAW,UAAU,SAAS,IAAI;AACxC,SAAK,eAAe,eAAe;AAEnC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,iBAAyB;AACvB,QAAI,CAAC,KAAK,QAAS,QAAO;AAC1B,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,QAAI,CAAC,KAAK,WAAW,CAAC,KAAK,UAAW,QAAO;AAC7C,WAAO,KAAK,QAAQ,eAAe,KAAK;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,UAAU,YAAoB,IAAmB;AACrD,QAAI,CAAC,KAAK,WAAW,KAAK,iBAAiB,WAAW,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,MAAM,KAAK;AACjB,UAAM,cAAc,IAAI;AACxB,UAAM,aAAa,YAAY;AAG/B,eAAW,EAAE,QAAQ,SAAS,KAAK,KAAK,kBAAkB;AACxD,UAAI;AAEF,iBAAS,KAAK,eAAe,SAAS,KAAK,OAAO,WAAW;AAC7D,iBAAS,KAAK,wBAAwB,GAAK,cAAc,UAAU;AAGnE,eAAO,KAAK,cAAc,UAAU;AAAA,MACtC,SAAS,KAAK;AAAA,MAEd;AAAA,IACF;AAGA,SAAK,mBAAmB,CAAC;AACzB,SAAK,YAAY;AACjB,SAAK,eAAe;AAGpB,UAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,SAAS,CAAC;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AAEZ,QAAI,KAAK,SAAS;AAChB,YAAM,MAAM,KAAK,QAAQ;AACzB,iBAAW,EAAE,QAAQ,SAAS,KAAK,KAAK,kBAAkB;AACxD,YAAI;AACF,mBAAS,KAAK,eAAe,GAAG,GAAG;AACnC,iBAAO,KAAK,GAAG;AAAA,QACjB,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AACA,SAAK,eAAe;AACpB,SAAK,YAAY;AACjB,SAAK,mBAAmB,CAAC;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAM;AACnB,WAAK,UAAU;AAAA,IACjB;AACA,SAAK,mBAAmB,CAAC;AACzB,SAAK,YAAY;AAAA,EACnB;AACF;;;ACjMO,IAAM,sBAAN,MAA0B;AAAA,EAI/B,YAA6B,UAAsC,CAAC,GAAG;AAA1C;AAH7B,SAAQ,aAA2B,CAAC;AAIlC,UAAM,WAAW,QAAQ,oBAAoB;AAC7C,UAAM,aAAa,QAAQ,cAAc;AAGzC,SAAK,cAAe,WAAW,MAAQ,aAAa;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,OAAuC;AAEzC,SAAK,WAAW,KAAK,KAAK;AAG1B,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AAGvE,QAAI,cAAc,KAAK,aAAa;AAClC,aAAO,KAAK,MAAM;AAAA,IACpB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,QAA4B;AAC1B,QAAI,KAAK,WAAW,WAAW,GAAG;AAChC,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AAGvE,UAAM,WAAW,IAAI,WAAW,UAAU;AAC1C,QAAI,SAAS;AACb,eAAW,SAAS,KAAK,YAAY;AACnC,eAAS,IAAI,OAAO,MAAM;AAC1B,gBAAU,MAAM;AAAA,IA
ClB;AAGA,SAAK,aAAa,CAAC;AAEnB,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACvE,WAAO,KAAK,IAAI,GAAG,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,wBAAgC;AAC9B,UAAM,aAAa,KAAK,QAAQ,cAAc;AAC9C,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACvE,UAAM,UAAU,aAAa;AAC7B,WAAQ,UAAU,aAAc;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,aAAqB;AACvB,WAAO,KAAK,WAAW;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,aAAa,CAAC;AAAA,EACrB;AACF;;;AClFO,IAAM,cAAN,MAAkB;AAAA,EAcvB,YAA6B,UAA8B,CAAC,GAAG;AAAlC;AAb7B,SAAiB,mBAAmB;AACpC;AAAA,SAAiB,aAAa;AAE9B;AAAA,SAAQ,SAAuB,IAAI,aAAa,CAAC;AACjD,SAAQ,kBAAkB;AAC1B,SAAQ,aAAyB,CAAC;AAMlC;AAAA;AAAA;AAAA;AAAA,SAAQ,YAAiC;AAAA,EAEuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYhE,MAAM,KAAK,SAAuB,WAAmB,KAAoC;AAEvF,QAAI,KAAK,OAAO,WAAW,GAAG;AAC5B,WAAK,kBAAkB;AAAA,IACzB;AAGA,UAAM,YAAY,IAAI,aAAa,KAAK,OAAO,SAAS,QAAQ,MAAM;AACtE,cAAU,IAAI,KAAK,QAAQ,CAAC;AAC5B,cAAU,IAAI,SAAS,KAAK,OAAO,MAAM;AACzC,SAAK,SAAS;AAKd,WAAO,KAAK,OAAO,UAAU,KAAK,kBAAkB;AAClD,YAAM,KAAK,cAAc,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cAAc,KAAoC;AAC9D,QAAI;AAEF,YAAM,YAAY,KAAK,OAAO,MAAM,GAAG,KAAK,gBAAgB;AAC5D,YAAM,qBAAqB,KAAK;AAGhC,WAAK,SAAS,KAAK,OAAO,MAAM,KAAK,gBAAgB;AAGrD,YAAM,oBAAoB,KAAK,oBAAoB,KAAK,QAAQ,cAAc;AAC9E,WAAK,kBAAkB,qBAAqB;AAG5C,YAAM,SAAS,MAAM,IAAI,MAAM,SAAS;AAGxC,YAAM,gBAAgB,IAAI,KAAK;AAC/B,eAAS,IAAI,GAAG,IAAI,OAAO,YAAY,QAAQ,KAAK;AAClD,cAAM,QAAQ,OAAO,YAAY,CAAC;AAClC,cAAM,YAAY,qBAAsB,IAAI;AAC5C,aAAK,WAAW,KAAK,EAAE,OAAO,UAAU,CAAC;AAAA,MAC3C;AAGA,WAAK,QAAQ,cAAc,OAAO,YAAY,MAAM;AAAA,IACtD,SAAS,OAAO;AACd,WAAK,QAAQ,UAAU,KAAc;AAGrC,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,gBAAgB,aAAqB,KAAkE;AAErG,UAAM,gBAAgB,KAAK,YAAY,SAAS,IAAM;AAGtD,QAAI,iBAAiB;AACrB,WAAO,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,CAAC,EAAE,YAAY,cAAc,eAAe;AAC/F,YAAM,YAAY,KAAK,WAAW,MAAM;AACxC;AAGA,UAAI,mBAAmB,GAAG;AACxB,cAAM,UAAU,cAAc,UAAU,aAAa,KAAM,QAAQ,CAAC;AACpE,gBAAQ,KAAK,uCAAuC;AAAA,UAClD;AAAA,UACA,iBAAiB,gBAAgB;AAAA,UACjC,aAAa,KAAK,WAAW;AAAA,UAC7B,SAAS,KAAK,WAAW;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,CAAC,EAAE,aAAa,aAAa;AAC7E,YAAM,EAAE,MAAM,IAAI,KAAK,WAAW,MAAM;AACxC,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AAIA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA8B;AAC5B,WAAO,CAAC,GAAG,KAAK,UAAU;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,WAAO,KAAK,IAAI,GAAG,KAAK,OAAO,SAAS,KAAK,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,mBAA2B;AAC7B,WAAO,KAAK,WAAW;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,mBAA2B;AAC7B,WAAO,KAAK,OAAO,UAAU,KAAK,QAAQ,cAAc;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,MAAM,KAAoC;AAC9C,QAAI,KAAK,OAAO,WAAW,GAAG;AAC5B;AAAA,IACF;AAGA,UAAM,SAAS,IAAI,aAAa,KAAK,gBAAgB;AACrD,WAAO,IAAI,KAAK,QAAQ,CAAC;AAIzB,UAAM,qBAAqB,KAAK;AAEhC,QAAI;AAEF,YAAM,SAAS,MAAM,IAAI,MAAM,MAAM;AAIrC,YAAM,iBAAiB,KAAK,OAAO,UAAU,KAAK,QAAQ,cAAc;AACxE,YAAM,gBAAgB,IAAI,KAAK;AAC/B,YAAM,mBAAmB,KAAK,KAAK,iBAAiB,KAAK,UAAU;AAEnE,eAAS,IAAI,GAAG,IAAI,KAAK,IAAI,kBAAkB,OAAO,YAAY,MAAM,GAAG,KAAK;AAC9E,cAAM,QAAQ,OAAO,YAAY,CAAC;AAClC,cAAM,YAAY,qBAAsB,IAAI;AAC5C,aAAK,WAAW,KAAK,EAAE,OAAO,UAAU,CAAC;AAAA,MAC3C;AAGA,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAGvB,WAAK,QAAQ,cAAc,KAAK,IAAI,kBAAkB,OAAO,YAAY,MAAM,CAAC;AAAA,IAClF,SAAS,OAAO;AACd,WAAK,QAAQ,UAAU,KAAc;AAGrC,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,iBAAiB,QAAsB;AAC
rC,eAAW,SAAS,KAAK,YAAY;AACnC,YAAM,aAAa;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,SAAS,IAAI,aAAa,CAAC;AAChC,SAAK,kBAAkB;AACvB,SAAK,aAAa,CAAC;AACnB,SAAK,YAAY;AAAA,EACnB;AACF;;;AChQA,SAAS,eAAe,QAAmC;AAEzD,QAAM,UAAU,OAAO,aAAa,CAAC;AACrC,QAAM,QAAQ,YAAY,OAAO,aAC7B,IAAI,WAAW,MAAM,IACrB,IAAI,WAAW,QAAQ,GAAG,UAAU,CAAC;AACzC,QAAM,UAAU,IAAI,aAAa,MAAM,MAAM;AAC7C,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAQ,CAAC,IAAI,MAAM,CAAC,IAAI;AAAA,EAC1B;AACA,SAAO;AACT;AAwBO,IAAM,sBAAN,cAAkC,aAAwC;AAAA,EAS/E,YAA6B,SAAqC;AAChE,UAAM;AADqB;AAJ7B,SAAQ,kBAAkB;AAC1B,SAAQ,kBAAiC;AACzC,SAAQ,mBAAkC;AAKxC,UAAM,aAAa,QAAQ,cAAc;AAEzC,SAAK,YAAY,IAAI,eAAe,EAAE,WAAW,CAAC;AAClD,SAAK,YAAY,IAAI,oBAAoB;AAAA,MACvC;AAAA,MACA,kBAAkB,QAAQ,iBAAiB;AAAA,IAC7C,CAAC;AACD,SAAK,cAAc,IAAI,YAAY;AAAA,MACjC;AAAA,MACA,SAAS,CAAC,UAAU;AAClB,aAAK,KAAK,SAAS,KAAK;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,UAAM,KAAK,UAAU,WAAW;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,QAAc;AAEZ,SAAK,eAAe;AAEpB,SAAK,UAAU,MAAM;AACrB,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AACvB,SAAK,kBAAkB;AAMvB,SAAK,UAAU,OAAO;AAGtB,SAAK,eAAe;AAGpB,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,aAAa,OAAkC;AAEnD,UAAM,WAAW,KAAK,UAAU,IAAI,KAAK;AACzC,QAAI,CAAC,UAAU;AACb;AAAA,IACF;AAGA,UAAM,UAAU,eAAe,QAAQ;AAGvC,UAAM,eAAe,MAAM,KAAK,UAAU,SAAS,OAAO;AAG1D,QAAI,CAAC,KAAK,iBAAiB;AACzB,WAAK,kBAAkB;AACvB,WAAK,KAAK,kBAAkB,YAAY;AAAA,IAC1C;AAOA,SAAK,YAAY,KAAK,SAAS,cAAc,KAAK,QAAQ,GAAG,EAAE,MAAM,SAAO;AAC1E,WAAK,KAAK,SAAS,GAAG;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAqB;AAEzB,UAAM,YAAY,KAAK,UAAU,MAAM;AACvC,QAAI,WAAW;AACb,YAAM,QAAQ,IAAI,WAAW,SAAS;AACtC,YAAM,KAAK,aAAa,KAAK;AAAA,IAC/B;AAIA,UAAM,KAAK,YAAY,MAAM,KAAK,QAAQ,GAAG;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,KAAK,YAAoB,IAAmB;AAEhD,SAAK,eAAe;AAGpB,UAAM,KAAK,UAAU,UAAU,SAAS;AAGxC,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AACvB,SAAK,kBAAkB;AAGvB,SAAK,KAAK,qBAAqB,MAAgB;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaQ,iBAAuB;AAC7B,UAAM,cAAc,MAAM;AACxB,YAAM,cAAc,KAAK,UAAU,eAAe;AAClD,YAAM,QAAQ,KAAK,YAAY,gBAAgB,aAAa,KAAK,QAAQ,GAAG;AAE5E,UAAI,OAAO;AACT,aAAK,KAAK,eAAe,KAAK;AAAA,MAChC;AAEA,WAAK,mBAAmB,sBAAsB,WAAW;AAAA,IAC3D;AAEA,SAAK,mBAAmB,sBAAsB,WAAW;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAwB;AAC9B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAAA,IACpC;AAEA,SAAK,kBAAkB,OAAO,YAAY,MAAM;AAC9C,UAAI,KAAK,UAAU,WAAW,KAAK,KAAK,YAAY,qBAAqB,GAAG;AAC1E,aAAK,KAAK,qBAAqB,MAAgB;AAC/C,aAAK,eAAe;AAAA,MACtB;AAAA,IACF,GAAG,GAAG;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAuB;AAC7B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAEA,QAAI,KAAK,kBAAkB;AACzB,2BAAqB,KAAK,gBAAgB;AAC1C,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW;AACT,WAAO;AAAA,MACL,iBAAiB,KAAK;AAAA,MACtB,eAAe,KAAK,UAAU;AAAA,MAC9B,SAAS,KAAK,YAAY;AAAA,MAC1B,cAAc,KAAK,YAAY;AAAA,MAC/B,aAAa,KAAK,UAAU,eAAe;AAAA,MAC3C,iBAAiB,KAAK,UAAU,mBAAmB;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,eAAe;AACpB,SAAK,UAAU,QAAQ;AACvB,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AAAA,EACzB;AACF;;;AChOO,IAAM,yBAAyB;AAAA;AAAA,EAEpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AACF;;;AC/BO,IAAM,kBAAN,MAA4D;AAAA,EAIjE,YAAY,UAAkD,CAAC,GAAG;AAChE,SAAK,UAAU,QAAQ,WAAW;AAClC,SAAK,SAAS,QAAQ,UAAU;AAAA,EAClC;AAAA,EAEA,WAAW,MAAsB;AAC/B,QAAI,CAAC,KAAK,QAAS;AAEnB,UAAM,aAAa,KAAK,WAAW,OAAO,WAAM;AAChD,UAAM,cAAc,KAAK,WAAW,OAAO,iBAAiB;AAE5D,YAAQ;AAAA,MACN,KAAK,KAAK,MAAM,MAA
M,UAAU,IAAI,KAAK,IAAI,OAAO,KAAK,WAAW,QAAQ,CAAC,CAAC;AAAA,MAC9E;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,YAAQ,IAAI,aAAa,KAAK,OAAO;AACrC,YAAQ,IAAI,YAAY,KAAK,MAAM;AACnC,QAAI,KAAK,cAAc;AACrB,cAAQ,IAAI,mBAAmB,KAAK,YAAY;AAAA,IAClD;AACA,YAAQ,IAAI,aAAa,GAAG,KAAK,WAAW,QAAQ,CAAC,CAAC,IAAI;AAC1D,YAAQ,IAAI,WAAW,KAAK,MAAM;AAElC,QAAI,OAAO,KAAK,KAAK,UAAU,EAAE,SAAS,GAAG;AAC3C,cAAQ,IAAI,eAAe,KAAK,UAAU;AAAA,IAC5C;AAEA,QAAI,KAAK,OAAO;AACd,cAAQ,MAAM,UAAU,KAAK,KAAK;AAAA,IACpC;AAEA,YAAQ,SAAS;AAAA,EACnB;AAAA,EAEA,aAAa,QAA0B;AACrC,QAAI,CAAC,KAAK,QAAS;AAEnB,UAAM,WAAW,OAAO,SAAS,YAAY,WAAM;AAEnD,YAAQ;AAAA,MACN,KAAK,KAAK,MAAM,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,OAAO,KAAK;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAAA,EAE7B;AAAA,EAEA,MAAM,WAA0B;AAC9B,SAAK,UAAU;AAAA,EACjB;AACF;;;ACvGA,IAAM,aAAa;AAAA,EACjB,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,OAAO;AACT;AAKA,SAAS,WAAW,MAAgB,aAAqB,gBAAwB;AAC/E,QAAM,aAAa,OAAO,QAAQ,KAAK,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,EACjC,IAAI,CAAC,CAAC,KAAK,KAAK,OAAO;AAAA,IACtB;AAAA,IACA,OAAO,OAAO,UAAU,WACpB,EAAE,aAAa,MAAM,IACrB,OAAO,UAAU,WACf,OAAO,UAAU,KAAK,IACpB,EAAE,UAAU,MAAM,IAClB,EAAE,aAAa,MAAM,IACvB,EAAE,WAAW,MAAM;AAAA,EAC3B,EAAE;AAEJ,SAAO;AAAA,IACL,eAAe,CAAC;AAAA,MACd,UAAU;AAAA,QACR,YAAY;AAAA,UACV,EAAE,KAAK,gBAAgB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UAC3D,EAAE,KAAK,mBAAmB,OAAO,EAAE,aAAa,eAAe,EAAE;AAAA,UACjE,EAAE,KAAK,sBAAsB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UACjE,EAAE,KAAK,0BAA0B,OAAO,EAAE,aAAa,aAAa,EAAE;AAAA,QACxE;AAAA,MACF;AAAA,MACA,YAAY,CAAC;AAAA,QACX,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,OAAO,CAAC;AAAA,UACN,SAAS,KAAK;AAAA,UACd,QAAQ,KAAK;AAAA,UACb,cAAc,KAAK,gBAAgB;AAAA,UACnC,MAAM,KAAK;AAAA,UACX,MAAM;AAAA;AAAA,UACN,mBAAmB,OAAO,KAAK,YAAY,GAAS;AAAA,UACpD,iBAAiB,OAAO,KAAK,UAAU,GAAS;AAAA,UAChD;AAAA,UACA,QAAQ;AAAA,YACN,MAAM,KAAK,WAAW,OAAO,WAAW,KAAK,WAAW;AAAA,YACxD,SAAS,KAAK,OAAO,WAAW;AAAA,UAClC;AAAA,QACF,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAKA,SAAS,aAAa,QAAoB,aAAqB,gBAAwB;AACrF,QAAM,aAAa,OAAO,QAAQ,OAAO,UAAU,EAChD,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,EACjC,IAAI,CAAC,CAAC,KAAK,KAAK,OAAO;AAAA,IACtB;AAAA,IACA,OAAO,OAAO,UAAU,WACpB,EAAE,aAAa,MAAM,IACrB,OAAO,UAAU,WACf,OAAO,UAAU,KAAK,IACpB,EAAE,UAAU,MAAM,IAClB,EAAE,aAAa,MAAM,IACvB,EAAE,WAAW,MAAM;AAAA,EAC3B,EAAE;AAEJ,QAAM,YAAY;AAAA,IAChB;AAAA,IACA,cAAc,OAAO,OAAO,YAAY,GAAS;AAAA,IACjD,GAAI,OAAO,SAAS,YAChB,EAAE,OAAO,OAAO,MAAM,IACtB,EAAE,UAAU,OAAO,MAAM;AAAA,EAC/B;AAEA,SAAO;AAAA,IACL,iBAAiB,CAAC;AAAA,MAChB,UAAU;AAAA,QACR,YAAY;AAAA,UACV,EAAE,KAAK,gBAAgB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UAC3D,EAAE,KAAK,mBAAmB,OAAO,EAAE,aAAa,eAAe,EAAE;AAAA,QACnE;AAAA,MACF;AAAA,MACA,cAAc,CAAC;AAAA,QACb,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,SAAS,CAAC;AAAA,UACR,MAAM,OAAO;AAAA,UACb,GAAI,OAAO,SAAS,YAChB;AAAA,YACA,KAAK;AAAA,cACH,YAAY,CAAC,SAAS;AAAA,cACtB,wBAAwB;AAAA;AAAA,cACxB,aAAa;AAAA,YACf;AAAA,UACF,IACE;AAAA,YACA,OAAO;AAAA,cACL,YAAY,CAAC,SAAS;AAAA,YACxB;AAAA,UACF;AAAA,QACJ,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAYO,IAAM,eAAN,MAAyD;AAAA,EAW9D,YACE,QACA,cAAsB,aACtB,iBAAyB,SACzB;AAXF,SAAQ,aAAyB,CAAC;AAClC,SAAQ,eAA6B,CAAC;AACtC,SAAQ,kBAAyD;AACjE,SAAiB,cAAc;AAC/B,SAAiB,oBAAoB;AACrC,SAAQ,aAAa;AAOnB,SAAK,SAAS;AAAA,MACZ,WAAW;AAAA,MACX,SAAS,CAAC;AAAA,MACV,GAAG;AAAA,IACL;AACA,SAAK,cAAc;AACnB,SAAK,iBAAiB;AAGtB,SAAK,kBAAkB,YAAY,MAAM;AACvC,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC,GAAG,KAAK,iBAAiB;AAAA,EAC3B;AAAA,EAEA,WAAW,MAAsB;AAC/B,QAAI,KAAK,WAAY;AAErB,SAAK,WAAW,KAAK,IAAI;AAEzB,QAAI,KAAK,WAAW,UAAU,KAAK,aAAa;AAC9C,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,aAAa,QAA0B;AACrC,QAAI,KAAK,WAAY;
AAErB,SAAK,aAAa,KAAK,MAAM;AAE7B,QAAI,KAAK,aAAa,UAAU,KAAK,aAAa;AAChD,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,WAAY;AAErB,UAAM,QAAQ,KAAK,WAAW,OAAO,CAAC;AACtC,UAAM,UAAU,KAAK,aAAa,OAAO,CAAC;AAE1C,UAAM,WAA4B,CAAC;AAGnC,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK,YAAY,KAAK,CAAC;AAAA,IACvC;AAGA,QAAI,QAAQ,SAAS,GAAG;AACtB,eAAS,KAAK,KAAK,cAAc,OAAO,CAAC;AAAA,IAC3C;AAEA,UAAM,QAAQ,IAAI,QAAQ;AAAA,EAC5B;AAAA,EAEA,MAAM,WAA0B;AAC9B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAGA,UAAM,KAAK,MAAM;AAEjB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,MAAc,YAAY,OAAkC;AAE1D,UAAM,gBAAgB,MAAM;AAAA,MAAI,UAC9B,WAAW,MAAM,KAAK,aAAa,KAAK,cAAc,EAAE,cAAc,CAAC;AAAA,IACzE;AAEA,UAAM,OAAO,EAAE,cAAc;AAC7B,UAAM,WAAW,KAAK,OAAO,SAAS,QAAQ,OAAO,EAAE,IAAI;AAE3D,UAAM,KAAK,YAAY,UAAU,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,cAAc,SAAsC;AAEhE,UAAM,kBAAkB,QAAQ;AAAA,MAAI,YAClC,aAAa,QAAQ,KAAK,aAAa,KAAK,cAAc,EAAE,gBAAgB,CAAC;AAAA,IAC/E;AAEA,UAAM,OAAO,EAAE,gBAAgB;AAC/B,UAAM,WAAW,KAAK,OAAO,SAAS,QAAQ,OAAO,EAAE,IAAI;AAE3D,UAAM,KAAK,YAAY,UAAU,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,YAAY,UAAkB,MAA8B;AACxE,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO,SAAS;AAE5E,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,UAAU;AAAA,QACrC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG,KAAK,OAAO;AAAA,QACjB;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,gBAAQ,KAAK,yBAAyB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,MAChF;AAAA,IACF,SAAS,OAAO;AACd,UAAK,MAAgB,SAAS,cAAc;AAC1C,gBAAQ,KAAK,yBAAyB;AAAA,MACxC,OAAO;AACL,gBAAQ,KAAK,wBAAwB,KAAK;AAAA,MAC5C;AAAA,IACF,UAAE;AACA,mBAAa,SAAS;AAAA,IACxB;AAAA,EACF;AACF;;;ACpQA,SAAS,WAAW,SAAiB,IAAY;AAC/C,QAAM,QAAQ,IAAI,WAAW,MAAM;AACnC,SAAO,gBAAgB,KAAK;AAC5B,SAAO,MAAM,KAAK,KAAK,EACpB,IAAI,OAAK,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EACxC,KAAK,EAAE;AACZ;AA4BA,IAAI,kBAAyC;AA0BtC,SAAS,mBAAmB,QAAyC;AAC1E,MAAI,iBAAiB;AACnB,oBAAgB,SAAS;AAAA,EAC3B;AACA,oBAAkB,IAAI,eAAe,MAAM;AAC3C,SAAO;AACT;AAKO,SAAS,eAAsC;AACpD,SAAO;AACT;AAOO,IAAM,iBAAN,MAAqB;AAAA,EAU1B,YAAY,QAAyB;AARrC,SAAQ,WAA8C;AACtD,SAAQ,gBAA+B;AACvC,SAAQ,oBAA2D;AAGnE;AAAA,SAAQ,WAAkG,oBAAI,IAAI;AAClH,SAAQ,aAAuG,oBAAI,IAAI;AAGrH,SAAK,SAAS;AAAA,MACZ,SAAS,OAAO,WAAW;AAAA,MAC3B,aAAa,OAAO,eAAe;AAAA,MACnC,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,UAAU,OAAO,YAAY;AAAA,MAC7B,gBAAgB,OAAO;AAAA,MACvB,UAAU,OAAO,YAAY,EAAE,OAAO,GAAK,oBAAoB,KAAK;AAAA,MACpE,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,mBAAmB,OAAO,qBAAqB;AAAA,IACjD;AAEA,QAAI,KAAK,OAAO,SAAS;AACvB,WAAK,aAAa;AAClB,WAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAqB;AAC3B,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,aAAK,WAAW,IAAI,gBAAgB,EAAE,SAAS,KAAK,CAAC;AACrD;AAAA,MACF,KAAK;AACH,YAAI,CAAC,KAAK,OAAO,gBAAgB;AAC/B,kBAAQ,KAAK,iEAAiE;AAC9E;AAAA,QACF;AACA,aAAK,WAAW,IAAI;AAAA,UAClB,KAAK,OAAO;AAAA,UACZ,KAAK,OAAO;AAAA,UACZ,KAAK,OAAO;AAAA,QACd;AACA;AAAA,MACF,KAAK;AAAA,MACL;AACE,aAAK,WAAW;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAA+B;AACrC,QAAI,CAAC,KAAK,OAAO,kBAAkB,CAAC,KAAK,SAAU;AAEnD,SAAK,oBAAoB,YAAY,MAAM;AACzC,WAAK,aAAa;AAAA,IACpB,GAAG,KAAK,OAAO,iBAAiB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,UAAmB,OAAgB;AACtD,QAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAEjC,UAAM,WAAW,KAAK,OAAO;AAC7B,QAAI,WAAW,SAAS,mBAAoB,QAAO;AAEnD,UAAM,QAAQ,SAAS,SAAS;AAChC,WAAO,KAAK,OAAO,IAAI;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,UAAU,MAAc,aAAsC,CAAC,GAAG,eAAyC;AACzG,UAAM,UAAU,eAAe,WAAW,KAAK,iBAAiB,WAAW,EAAE;AAC7E,UAAM,SAAS,WAAW,CAAC;AAC3B,UAAM,eAAe,eAAe;AACpC,UAAM,YAAY,YAAY,IAAI;AAGlC,QAAI,CAAC,iBAAiB,CAAC,KAAK,eAAe;AACzC,WAAK,gBAAgB;AA
AA,IACvB;AAEA,QAAI,iBAAiB,EAAE,GAAG,WAAW;AACrC,QAAI,QAAQ;AACZ,QAAI,UAAU,KAAK,aAAa;AAEhC,UAAM,UAAuB,EAAE,SAAS,QAAQ,aAAa;AAE7D,UAAM,UAAU,CAAC,QAAwB,UAAwB;AAC/D,UAAI,MAAO;AACX,cAAQ;AAER,YAAM,UAAU,YAAY,IAAI;AAChC,YAAM,aAAa,UAAU;AAG7B,UAAI,WAAW,WAAW,CAAC,SAAS;AAClC,kBAAU,KAAK,aAAa,IAAI;AAAA,MAClC;AAEA,UAAI,CAAC,WAAW,CAAC,KAAK,SAAU;AAEhC,YAAM,WAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAEA,WAAK,SAAS,WAAW,QAAQ;AAGjC,UAAI,CAAC,gBAAgB,KAAK,kBAAkB,SAAS;AACnD,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,KAAK,MAAM,QAAQ,IAAI;AAAA,MACvB,cAAc,CAAC,UAAiB,QAAQ,SAAS,KAAK;AAAA,MACtD,eAAe,CAAC,UAAmC;AACjD,yBAAiB,EAAE,GAAG,gBAAgB,GAAG,MAAM;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,SACJ,MACA,IACA,aAAsC,CAAC,GACvC,eACY;AACZ,UAAM,OAAO,KAAK,UAAU,MAAM,YAAY,aAAa;AAE3D,QAAI;AACF,YAAM,SAAS,MAAM,GAAG,IAAI;AAC5B,WAAK,IAAI;AACT,aAAO;AAAA,IACT,SAAS,OAAO;AACd,WAAK,aAAa,KAAc;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,iBACE,MACA,QAAgB,GAChB,aAAwD,CAAC,GACnD;AACN,QAAI,CAAC,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO,eAAgB;AAEzD,UAAM,MAAM,KAAK,aAAa,MAAM,UAAU;AAC9C,UAAM,WAAW,KAAK,SAAS,IAAI,GAAG;AAEtC,QAAI,UAAU;AACZ,eAAS,SAAS;AAAA,IACpB,OAAO;AACL,WAAK,SAAS,IAAI,KAAK,EAAE,OAAO,WAAW,CAAC;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,gBACE,MACA,OACA,aAAwD,CAAC,GACnD;AACN,QAAI,CAAC,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO,eAAgB;AAEzD,UAAM,MAAM,KAAK,aAAa,MAAM,UAAU;AAC9C,UAAM,WAAW,KAAK,WAAW,IAAI,GAAG;AAExC,QAAI,UAAU;AACZ,eAAS,OAAO,KAAK,KAAK;AAAA,IAC5B,OAAO;AACL,WAAK,WAAW,IAAI,KAAK,EAAE,QAAQ,CAAC,KAAK,GAAG,WAAW,CAAC;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,YAA+D;AAChG,UAAM,cAAc,OAAO,QAAQ,UAAU,EAC1C,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EACrC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAC3B,KAAK,GAAG;AACX,WAAO,GAAG,IAAI,IAAI,WAAW;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAqB;AAC3B,QAAI,CAAC,KAAK,SAAU;AAEpB,UAAM,YAAY,YAAY,IAAI;AAGlC,eAAW,CAAC,KAAK,IAAI,KAAK,KAAK,UAAU;AACvC,YAAM,OAAO,IAAI,MAAM,GAAG,EAAE,CAAC;AAC7B,YAAM,SAAqB;AAAA,QACzB;AAAA,QACA,MAAM;AAAA,QACN,OAAO,KAAK;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB;AAAA,MACF;AACA,WAAK,SAAS,aAAa,MAAM;AAAA,IACnC;AAGA,eAAW,CAAC,KAAK,IAAI,KAAK,KAAK,YAAY;AACzC,YAAM,OAAO,IAAI,MAAM,GAAG,EAAE,CAAC;AAC7B,UAAI,KAAK,OAAO,WAAW,EAAG;AAG9B,YAAM,MAAM,KAAK,OAAO,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AACjD,YAAM,MAAM,MAAM,KAAK,OAAO;AAE9B,YAAM,SAAqB;AAAA,QACzB;AAAA,QACA,MAAM;AAAA,QACN,OAAO;AAAA,QACP,YAAY;AAAA,UACV,GAAG,KAAK;AAAA,UACR,OAAO,KAAK,OAAO;AAAA,UACnB;AAAA,UACA,KAAK,KAAK,IAAI,GAAG,KAAK,MAAM;AAAA,UAC5B,KAAK,KAAK,IAAI,GAAG,KAAK,MAAM;AAAA,QAC9B;AAAA,QACA;AAAA,MACF;AACA,WAAK,SAAS,aAAa,MAAM;AAGjC,WAAK,SAAS,CAAC;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,aAAa;AAClB,UAAM,KAAK,UAAU,MAAM;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAA0B;AAC9B,QAAI,KAAK,mBAAmB;AAC1B,oBAAc,KAAK,iBAAiB;AACpC,WAAK,oBAAoB;AAAA,IAC3B;AAEA,UAAM,KAAK,MAAM;AACjB,UAAM,KAAK,UAAU,SAAS;AAC9B,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqB;AACnB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,YAA6B;AAC3B,WAAO,EAAE,GAAG,KAAK,OAAO;AAAA,EAC1B;AACF;;;AChUO,IAAM,cAAc;AAAA;AAAA,EAEzB,mBAAmB;AAAA;AAAA,EAEnB,iBAAiB;AAAA;AAAA,EAEjB,iBAAiB;AAAA;AAAA,EAEjB,cAAc;AAAA;AAAA,EAEd,YAAY;AAAA;AAAA,EAEZ,cAAc;AAChB;AAKO,IAAM,4BAA4B,CAAC,GAAG,GAAG,IAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAM,MAAM,GAAI;AAKpF,IAAM,0BAA0B,CAAC,KAAK,KAAK,KAAM,MAAM,KAAM,KAAO,KAAO,GAAK;;;ACjIvF,IAA
M,UAAU;AAChB,IAAM,aAAa;AACnB,IAAM,aAAa;AAGnB,IAAM,yBAAyB,OAAO,OAAO;AA6B7C,IAAI,oBAAiC;AAAA,EACnC,cAAc;AAChB;AAqBO,SAAS,oBAAoB,QAA2B;AAC7D,sBAAoB;AAAA,IAClB,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAGA,QAAM,QAAQ,cAAc;AAC5B,QAAM,aAAa,EAAE,MAAM,CAAC,QAAQ;AAClC,YAAQ,KAAK,6DAA6D,GAAG;AAAA,EAC/E,CAAC;AACH;AAKO,SAAS,iBAA8B;AAC5C,SAAO,EAAE,GAAG,kBAAkB;AAChC;AAuCO,SAAS,YAAY,KAAa,SAA0B;AACjE,MAAI,SAAS;AACX,WAAO,GAAG,GAAG,KAAK,OAAO;AAAA,EAC3B;AACA,SAAO;AACT;AAWO,IAAM,aAAN,MAAiB;AAAA,EAAjB;AACL,SAAQ,KAAyB;AACjC,SAAQ,YAAyC;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjD,MAAc,QAA8B;AAC1C,QAAI,KAAK,GAAI,QAAO,KAAK;AACzB,QAAI,KAAK,UAAW,QAAO,KAAK;AAIhC,QAAI,UAAU,WAAW,UAAU,QAAQ,SAAS;AAClD,UAAI;AACF,cAAM,cAAc,MAAM,UAAU,QAAQ,QAAQ;AACpD,YAAI,aAAa;AACf,kBAAQ,IAAI,qEAAqE;AAAA,QACnF,OAAO;AACL,kBAAQ,IAAI,8DAA8D;AAAA,QAC5E;AAGA,YAAI,UAAU,QAAQ,UAAU;AAC9B,gBAAM,WAAW,MAAM,UAAU,QAAQ,SAAS;AAClD,gBAAM,WAAW,SAAS,SAAS,KAAK,OAAO,MAAM,QAAQ,CAAC;AAC9D,gBAAM,YAAY,SAAS,SAAS,KAAK,OAAO,MAAM,QAAQ,CAAC;AAC/D,kBAAQ,IAAI,yBAAyB,MAAM,QAAQ,OAAO,UAAU;AAAA,QACtE;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ,KAAK,sDAAsD,GAAG;AAAA,MACxE;AAAA,IACF;AAEA,SAAK,YAAY,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChD,YAAM,UAAU,UAAU,KAAK,SAAS,UAAU;AAElD,cAAQ,UAAU,MAAM;AACtB,gBAAQ,MAAM,0CAA0C,QAAQ,KAAK;AACrE,eAAO,QAAQ,KAAK;AAAA,MACtB;AAEA,cAAQ,YAAY,MAAM;AACxB,aAAK,KAAK,QAAQ;AAClB,gBAAQ,KAAK,EAAE;AAAA,MACjB;AAEA,cAAQ,kBAAkB,CAAC,UAAU;AACnC,cAAM,KAAM,MAAM,OAA4B;AAC9C,cAAM,aAAc,MAAgC;AACpD,cAAM,KAAM,MAAM,OAA4B;AAE9C,YAAI,aAAa,GAAG;AAElB,gBAAM,QAAQ,GAAG,kBAAkB,YAAY,EAAE,SAAS,MAAM,CAAC;AACjE,gBAAM,YAAY,kBAAkB,kBAAkB,EAAE,QAAQ,MAAM,CAAC;AAAA,QACzE,WAAW,aAAa,KAAK,IAAI;AAE/B,gBAAM,QAAQ,GAAG,YAAY,UAAU;AAGvC,cAAI,CAAC,MAAM,WAAW,SAAS,gBAAgB,GAAG;AAChD,kBAAM,YAAY,kBAAkB,kBAAkB,EAAE,QAAQ,MAAM,CAAC;AAAA,UACzE;AAGA,gBAAM,gBAAgB,MAAM,WAAW;AACvC,wBAAc,YAAY,CAAC,gBAAgB;AACzC,kBAAM,SAAU,YAAY,OAA0C;AACtE,gBAAI,QAAQ;AACV,oBAAM,QAAQ,OAAO;AACrB,kBAAI,MAAM,mBAAmB,QAAW;AACtC,sBAAM,iBAAiB,MAAM,YAAY,KAAK,IAAI;AAClD,uBAAO,OAAO,KAAK;AAAA,cACrB;AACA,qBAAO,SAAS;AAAA,YAClB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,KAA+B;AACvC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,MAAM,GAAG;AAC/B,gBAAQ,YAAY,MAAM,QAAQ,QAAQ,SAAS,CAAC;AACpD,gBAAQ,UAAU,MAAM,QAAQ,KAAK;AAAA,MACvC,CAAC;AAAA,IACH,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,IAAI,KAA0C;AAClD,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB,EAAE,aAAa,IAAI,CAAC;AACxE,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAE9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,IAAI,GAAG;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,SAAS,QAAQ;AACvB,gBAAM,MAAM,QAAQ,QAAQ;AAC5B,gBAAM,cAAc,EAAE,aAAa,IAAI,CAAC;AACxC,cAAI,QAAQ;AACV,kBAAM,cAAc,EAAE,oBAAoB,OAAO,KAAK,CAAC;AAEvD,mBAAO,iBAAiB,KAAK,IAAI;AACjC,kBAAM,IAAI,MAAM;AAAA,UAClB;AACA,gBAAM,IAAI;AACV,cAAI,KAAK;AACP,uBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AAAA,UACvD,OAAO;AACL,uBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AAAA,UACzD;AACA,kBAAQ,QAAQ,QAAQ,IAAI;AAAA,QAC9B;AACA,gBAAQ,UAAU,MAAM;AACtB,gBAAM,cAAc,EAAE,aAAa,MAAM,CAAC;AAC1C,gBAAM,IAAI;AACV,qBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AACvD,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF,CAAC;AAAA,IACH,QAAQ;AACN,YAAM,aAAa,IAAI,MAAM,kBAAkB,CAAC;AAChD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,MAAM,kBAAkB,KAAa,aAAiD;AACpF,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gCAAgC,EAAE,aAAa,IAAI,CAAC;AAEtF,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,
MAAM;AAC5B,YAAM,SAAS,MAAM,IAAI,QAAiC,CAAC,YAAY;AACrE,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,IAAI,GAAG;AAC7B,gBAAQ,YAAY,MAAM,QAAQ,QAAQ,MAAiC;AAC3E,gBAAQ,UAAU,MAAM,QAAQ,MAAS;AAAA,MAC3C,CAAC;AAGD,UAAI,CAAC,QAAQ,MAAM;AACjB,cAAM,cAAc,EAAE,aAAa,MAAM,CAAC;AAC1C,cAAM,IAAI;AACV,mBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AACvD,eAAO,EAAE,MAAM,MAAM,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,cAAc,EAAE,aAAa,MAAM,oBAAoB,OAAO,KAAK,CAAC;AAG1E,UAAI,CAAC,OAAO,MAAM;AAChB,cAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,cAAM,IAAI;AACV,mBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,MAC3C;AAGA,YAAM,WAAW,eAAe;AAChC,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,UAAU,EAAE,QAAQ,OAAO,CAAC;AACzD,YAAI,CAAC,SAAS,IAAI;AAEhB,gBAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,gBAAM,IAAI;AACV,qBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,iBAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,QAC3C;AAEA,cAAM,aAAa,SAAS,QAAQ,IAAI,MAAM;AAC9C,cAAM,UAAU,eAAe,QAAQ,eAAe,OAAO;AAE7D,cAAM,cAAc;AAAA,UAClB,mBAAmB;AAAA,UACnB,eAAe;AAAA,UACf,qBAAqB,cAAc;AAAA,UACnC,qBAAqB,OAAO;AAAA,QAC9B,CAAC;AACD,cAAM,IAAI;AAEV,YAAI,SAAS;AACX,qBAAW,iBAAiB,qBAAqB,GAAG,CAAC,CAAC;AACtD,kBAAQ,IAAI,yCAAyC,GAAG,EAAE;AAAA,QAC5D,OAAO;AACL,qBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AAAA,QACvD;AAEA,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,QAAQ;AAAA,MAC7C,SAAS,YAAY;AAGnB,gBAAQ,KAAK,2DAA2D,UAAU;AAClF,cAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,cAAM,IAAI;AACV,mBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,MAC3C;AAAA,IACF,QAAQ;AACN,YAAM,aAAa,IAAI,MAAM,gCAAgC,CAAC;AAC9D,aAAO,EAAE,MAAM,MAAM,OAAO,MAAM;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,IAAI,KAAa,MAAmB,MAAe,SAAiC;AACxF,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,MAClD,aAAa;AAAA,MACb,oBAAoB,KAAK;AAAA,MACzB,GAAI,WAAW,EAAE,iBAAiB,QAAQ;AAAA,IAC5C,CAAC;AACD,QAAI;AAEF,WAAK,WAAW,EAAE,MAAM,CAAC,QAAQ;AAC/B,gBAAQ,KAAK,oCAAoC,GAAG;AAAA,MACtD,CAAC;AAED,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,YAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,MAAM,KAAK,IAAI;AACrB,cAAM,SAAsB;AAAA,UAC1B;AAAA,UACA;AAAA,UACA,MAAM,KAAK;AAAA,UACX,UAAU;AAAA,UACV,gBAAgB;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AACA,cAAM,UAAU,MAAM,IAAI,MAAM;AAChC,gBAAQ,YAAY,MAAM;AACxB,gBAAM,IAAI;AACV,kBAAQ;AAAA,QACV;AACA,gBAAQ,UAAU,MAAM;AACtB,gBAAM,aAAa,QAAQ,SAAS,IAAI,MAAM,kBAAkB,CAAC;AACjE,iBAAO,QAAQ,KAAK;AAAA,QACtB;AAAA,MACF,CAAC;AAGD,WAAK,aAAa,EAAE,MAAM,CAAC,QAAQ;AACjC,gBAAQ,KAAK,mDAAmD,GAAG;AAAA,MACrE,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,cAAQ,KAAK,uCAAuC,GAAG;AACvD,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACxE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,aAA4B;AACxC,UAAM,QAAQ,MAAM,KAAK,aAAa;AACtC,QAAI,CAAC,OAAO;AACV;AAAA,IACF;AAEA,UAAM,SAAS;AACf,UAAM,YAAY,aAAa;AAE/B,QAAI,MAAM,cAAc,IAAI;AAC1B,cAAQ,KAAK,8BAA8B,MAAM,YAAY,QAAQ,CAAC,CAAC,WAAW,YAAY,MAAM,SAAS,CAAC,MAAM,YAAY,MAAM,UAAU,CAAC,GAAG;AAGpJ,iBAAW,iBAAiB,6BAA6B,GAAG;AAAA,QAC1D,cAAc,OAAO,KAAK,MAAM,MAAM,WAAW,CAAC;AAAA,MACpD,CAAC;AAGD,UAAI,OAAO,gBAAgB;AACzB,YAAI;AACF,iBAAO,eAAe,KAAK;AAAA,QAC7B,SAAS,KAAK;AACZ,kBAAQ,KAAK,+CAA+C,GAAG;AAAA,QACjE;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,cAAc,IAAI;AAC1B,cAAQ,KAAK,oEAAoE;AAEjF,YAAM,cAAc,KAAK,IAAI,MAAM,aAAa,KAAK,KAAK,OAAO,IAAI;AACrE,YAAM,KAAK,YAAY,WAAW;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,KAA4B;AACvC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,OAAO,GAAG;AAChB,WAAG,aAAa,MAAM,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,QAAQ;AAAA
,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,MAAM;AACZ,WAAG,aAAa,MAAM,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,QAAQ;AAAA,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAgC;AACpC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,OAAO;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,SAAU,QAAQ,UAA4B,CAAC;AACrD,kBAAQ;AAAA,YACN,WAAW,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,YACpD,YAAY,OAAO;AAAA,YACnB,QAAQ,OAAO,IAAI,CAAC,OAAO;AAAA,cACzB,KAAK,EAAE;AAAA,cACP,MAAM,EAAE;AAAA,cACR,UAAU,IAAI,KAAK,EAAE,QAAQ;AAAA,YAC/B,EAAE;AAAA,UACJ,CAAC;AAAA,QACH;AACA,gBAAQ,UAAU,MAAM,QAAQ,EAAE,WAAW,GAAG,YAAY,GAAG,QAAQ,CAAC,EAAE,CAAC;AAAA,MAC7E,CAAC;AAAA,IACH,QAAQ;AACN,aAAO,EAAE,WAAW,GAAG,YAAY,GAAG,QAAQ,CAAC,EAAE;AAAA,IACnD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,eAA8B;AAClC,UAAM,SAAS;AACf,UAAM,UAAU,OAAO,gBAAgB;AAEvC,UAAM,QAAQ,MAAM,KAAK,SAAS;AAClC,QAAI,MAAM,aAAa,SAAS;AAC9B;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,YAAY;AACtC,UAAM,cAAc,MAAM,KAAK,YAAY,WAAW;AAEtD,QAAI,YAAY,SAAS,GAAG;AAC1B,cAAQ,IAAI,sCAAsC,YAAY,MAAM,mBAAmB,YAAY,WAAW,CAAC,EAAE;AAAA,IACnH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,YAAY,aAAwC;AACxD,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,0BAA0B;AAAA,MAC1D,4BAA4B;AAAA,IAC9B,CAAC;AAED,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAG5B,YAAM,SAAS,MAAM,IAAI,QAAuB,CAAC,YAAY;AAC3D,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,OAAO;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,MAAO,QAAQ,UAA4B,CAAC;AAElD,cAAI,KAAK,CAAC,GAAG,OAAO,EAAE,kBAAkB,EAAE,YAAY,MAAM,EAAE,kBAAkB,EAAE,YAAY,EAAE;AAChG,kBAAQ,GAAG;AAAA,QACb;AACA,gBAAQ,UAAU,MAAM,QAAQ,CAAC,CAAC;AAAA,MACpC,CAAC;AAED,YAAM,cAAwB,CAAC;AAC/B,UAAI,aAAa;AAGjB,iBAAW,SAAS,QAAQ;AAC1B,YAAI,cAAc,aAAa;AAC7B;AAAA,QACF;AAEA,cAAM,KAAK,OAAO,MAAM,GAAG;AAC3B,oBAAY,KAAK,MAAM,GAAG;AAC1B,sBAAc,MAAM;AAEpB,gBAAQ,IAAI,yBAAyB,MAAM,GAAG,KAAK,YAAY,MAAM,IAAI,CAAC,GAAG;AAAA,MAC/E;AAEA,YAAM,cAAc;AAAA,QAClB,wBAAwB;AAAA,QACxB,2BAA2B,YAAY;AAAA,MACzC,CAAC;AACD,YAAM,IAAI;AAGV,UAAI,aAAa,GAAG;AAClB,mBAAW,iBAAiB,wBAAwB,YAAY,QAAQ;AAAA,UACtE,aAAa,OAAO,UAAU;AAAA,QAChC,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,SAAS,KAAK;AACZ,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,cAAQ,KAAK,iCAAiC,GAAG;AACjD,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,eAA0C;AAC9C,QAAI,CAAC,WAAW,SAAS,UAAU;AACjC,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,WAAW,MAAM,UAAU,QAAQ,SAAS;AAClD,YAAM,YAAY,SAAS,SAAS;AACpC,YAAM,aAAa,SAAS,SAAS;AACrC,YAAM,cAAc,aAAa,IAAK,YAAY,aAAc,MAAM;AAEtE,YAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY,MAAM;AAAA,MACpB;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAGA,IAAI,gBAAmC;AAKhC,SAAS,gBAA4B;AAC1C,MAAI,CAAC,eAAe;AAClB,oBAAgB,IAAI,WAAW;AAAA,EACjC;AACA,SAAO;AACT;AAQA,IAAM,uBAAuB,MAAM,OAAO;AA0C1C,eAAsB,eACpB,KACA,mBACsB;AAEtB,MAAI,UAAiC,CAAC;AACtC,MAAI,OAAO,sBAAsB,YAAY;AAE3C,cAAU,EAAE,YAAY,kBAAkB;AAAA,EAC5C,WAAW,mBAAmB;AAE5B,cAAU;AAAA,EACZ;AAEA,QAAM,EAAE,SAAS,gBAAgB,OAAO,WAAW,IAAI;AAEvD,QAAM,QAAQ,cAAc;AAC5B,QAAM,WAAW,UAAU,YAAY,KAAK,OAAO,IAAI;AACvD,QAAM,YAAY,aAAa;AAC/B,QAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,IAClD,aAAa;AAAA,IACb,GAAI,WAAW,EAAE,iBAAiB,QAAQ;AAAA,IAC1C,wBAAwB;AAAA,EAC1B,CAAC;AAGD,MAAI,eAAe;AACjB,UAAM,aAAa,MAAM,MAAM,kBAAkB,UAAU,GAAG;AAE9D,QAAI,WAAW,QAAQ,CAAC,WAAW,OAAO;AACxC,cAAQ,IAAI,uCAAu
C,GAAG,MAAM,WAAW,KAAK,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AACrH,mBAAa,WAAW,KAAK,YAAY,WAAW,KAAK,UAAU;AACnE,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,yBAAyB;AAAA,QACzB,qBAAqB;AAAA,QACrB,oBAAoB,WAAW,KAAK;AAAA,MACtC,CAAC;AACD,YAAM,IAAI;AACV,aAAO,WAAW;AAAA,IACpB;AAEA,QAAI,WAAW,OAAO;AACpB,cAAQ,IAAI,yCAAyC,GAAG,EAAE;AAC1D,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,yBAAyB;AAAA,QACzB,qBAAqB;AAAA,MACvB,CAAC;AAAA,IAEH;AAAA,EAEF,OAAO;AAEL,UAAM,SAAS,MAAM,MAAM,IAAI,QAAQ;AACvC,QAAI,QAAQ;AACV,cAAQ,IAAI,2BAA2B,GAAG,MAAM,OAAO,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AAChG,mBAAa,OAAO,YAAY,OAAO,UAAU;AACjD,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,oBAAoB,OAAO;AAAA,MAC7B,CAAC;AACD,YAAM,IAAI;AACV,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,cAAc,EAAE,mBAAmB,MAAM,CAAC;AAChD,UAAQ,IAAI,sCAAsC,GAAG,EAAE;AAEvD,MAAI;AAEF,UAAM,WAAW,MAAM,MAAM,GAAG;AAChC,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,mBAAmB,GAAG,KAAK,SAAS,MAAM,EAAE;AAAA,IAC9D;AAEA,UAAM,gBAAgB,SAAS,QAAQ,IAAI,gBAAgB;AAC3D,UAAM,QAAQ,gBAAgB,SAAS,eAAe,EAAE,IAAI;AAC5D,UAAM,OAAO,SAAS,QAAQ,IAAI,MAAM,KAAK;AAG7C,UAAM,mBAAmB,QAAQ;AACjC,QAAI,kBAAkB;AACpB,cAAQ,IAAI,+CAA+C,QAAQ,OAAO,MAAM,QAAQ,CAAC,CAAC,oCAAoC;AAAA,IAChI;AAEA,QAAI,CAAC,SAAS,MAAM;AAClB,YAAMA,QAAO,MAAM,SAAS,YAAY;AACxC,UAAI,CAAC,kBAAkB;AACrB,cAAM,MAAM,IAAI,UAAUA,OAAM,MAAM,OAAO;AAAA,MAC/C;AACA,YAAM,cAAc;AAAA,QAClB,oBAAoBA,MAAK;AAAA,QACzB,6BAA6B,CAAC;AAAA,MAChC,CAAC;AACD,YAAM,IAAI;AACV,aAAOA;AAAA,IACT;AAGA,UAAM,SAAS,SAAS,KAAK,UAAU;AACvC,UAAM,SAAuB,CAAC;AAC9B,QAAI,SAAS;AAEb,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AACV,aAAO,KAAK,KAAK;AACjB,gBAAU,MAAM;AAChB,mBAAa,QAAQ,SAAS,MAAM;AAAA,IACtC;AAGA,UAAM,OAAO,IAAI,WAAW,MAAM;AAClC,QAAI,SAAS;AACb,eAAW,SAAS,QAAQ;AAC1B,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IAClB;AAEA,UAAM,SAAS,KAAK;AAGpB,QAAI,CAAC,kBAAkB;AACrB,YAAM,MAAM,IAAI,UAAU,QAAQ,MAAM,OAAO;AAC/C,cAAQ,IAAI,wBAAwB,GAAG,MAAM,OAAO,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AAAA,IAC/F;AAEA,UAAM,cAAc;AAAA,MAClB,oBAAoB,OAAO;AAAA,MAC3B,6BAA6B,CAAC;AAAA,IAChC,CAAC;AACD,UAAM,IAAI;AAEV,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,UAAM;AAAA,EACR;AACF;AAKA,eAAsB,cACpB,MACA,YACe;AACf,QAAM,QAAQ,cAAc;AAE5B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,MAAM,KAAK,CAAC;AAClB,iBAAa,GAAG,KAAK,QAAQ,GAAG;AAEhC,QAAI,MAAM,MAAM,IAAI,GAAG,GAAG;AACxB,cAAQ,IAAI,gCAAgC,GAAG,EAAE;AACjD;AAAA,IACF;AAEA,UAAM,eAAe,GAAG;AAAA,EAC1B;AAEA,eAAa,KAAK,QAAQ,KAAK,QAAQ,MAAM;AAC/C;AAKO,SAAS,YAAY,OAAuB;AACjD,MAAI,QAAQ,KAAM,QAAO,GAAG,KAAK;AACjC,MAAI,QAAQ,OAAO,KAAM,QAAO,IAAI,QAAQ,MAAM,QAAQ,CAAC,CAAC;AAC5D,MAAI,QAAQ,OAAO,OAAO,KAAM,QAAO,IAAI,QAAQ,OAAO,MAAM,QAAQ,CAAC,CAAC;AAC1E,SAAO,IAAI,QAAQ,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC;AACnD;;;ACr3BO,SAAS,cAAuB;AACrC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAC3C,SACE,mBAAmB,KAAK,EAAE;AAAA;AAAA,EAGzB,SAAS,KAAK,EAAE,KAAK,SAAS,KAAK,EAAE,KAAK,CAAC,qBAAqB,KAAK,EAAE;AAE5E;AAUO,SAAS,QAAiB;AAC/B,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAC3C,SAAO,mBAAmB,KAAK,EAAE;AACnC;AASO,SAAS,YAAqB;AACnC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,SAAO,WAAW,KAAK,UAAU,SAAS;AAC5C;AAYO,SAAS,WAAoB;AAClC,SAAO,MAAM,KAAK,UAAU;AAC9B;AAUO,SAAS,eAAwB;AACtC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,SAAO,SAAS,aAAa,UAAU,QAAQ;AACjD;AAYO,SAAS,wBAAwC;AAItD,MAAI,SAAS,KAAK,MAAM,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AASO,SAAS,eACd,YACA,iBACgB;AAChB,UAAQ,YAAY;AAAA,IAClB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,UAAI,CAAC,iBAAiB;AACpB,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO,kBAAkB,WAAW;AAAA,IAEtC,KAAK;AAAA,IACL;AAEE,YAAM,cAAc,sBAAsB;AAC1C,UAAI,gBAAgB,YAAY,CAAC,iBAAiB;AA
ChD,eAAO;AAAA,MACT;AACA,aAAO;AAAA,EACX;AACF;AAOO,SAAS,wBAAgC;AAC9C,MAAI,MAAM,GAAG;AAEX,WAAO;AAAA,EACT;AAEA,MAAI,UAAU,GAAG;AAEf,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAWO,SAAS,wBAAiC;AAE/C,MAAI,SAAS,GAAG;AACd,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAUO,SAAS,WAAoB;AAClC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAE3C,SAAO,SAAS,KAAK,EAAE,KAAK,CAAC,kCAAkC,KAAK,EAAE;AACxE;AAaO,SAAS,sBAA+B;AAC7C,SAAO,SAAS,KAAK,MAAM;AAC7B;AAUO,SAAS,+BAAwC;AACtD,MAAI,OAAO,WAAW,YAAa,QAAO;AAC1C,SAAO,uBAAuB,UAAU,6BAA6B;AACvE;AAaO,SAAS,qBAA8B;AAC5C,SAAO,MAAM,KAAK,6BAA6B;AACjD;AAaO,SAAS,yBAAkC;AAChD,SAAO,MAAM;AACf;;;ACpOA,IAAM,SAAS,aAAa,YAAY;AAGxC,IAAI,cAAgC;AACpC,IAAI,gBAAuC;AAG3C,IAAM,gBAAgB;AAUtB,eAAsB,oBAAsC;AAE1D,MAAI,MAAM,GAAG;AACX,WAAO,MAAM,6CAA6C;AAC1D,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,aAAa,GAAG;AACnB,WAAO,MAAM,2CAA2C;AACxD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAM,UAAU,IAAI,eAAe;AACnD,QAAI,CAAC,SAAS;AACZ,aAAO,MAAM,oCAAoC;AACjD,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,MAAM,QAAQ,cAAc;AAC3C,QAAI,CAAC,QAAQ;AACX,aAAO,MAAM,uCAAuC;AACpD,aAAO;AAAA,IACT;AAGA,WAAO,QAAQ;AAEf,WAAO,MAAM,qCAAqC;AAClD,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,MAAM,iDAAiD,EAAE,OAAO,IAAI,CAAC;AAC5E,WAAO;AAAA,EACT;AACF;AAOA,SAAS,cAAc,KAAsB;AAE3C,MAAI,IAAI,KAAK,YAAY;AAGzB,QAAM,aAAa,sBAAsB;AACzC,QAAM,cAAc,sBAAsB;AAE1C,MAAI,IAAI,KAAK,aAAa;AAC1B,MAAI,IAAI,KAAK,OAAO;AACpB,MAAI,IAAI,KAAK,QAAQ;AAErB,SAAO,KAAK,mBAAmB;AAAA,IAC7B;AAAA,IACA,MAAM;AAAA,IACN,OAAO;AAAA,IACP,UAAU,MAAM,IAAI,QAAQ,SAAS,IAAI,YAAY;AAAA,EACvD,CAAC;AACH;AAeA,eAAsB,eACpB,SACoB;AAEpB,MAAI,eAAe,kBAAkB,SAAS;AAC5C,WAAO;AAAA,EACT;AAGA,MAAI,eAAe,kBAAkB,SAAS;AAC5C,WAAO;AAAA,MACL,oCAAoC,aAAa,8BAC3B,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,6BAA6B,OAAO,aAAa;AAE7D,MAAI;AACF,QAAI,YAAY,QAAQ;AAEtB,YAAM,SAAS,MAAM,OAAO,iBAAiB;AAC7C,oBAAc,OAAO,WAAW;AAAA,IAClC,OAAO;AAEL,YAAM,SAAS,MAAM,OAAO,wBAAwB;AACpD,oBAAc,OAAO,WAAW;AAAA,IAClC;AAEA,oBAAgB;AAGhB,kBAAc,WAAW;AAEzB,WAAO,KAAK,oCAAoC,EAAE,QAAQ,CAAC;AAE3D,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,MAAM,oCAAoC,OAAO,YAAY;AAAA,MAClE,OAAO;AAAA,IACT,CAAC;AACD,UAAM,IAAI;AAAA,MACR,gCAAgC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IAClF;AAAA,EACF;AACF;AAWA,eAAsB,4BACpB,aAAgC,QACsB;AAEtD,QAAM,kBAAkB,MAAM,kBAAkB;AAGhD,QAAM,UAAU,eAAe,YAAY,eAAe;AAE1D,SAAO,KAAK,+BAA+B;AAAA,IACzC;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC;AAGD,QAAM,MAAM,MAAM,eAAe,OAAO;AAExC,SAAO,EAAE,KAAK,QAAQ;AACxB;AAUO,SAAS,kBACd,SACgB;AAChB,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,MACL,oBAAoB;AAAA,QAClB;AAAA,UACE,MAAM;AAAA,UACN,iBAAiB;AAAA;AAAA,QACnB;AAAA,MACF;AAAA,MACA,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAGA,SAAO;AAAA,IACL,oBAAoB,CAAC,MAAM;AAAA,IAC3B,wBAAwB;AAAA,EAC1B;AACF;AAWA,eAAsB,0BACpB,aACA,kBAIC;AACD,QAAM,MAAM,MAAM,eAAe,gBAAgB;AAGjD,QAAM,YAAY,IAAI,WAAW,WAAW;AAE5C,MAAI,qBAAqB,UAAU;AACjC,QAAI;AACF,YAAMC,WAAU,kBAAkB,QAAQ;AAC1C,YAAMC,WAAU,MAAM,IAAI,iBAAiB,OAAO,WAAWD,QAAO;AAEpE,aAAO,KAAK,qCAAqC;AACjD,aAAO,EAAE,SAAAC,UAAS,SAAS,SAAS;AAAA,IACtC,SAAS,KAAK;AACZ,aAAO,KAAK,wDAAwD;AAAA,QAClE,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IAEH;AAAA,EACF;AAGA,QAAM,UAAU,kBAAkB,MAAM;AACxC,QAAM,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,OAAO;AAEpE,SAAO,KAAK,mCAAmC;AAC/C,SAAO,EAAE,SAAS,SAAS,OAAO;AACpC;AAKO,SAAS,mBAA0C;AACxD,SAAO;AACT;AAKO,SAAS,sBAA+B;AAC7C,SAAO,gBAAgB;AACzB;;;AC/RO,IAAM,kBAAkB;AAAA,EAC7B;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAe;AAAA,EAAmB;AAAA,EACnE;AAAA,EAAa;AAAA,EAAmB;AAAA,EAChC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAmB;AAAA,EACpD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAkB;AAAA,EACrD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAiB;AAAA,EACpD;AAAA,EAAe;AAAA,EACf;AAAA,EAAc;AAAA,EAAW;AAAA,EAAW;AAAA,EACpC;AAAA,EAAc;AAAA,EAAmB;AAAA,EAAoB;AAAA,EAAkB;AAAA,EACvE;AAAA,EAAe;AAAA,EAAa;AAAA
,EAAsB;AAAA,EAClD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAkB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EACvD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAoB;AAAA,EACzD;AAAA,EAAoB;AAAA,EACpB;AAAA,EAAiB;AAAA,EAAkB;AACrC;AAGO,IAAM,oBAAoB;AAMjC,IAAM,wBAA4C;AAAA,EAChD,CAAC,WAAW,UAAU;AAAA,EACtB,CAAC,aAAa,YAAY;AAAA,EAC1B,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,oBAAoB,mBAAmB;AAAA,EACxC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,oBAAoB,mBAAmB;AAAA,EACxC,CAAC,sBAAsB,qBAAqB;AAAA,EAC5C,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,gBAAgB,eAAe;AAAA,EAChC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,gBAAgB,eAAe;AAAA,EAChC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,eAAe,cAAc;AAChC;AAGA,IAAM,wBAA4C,sBAAsB,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM;AAAA,EACtF,gBAAgB,QAAQ,CAAmC;AAAA,EAC3D,gBAAgB,QAAQ,CAAmC;AAC7D,CAAC,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM,MAAM,MAAM,MAAM,EAAE;AAOnC,SAAS,sBAAsB,OAAmC;AACvE,QAAM,SAAS,IAAI,aAAa,KAAK;AACrC,aAAW,CAAC,MAAM,IAAI,KAAK,uBAAuB;AAChD,UAAM,OAAO,MAAM,IAAI,IAAI,MAAM,IAAI,KAAK;AAC1C,WAAO,IAAI,IAAI;AACf,WAAO,IAAI,IAAI;AAAA,EACjB;AACA,SAAO;AACT;AASO,IAAM,wBAAwB;AAAA,EACnC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAe;AAAA,EAAmB;AAAA,EACnE;AAAA,EAAa;AAAA,EAAmB;AAAA,EAChC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAmB;AAAA,EACpD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAkB;AAAA,EACrD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAiB;AAAA,EACpD;AAAA,EAAe;AAAA,EACf;AAAA,EAAc;AAAA,EAAW;AAAA,EACzB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAsB;AAAA,EACtB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAkB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EACvD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAoB;AAAA,EACzD;AAAA,EAAoB;AAAA,EACpB;AAAA,EAAiB;AAAA,EAAkB;AAAA,EACnC;AAAA,EAAc;AAAA,EAAmB;AAAA,EAAoB;AACvD;AAQO,IAAM,yBAAmC,sBAAsB;AAAA,EACpE,CAAC,SAAS,gBAAgB,QAAQ,IAAsC;AAC1E;AAQO,SAAS,oBAAoB,OAAmC;AACrE,QAAM,SAAS,IAAI,aAAa,EAAE;AAClC,WAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AAC3B,WAAO,uBAAuB,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,EAC7C;AACA,SAAO;AACT;;;ACnFA,IAAMC,UAAS,aAAa,UAAU;AAyB/B,IAAM,YAAY;AAAA,EACvB;AAAA,EAAS;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1D;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC7C;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AACzD;AAmBO,IAAM,oBAAN,MAAwB;AAAA,EAW7B,YAAY,QAAiC;AAV7C,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAIpB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AACd,SAAK,qBAAqB,OAAO,sBAAsB;AAAA,EACzD;AAAA,EAQA,IAAI,UAAoC;AACtC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAA2B;AAC/B,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,iBAAiB;AAAA,MACjD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO,WAAW;AAAA,IACpD,CAAC;AAED,QAAI;AAIF,MAAAA,QAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,WAAW,OAAO,CAAC;AAEpF,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,WAAW,MAAM;AACxF,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAGvC,YAAI,CAAC,aAAa;AAChB,UAAAA,QAAO,KAAK,oDAAoD,EAAE,SAAS,CAAC;AAC5E,gBAAM,MAAM,OAAO,QAAQ;AAC3B,UAAAA,QAAO,KA
AK,uDAAuD,EAAE,SAAS,CAAC;AAC/E,wBAAc,MAAM,eAAe,QAAQ;AAAA,QAC7C;AAAA,MACF,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAGA,UAAI,CAAC,aAAa;AAChB,cAAM,WAAW,yBAAyB,QAAQ;AAClD,QAAAA,QAAO,MAAM,UAAU,EAAE,UAAU,SAAS,CAAC;AAC7C,cAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAGD,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,MAAAA,QAAO,KAAK,4CAA4C;AAAA,QACtD,mBAAmB,KAAK;AAAA,MAC1B,CAAC;AAGD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,KAAK,IAAK,iBAAiB,OAAO,WAAW,cAAc;AAEhF,MAAAA,QAAO,KAAK,qCAAqC;AAAA,QAC/C,mBAAmB,KAAK;AAAA,QACxB,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,QAAQ,KAAK,QAAQ;AAAA,QACrB,SAAS,KAAK,QAAQ;AAAA,MACxB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAID,MAAAA,QAAO,MAAM,oDAAoD;AACjE,YAAM,cAAc,YAAY,IAAI;AACpC,YAAM,cAAc,IAAI,aAAa,IAAK;AAC1C,YAAM,KAAK,MAAM,aAAa,CAAC;AAC/B,YAAM,eAAe,YAAY,IAAI,IAAI;AACzC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,cAAc,KAAK,MAAM,YAAY;AAAA,QACrC,SAAS,KAAK;AAAA,MAChB,CAAC;AACD,iBAAW,gBAAgB,2BAA2B,cAAc;AAAA,QAClE,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,MACJ,cACA,gBAAwB,GACC;AACzB,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAKA,UAAM,mBAAmB,IAAI,aAAa,YAAY;AAGtD,QAAI;AACJ,QAAI,iBAAiB,WAAW,MAAO;AACrC,cAAQ;AAAA,IACV,WAAW,iBAAiB,SAAS,MAAO;AAE1C,cAAQ,IAAI,aAAa,IAAK;AAC9B,YAAM,IAAI,kBAAkB,CAAC;AAAA,IAC/B,OAAO;AAEL,cAAQ,iBAAiB,MAAM,GAAG,IAAK;AAAA,IACzC;AAGA,UAAM,WAAW,IAAI,aAAa,KAAK,kBAAkB;AACzD,aAAS,KAAK,IAAI,eAAe,KAAK,qBAAqB,CAAC,CAAC,IAAI;AAIjE,UAAM,YAAY,IAAI,aAAa,KAAK;AACxC,UAAM,eAAe,IAAI,aAAa,QAAQ;AAE9C,UAAM,QAAQ;AAAA,MACZ,SAAS,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,IAAK,CAAC;AAAA,MAC9D,YAAY,IAAI,KAAK,IAAK,OAAO,WAAW,cAAc,CAAC,GAAG,KAAK,kBAAkB,CAAC;AAAA,IACxF;AAGA,WAAO,KAAK,eAAe,KAAK;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,QAAgC;AAChD,UAAM,SAAmB,CAAC;AAC1B,QAAI,YAAY;AAEhB,eAAW,SAAS,QAAQ;AAE1B,UAAI,SAAS;AACb,UAAI,SAAS,MAAM,CAAC;AACpB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAI,MAAM,CAAC,IAAI,QAAQ;AACrB,mBAAS,MAAM,CAAC;AAChB,mBAAS;AAAA,QACX;AAAA,MACF;AAGA,UAAI,WAAW,aAAa,WAAW,GAAG;AACxC,eAAO,KAAK,MAAM;AAAA,MACpB;AACA,kBAAY;AAAA,IACd;AAGA,WAAO,OAAO,IAAI,OAAK,UAAU,CAAC,MAAM,MAAM,MAAM,UAAU,CAAC,CAAC,EAAE,KAAK,EAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKQ,eACN,OACyB;AACzB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,UAClD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B;AAAA,QAC7B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAClC,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAC7C,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAE5C,gBAAM,YAAY,QAAQ,YAAY;AACtC,gBAAM,mBAAmB,QAAQ,aAAa;AAE9C,cAAI,CAAC,aAAa,CAAC,kBAAkB;AACnC,kBAAM,IAAI,MAAM,4BAA4B;AAAA,UAC9C;AAEA,gBAAM,UAAU,UAAU;AAC1B,gBAAM,iBAAiB,iBAAiB;AAGxC,gBAAM,eAAe,UAAU,KAAK,CAAC;AACrC,gBAAM,eAAe,iBAAiB,KAAK,CAAC;AAC5C,gBAAM,eAAe,UAAU,KAAK,CAAC;AACrC,gBAAM,iBAAiB,iBAAiB,KAAK,CAAC;AAG9C,gBAAM,YAA4B,CAAC;AACnC,gBAAM,cAA8B,CAAC;AAErC
,mBAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,sBAAU,KAAK,QAAQ,MAAM,IAAI,eAAe,IAAI,KAAK,YAAY,CAAC;AAAA,UACxE;AAEA,mBAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,kBAAM,WAAW,eAAe,MAAM,IAAI,iBAAiB,IAAI,KAAK,cAAc;AAElF,wBAAY,KAAK,sBAAsB,QAAQ,CAAC;AAAA,UAClD;AAGA,gBAAM,OAAO,KAAK,UAAU,SAAS;AAErC,UAAAA,QAAO,MAAM,uBAAuB;AAAA,YAClC,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD;AAAA,YACA;AAAA,YACA,YAAY,KAAK;AAAA,UACnB,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,wBAAwB;AAAA,YACxB,wBAAwB;AAAA,UAC1B,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA,WAAW;AAAA,YACX;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,aAA2B,MAA8C;AACrF,UAAM,QAAQ,gBAAgB,QAAQ,IAAI;AAC1C,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,MAAM,uBAAuB,IAAI,EAAE;AAAA,IAC/C;AACA,WAAO,YAAY,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAAA;AA1Wa,kBAoBJ,oBAAoB;;;AC9D7B,IAAMC,UAAS,aAAa,kBAAkB;AAK9C,IAAM,uBAAuB,oBAAI,IAAoB;AACrD,gBAAgB,QAAQ,CAAC,MAAM,UAAU;AACvC,uBAAqB,IAAI,MAAM,KAAK;AACtC,CAAC;AAKD,IAAM,iBAAiB,IAAI,IAAY,sBAAsB;;;AC1D7D,IAAMC,UAAS,aAAa,SAAS;AA8C9B,IAAM,mBAAN,MAAM,kBAAiB;AAAA,EAO5B,YAAY,SAAwB,CAAC,GAAG;AALxC,SAAQ,WAAsD;AAC9D,SAAQ,eAA8B;AACtC,SAAQ,YAAY;AACpB,SAAQ,gBAAwB;AAG9B,SAAK,SAAS;AAAA,MACZ,OAAO,OAAO,SAAS;AAAA,MACvB,cAAc,OAAO,gBAAgB;AAAA,MACrC,UAAU,OAAO,YAAY;AAAA,MAC7B,MAAM,OAAO,QAAQ;AAAA,MACrB,OAAO,OAAO,SAAS;AAAA,MACvB,QAAQ,OAAO,UAAU;AAAA,MACzB,gBAAgB,OAAO;AAAA,MACvB,OAAO,OAAO;AAAA,MACd,mBAAmB,OAAO,sBAAsB;AAAA;AAAA,IAClD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,oBAAsC;AACjD,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,YAAsG;AAC/G,QAAI,KAAK,WAAW;AAClB,MAAAA,QAAO,MAAM,mCAAmC;AAChD,aAAO,KAAK,WAAW;AACrB,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,MACvD;AACA;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,aAAa;AAGpC,QAAI,KAAK,aAAa,QAAQ,KAAK,iBAAiB,WAAW;AAC7D,MAAAA,QAAO,MAAM,wBAAwB,EAAE,OAAO,UAAU,CAAC;AACzD;AAAA,IACF;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gBAAgB;AAAA,MAChD,iBAAiB;AAAA,MACjB,iBAAiB,KAAK,OAAO;AAAA,MAC7B,kBAAkB,KAAK,OAAO;AAAA,IAChC,CAAC;AAED,QAAI;AACF,YAAM,YAAY,YAAY,IAAI;AAElC,MAAAA,QAAO,KAAK,iBAAiB;AAAA,QAC3B,OAAO;AAAA,QACP,OAAO,KAAK,OAAO;AAAA,QACnB,QAAQ,KAAK,OAAO;AAAA,QACpB,cAAc,KAAK,OAAO;AAAA,MAC5B,CAAC;AAGD,UAAI,KAAK,aAAa,QAAQ,KAAK,iBAAiB,WAAW;AAC7D,QAAAA,QAAO,MAAM,uBAAuB,EAAE,UAAU,KAAK,aAAa,CAAC;AACnE,cAAM,KAAK,SAAS,QAAQ;AAC5B,aAAK,WAAW;AAAA,MAClB;AAIA,YAAM,YAAY,MAAM,kBAAiB,kBAAkB;AAC3D,YAAM,SAAS,KAAK,OAAO,WAAW,SACjC,YAAY,WAAW,SACxB,KAAK,OAAO;AAEhB,MAAAA,QAAO,KAAK,qBAAqB,EAAE,QAAQ,UAAU,CAAC;AAItD,UAAI,mBAAmB;AACvB,UAAI,oBAAoB;AACxB,UAAI,kBAAkB;AACtB,UAAI,iBAAiB;AACrB,UAAI,eAAe;AACnB,UAAI,IAAI,SAAS,KAAK,MAAM;AAC1B,YAAI,SAAS,KAAK,KAAK,QAAQ;AAC/B,YAAI,SAAS,KAAK,KAAK,aAAa;AAAA,MACtC;AAEA,MAAAA,QAAO,KAAK,kCAAkC;AAAA,QAC5C,kBAAkB,IAAI;AAAA,QACtB,iBAAiB,IAAI;AAAA,QACrB,cAAc,IAAI;AAAA,MACpB,CAAC;AAGD,YAAM,kBAAuB;AAAA,QAC3B,OAAO,KAAK,OAAO;AAAA,QACnB;AAAA,QACA,mBAAmB;AAAA;AAAA,QAEnB,UAAU,UAAU,SAAS,gBAAgB,IAAI,kBAAkB;AAAA;AAAA,QAEnE,GAAI,KAAK,OAAO,SAAS,EAAE,OAAO,KAAK,OAAO,MAAM;AAAA,MACtD;AAGA,UAAI,WAAW,UAAU;AACvB,wBAAgB,kBAAkB;AAAA,UAChC,oBAAoB,CAAC,QAAQ;AAAA,QAC/B;AACA,QA
AAA,QAAO,KAAK,oCAAoC;AAAA,MAClD;AAEA,WAAK,WAAW,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAGA,WAAK,gBAAgB;AAErB,WAAK,eAAe;AACpB,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,OAAO;AAAA,QACP,YAAY,KAAK,MAAM,UAAU;AAAA,MACnC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,wBAAwB;AAAA,MAC1B,CAAC;AACD,YAAM,IAAI;AAAA,IACZ,SAAS,OAAO;AACd,YAAM,eAAe;AAAA,QACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,QAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,QAC5C;AAAA,MACF;AACA,MAAAA,QAAO,MAAM,wBAAwB,YAAY;AACjD,YAAM,aAAa,KAAc;AACjC,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WACJ,OACA,SAY8B;AAC9B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAKA,UAAM,YAAY,IAAI,aAAa,KAAK;AAExC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,MACtD,iBAAiB,UAAU;AAAA,MAC3B,oBAAoB,UAAU,SAAS;AAAA,MACvC,iBAAiB,KAAK;AAAA,IACxB,CAAC;AAED,QAAI;AACF,YAAM,aAAa,YAAY,IAAI;AAEnC,YAAM,mBAAmB,UAAU,SAAS;AAC5C,YAAM,eAAe,mBAAmB;AAExC,MAAAA,QAAO,MAAM,0BAA0B;AAAA,QACrC,cAAc,UAAU;AAAA,QACxB,iBAAiB,iBAAiB,QAAQ,CAAC;AAAA,QAC3C;AAAA,MACF,CAAC;AAMD,YAAM,oBAAyB;AAAA;AAAA,QAE7B,OAAO;AAAA,QACP,WAAW;AAAA;AAAA,QAGX,gBAAgB,SAAS,iBAAiB,eAAe,mBAAmB;AAAA,QAC5E,iBAAiB,SAAS,kBAAkB,eAAe,IAAI;AAAA;AAAA,QAG/D,mBAAmB,SAAS,oBAAoB;AAAA,QAChD,sBAAsB;AAAA,MACxB;AAGA,UAAI,KAAK,OAAO,cAAc;AAC5B,0BAAkB,WAAW,SAAS,YAAY,KAAK,OAAO;AAC9D,0BAAkB,OAAO,SAAS,QAAQ,KAAK,OAAO;AAAA,MACxD;AAGA,YAAM,YAAY,MAAM,KAAK,SAAS,WAAW,iBAAiB;AAGlE,YAAM,SAAS,MAAM,QAAQ,SAAS,IAAI,UAAU,CAAC,IAAI;AAEzD,YAAM,kBAAkB,YAAY,IAAI,IAAI;AAG5C,UAAI,cAAc,OAAO;AACzB,UAAI,KAAK,OAAO,mBAAmB;AACjC,sBAAc,KAAK,sBAAsB,WAAW;AAAA,MACtD;AAEA,YAAM,gBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,UAAU,KAAK,OAAO;AAAA,QACtB;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB;AAEA,MAAAA,QAAO,MAAM,0BAA0B;AAAA,QACrC,MAAM,cAAc;AAAA,QACpB,iBAAiB,KAAK,MAAM,eAAe;AAAA,QAC3C,aAAa,OAAO,QAAQ,UAAU;AAAA,MACxC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,6BAA6B;AAAA,QAC7B,uBAAuB,cAAc,KAAK;AAAA,MAC5C,CAAC;AACD,YAAM,IAAI;AAEV,aAAO;AAAA,IACT,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,oBAAoB,EAAE,MAAM,CAAC;AAC1C,YAAM,aAAa,KAAc;AACjC,YAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,oBACJ,OACA,SACA,UACA,SAM8B;AAC9B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gCAAgC;AAAA,MAChE,iBAAiB,MAAM;AAAA,MACvB,oBAAoB,MAAM,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AACF,YAAM,aAAa,YAAY,IAAI;AAEnC,MAAAA,QAAO,MAAM,oCAAoC;AAAA,QAC/C,cAAc,MAAM;AAAA,QACpB,kBAAkB,MAAM,SAAS,MAAO,QAAQ,CAAC;AAAA,MACnD,CAAC;AAOD,YAAM,oBAAyB;AAAA,QAC7B,OAAO;AAAA,QACP,WAAW;AAAA,QACX,gBAAgB,SAAS,gBAAgB;AAAA,QACzC,iBAAiB,SAAS,iBAAiB;AAAA,QAC3C,mBAAmB;AAAA,QACnB,sBAAsB;AAAA,MACxB;AAGA,UAAI,KAAK,OAAO,cAAc;AAC5B,0BAAkB,WAAW,SAAS,YAAY,KAAK,OAAO;AAC9D,0BAAkB,OAAO,SAAS,QAAQ,KAAK,OAAO;AAAA,MACxD;AAGA,YAAM,YAAY,MAAM,KAAK,SAAS,OAAO,iBAAiB;AAG9D,YAAM,SAAS,MAAM,QAAQ,SAAS,IAAI,UAAU,CAAC,IAAI;AAEzD,YAAM,kBAAkB,YAAY,IAAI,IAAI;AAG5C,UAAI,OAAO,UAAU,SAAS;AAC5B,mBAAW,SAAS,OAAO,QAAQ;AACjC,kBAAQ;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,UACnB,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,UAAU;AACZ,iBAAS,OAAO,IAAI;AAAA,MACtB;AAEA,MAAAA,QAAO,MAAM,oCAAoC;AAAA,QAC/C,MAAM,OAAO;AAAA,QACb,iBAAiB,KAAK,MAAM,eAAe;AAAA,QAC3C,aAAa,OAAO,QAAQ,UAAU;AAAA,MACxC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,6BAA6B;AAAA,QAC7B,wBAAwB,OAAO,QAAQ,UAAU;AAAA,MACnD,CAAC;AACD,YAAM,IAAI;AAEV,aAAO;AAAA,QACL,MAAM,OAAO;AAAA,QACb,UAAU,KAAK,OAAO;AAAA,QACtB;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,8BAA8B,EAAE,MAAM,CAAC;AACpD,YAAM,aAAa,KAAc;AACjC,YAAM,IAAI,MAAM,2CAA2C,KAAK,EAAE;AAAA,IACpE;AAAA,
EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,UAAU;AACjB,MAAAA,QAAO,MAAM,mBAAmB,EAAE,OAAO,KAAK,aAAa,CAAC;AAC5D,YAAM,KAAK,SAAS,QAAQ;AAC5B,WAAK,WAAW;AAChB,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAoB;AACtB,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAkB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAuB;AAE7B,QAAI,KAAK,OAAO,gBAAgB;AAC9B,aAAO,KAAK,OAAO;AAAA,IACrB;AAGA,QAAI,YAAY,0BAA0B,KAAK,OAAO,KAAK;AAG3D,QAAI,CAAC,KAAK,OAAO,cAAc;AAC7B,mBAAa;AAAA,IACf;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,sBAAsB,MAAsB;AAGlD,UAAM,UAAU,KAAK,QAAQ,iBAAiB,EAAE;AAGhD,WAAO,QAAQ,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAAA,EAC3C;AACF;;;ACxbA,IAAMC,UAAS,aAAa,cAAc;AASnC,IAAM,wBAAN,MAAsD;AAAA,EAU3D,YAAY,QAA4B;AATxC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAGpB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAkC;AACtC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,MACrD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO,WAAW;AAAA,IACpD,CAAC;AAED,QAAI;AAEF,YAAM,aAAa,KAAK,OAAO,WAAW;AAC1C,MAAAA,QAAO,KAAK,2BAA2B,EAAE,WAAW,CAAC;AAErD,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,UAAU;AACrE,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAEvC,YAAI,CAAC,aAAa;AAChB,UAAAA,QAAO,KAAK,oDAAoD,EAAE,SAAS,CAAC;AAC5E,gBAAM,MAAM,OAAO,QAAQ;AAC3B,wBAAc,MAAM,eAAe,QAAQ;AAAA,QAC7C;AAAA,MACF,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,MACrD;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,KAAK,IAAK,iBAAiB,OAAO,WAAW,cAAc;AAEhF,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,QAAQ,KAAK,QAAQ;AAAA,QACrB,SAAS,KAAK,QAAQ;AAAA,MACxB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAGD,MAAAA,QAAO,MAAM,0BAA0B;AACvC,YAAM,cAAc,YAAY,IAAI;AACpC,YAAM,cAAc,IAAI,aAAa,IAAK;AAC1C,YAAM,KAAK,MAAM,WAAW;AAC5B,YAAM,eAAe,YAAY,IAAI,IAAI;AACzC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,cAAc,KAAK,MAAM,YAAY;AAAA,QACrC,SAAS,KAAK;AAAA,MAChB,CAAC;AACD,iBAAW,gBAAgB,2BAA2B,cAAc;AAAA,QAClE,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,MACJ,cACA,gBACwB;AACxB,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAGA,UAAM,YAAY,IAAI,aAAa,YAAY;AAE/C,UAAM,QAAQ;AAAA,MACZ,kBAAkB,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,UAAU,MAAM,CAAC;AAAA,IACpF;AAEA,WAAO
,KAAK,eAAe,OAAO,UAAU,MAAM;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKQ,eACN,OACA,cACwB;AACxB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,UACtD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B;AAAA,QAC7B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAClC,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAC7C,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAE5C,gBAAM,mBAAmB,QAAQ,aAAa;AAE9C,cAAI,CAAC,kBAAkB;AACrB,kBAAM,IAAI,MAAM,uCAAuC;AAAA,UACzD;AAEA,gBAAM,iBAAiB,iBAAiB;AACxC,gBAAM,YAAY,iBAAiB,KAAK,CAAC;AACzC,gBAAM,iBAAiB,iBAAiB,KAAK,CAAC;AAG9C,gBAAM,cAA8B,CAAC;AACrC,mBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,kBAAM,WAAW,eAAe,MAAM,IAAI,iBAAiB,IAAI,KAAK,cAAc;AAClF,kBAAM,WAAW,oBAAoB,QAAQ;AAC7C,wBAAY,KAAK,sBAAsB,QAAQ,CAAC;AAAA,UAClD;AAEA,UAAAA,QAAO,MAAM,uBAAuB;AAAA,YAClC,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD;AAAA,YACA;AAAA,UACF,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,oBAAoB;AAAA,UACtB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;;;AC9QA,IAAMC,UAAS,aAAa,eAAe;AAkCpC,SAAS,cAAc,QAA6C;AACzE,QAAM,OAAO,OAAO,QAAQ;AAC5B,QAAM,kBAAkB,OAAO,mBAAmB;AAGlD,MAAI;AAEJ,MAAI,SAAS,OAAO;AAClB,aAAS;AACT,IAAAA,QAAO,KAAK,4CAA4C;AAAA,EAC1D,WAAW,SAAS,OAAO;AACzB,aAAS;AACT,IAAAA,QAAO,KAAK,uCAAuC;AAAA,EACrD,OAAO;AAEL,aAAS,oBAAoB;AAC7B,IAAAA,QAAO,KAAK,gCAAgC;AAAA,MAC1C;AAAA,MACA,UAAU,SAAS;AAAA,IACrB,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ;AACV,IAAAA,QAAO,KAAK,8CAA8C;AAC1D,WAAO,IAAI,sBAAsB;AAAA,MAC/B,UAAU,OAAO;AAAA,IACnB,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,IAAI,kBAAkB;AAAA,IACxC,UAAU,OAAO;AAAA,IACjB,SAAS,OAAO,cAAc;AAAA,IAC9B,oBAAoB,OAAO;AAAA,EAC7B,CAAC;AAED,MAAI,iBAAiB;AACnB,IAAAA,QAAO,KAAK,8CAA8C;AAC1D,WAAO,IAAI,oBAAoB,aAAa,MAAM;AAAA,EACpD;AAEA,EAAAA,QAAO,KAAK,0CAA0C;AACtD,SAAO;AACT;AAQA,IAAM,sBAAN,MAAoD;AAAA,EAKlD,YAAY,aAAgC,QAA6B;AAFzE,SAAQ,gBAAgB;AAGtB,SAAK,iBAAiB;AACtB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAkC;AACtC,QAAI;AACF,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC,SAAS,OAAO;AACd,MAAAA,QAAO,KAAK,oDAAoD;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,CAAC;AAGD,UAAI;AACF,cAAM,KAAK,eAAe,QAAQ;AAAA,MACpC,QAAQ;AAAA,MAER;AAGA,WAAK,iBAAiB,IAAI,sBAAsB;AAAA,QAC9C,UAAU,KAAK,OAAO;AAAA,MACxB,CAAC;AACD,WAAK,gBAAgB;AAErB,MAAAA,QAAO,KAAK,8CAA8C;AAC1D,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,cAA4B,eAAgD;AACtF,WAAO,KAAK,eAAe,MAAM,cAAc,aAAa;AAAA,EAC9D;AAAA,EAEA,MAAM,UAAyB;AAC7B,WAAO,KAAK,eAAe,QAAQ;AAAA,EACrC;AACF;;;AClHA,IAAMC,UAAS,aAAa,WAAW;AAkFhC,IAAM,qBAAN,MAAyB;AAAA,EAwB9B,YAAY,QAAyB;AAvBrC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAGpB;AAAA,SAAQ,QAAuB;AAU/B;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGxD;AAAA,SAAQ,kBAAkC,CAAC;AAC3C,SAAQ,cAAc;AAGpB,UAAM,aAAa,OAAO,cAAc;AAExC,QAAI,eAAe,OAAQ,eAAe,MAAO;AAC/C,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO,WAAW;AAAA,MAC3B;AAAA,MACA,WAAW,OAAO,aAAa;AAAA,MAC/B,uBAAuB,OAAO,yBAAyB;AAAA,IACzD;AAGA,SAAK,YAAY,eAAe,OAAQ,MAAM;AAC9C,SAAK,c
AAc,eAAe,OAAQ,KAAK;AAC/C,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAQ,KAAK,YAAY,KAAK,OAAO,aAAc;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAA8B;AAClC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,MAClD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO;AAAA,MACvC,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AAIF,MAAAA,QAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,QAAQ,CAAC;AAE1E,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,OAAO;AAC9E,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAAA,MACzC,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAID,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,cAAc;AAG1E,WAAK,MAAM;AAEX,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,OAAO;AAAA,MACzB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,QACzC,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,QAAQ,IAAI,KAAK,IAAI,OAAO,WAAW,IAAI,aAAa,IAAI,IAAI,GAAG,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC;AAEtF,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAEhD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAQ,YAA8C;AAC1D,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,WAAW,WAAW,KAAK,WAAW;AACxC,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,SAAS,iBAAiB,WAAW,MAAM;AAAA,MAEjF;AAAA,IACF;AAEA,WAAO,KAAK,eAAe,UAAU;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aACJ,OACA,UAOI,CAAC,GACqB;AAC1B,UAAM;AAAA,MACJ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,MACvB,cAAc;AAAA,IAChB,IAAI;AAEJ,SAAK,MAAM;AAEX,UAAM,WAA4B,CAAC;AACnC,UAAM,kBAAkB,KAAK,mBAAmB;AAChD,UAAM,kBAAkB,KAAK,KAAK,sBAAsB,eAAe;AACvE,UAAM,mBAAmB,KAAK,KAAK,uBAAuB,eAAe;AACzE,UAAM,YAAY,KAAK,KAAK,cAAc,eAAe;AAEzD,QAAI,WAAW;AACf,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,QAAI,eAAe;AACnB,QAAI,YAAY;AAGhB,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,MAAM,QAAQ,KAAK,KAAK,WAAW;AACvE,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,SAAS,MAAM,KAAK,QAAQ,KAAK;AACvC,YAAM,aAAa,IAAI,KAAK;AAC5B,YAAM,SAAS,aAAa;AAE5B,UAAI,OAAO,UAAU;AACnB,YAAI,CAAC,UAAU;AAEb,qBAAW;AACX,wBAAc,KAAK,IAAI,GAAG,SAAS,WAAW;AAC9C,yBAAe;AACf,yBAAe;AACf,sBAAY;AAAA,QACd;A
ACA,uBAAe;AACf;AACA,qBAAa,OAAO;AAAA,MACtB,WAAW,UAAU;AACnB;AACA,YAAI,gBAAgB,kBAAkB;AAEpC,cAAI,gBAAgB,iBAAiB;AACnC,qBAAS,KAAK;AAAA,cACZ,OAAO,cAAc;AAAA,cACrB,MAAM,SAAS,eAAe;AAAA,cAC9B,gBAAgB,YAAY;AAAA,YAC9B,CAAC;AAAA,UACH;AACA,qBAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,YAAY,gBAAgB,iBAAiB;AAC/C,YAAM,QAAS,MAAM,SAAS,KAAK,OAAO,aAAc;AACxD,eAAS,KAAK;AAAA,QACZ,OAAO,cAAc;AAAA,QACrB,KAAK,QAAQ;AAAA,QACb,gBAAgB,YAAY;AAAA,MAC9B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAA+B;AAClD,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,aAAO,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,IAC/B;AACA,WAAO,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,YAA8C;AAInE,UAAM,iBAAiB,IAAI,aAAa,UAAU;AAIlD,UAAM,uBAAuB;AAC7B,UAAM,MAAM,KAAK,aAAa,cAAc;AAC5C,QAAI,MAAM,sBAAsB;AAE9B,UAAI,CAAC,KAAK,aAAa;AACrB,aAAK,gBAAgB,KAAK,IAAI,aAAa,cAAc,CAAC;AAC1D,YAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,eAAK,gBAAgB,MAAM;AAAA,QAC7B;AAAA,MACF;AAEA,MAAAA,QAAO,MAAM,4CAA4C;AAAA,QACvD,KAAK,KAAK,MAAM,MAAM,GAAK,IAAI;AAAA,QAC/B,WAAW;AAAA,MACb,CAAC;AAED,aAAO,QAAQ,QAAQ;AAAA,QACrB,aAAa;AAAA,QACb,UAAU;AAAA,QACV,iBAAiB;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,UACrD,qBAAqB,KAAK;AAAA,UAC1B,wBAAwB,KAAK;AAAA,QAC/B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAGlC,gBAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,gBAAM,cAAc,IAAI,aAAa,SAAS;AAC9C,sBAAY,IAAI,KAAK,SAAS,CAAC;AAC/B,sBAAY,IAAI,gBAAgB,KAAK,WAAW;AAKhD,gBAAM,kBAAkB,IAAI,aAAa,WAAW;AACpD,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,iBAAiB,CAAC,GAAG,SAAS,CAAC;AACnF,gBAAM,WAAW,IAAI,KAAK,IAAK,OAAO,SAAS,cAAc,KAAK,CAAC,OAAO,KAAK,OAAO,UAAU,CAAC,CAAC,GAAG,CAAC,CAAC;AAIvG,gBAAM,YAAY,IAAI,aAAa,KAAK,MAAO,IAAoB;AACnE,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,KAAK,MAAO,IAAgB;AAE3F,gBAAM,QAAQ;AAAA,YACZ,SAAS;AAAA,YACT,SAAS;AAAA,YACT,MAAM;AAAA,UACR;AAGA,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAG7C,gBAAM,eAAe,QAAQ,QAAQ;AACrC,gBAAM,iBAAiB,QAAQ,QAAQ,KAAK,QAAQ,OAAO;AAE3D,cAAI,CAAC,cAAc;AACjB,kBAAM,IAAI,MAAM,sCAAsC;AAAA,UACxD;AAEA,gBAAM,cAAe,aAAa,KAAsB,CAAC;AAGzD,cAAI,gBAAgB;AAClB,iBAAK,QAAQ,IAAI,KAAK,IAAK;AAAA,cACzB;AAAA,cACA,IAAI,aAAa,eAAe,IAAoB;AAAA,cACpD,CAAC,GAAG,GAAG,GAAG;AAAA,YACZ;AAAA,UACF;AAGA,eAAK,UAAU,WAAW,MAAM,CAAC,KAAK,WAAW;AAEjD,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAC5C,gBAAM,WAAW,cAAc,KAAK,OAAO;AAG3C,cAAI;AAEJ,cAAI,YAAY,CAAC,KAAK,aAAa;AAEjC,8BAAkB,CAAC,GAAG,KAAK,eAAe;AAC1C,iBAAK,kBAAkB,CAAC;AACxB,YAAAA,QAAO,MAAM,yCAAyC;AAAA,cACpD,iBAAiB,gBAAgB;AAAA,cACjC,YAAY,KAAK,MAAM,gBAAgB,SAAS,KAAK,mBAAmB,CAAC;AAAA,YAC3E,CAAC;AAAA,UACH,WAAW,CAAC,YAAY,CAAC,KAAK,aAAa;AAEzC,iBAAK,gBAAgB,KAAK,IAAI,aAAa,UAAU,CAAC;AACtD,gBAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,mBAAK,gBAAgB,MAAM;AAAA,YAC7B;AAAA,UACF,WAAW,CAAC,YAAY,KAAK,aAAa;AAExC,iBAAK,kBAAkB,CAAC;AAAA,UAC1B;AAEA,eAAK,cAAc;AAEnB,UAAAA,QAAO,MAAM,2BAA2B;AAAA,YACtC,aAAa,KAAK,MAAM,cAAc,GAAI,IAAI;AAAA,YAC9C;AAAA,YACA,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,UACvD,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,yBAAyB;AAAA,YACzB,uBAAuB;AAAA,UACzB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA
,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AACA,SAAK,QAAQ;AAAA,EACf;AACF;AAAA;AAAA;AAAA;AAAA;AAxea,mBA+EJ,oBAAoB;;;AC7L7B,IAAMC,UAAS,aAAa,iBAAiB;AAG7C,IAAMC,iBAAgB;AAGtB,IAAM,kBAAkB;AACxB,IAAM,uBAAuB;AAmF7B,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyMf,IAAM,kBAAN,MAAsB;AAAA,EA2B3B,YAAY,QAAyB;AA1BrC,SAAQ,SAAwB;AAEhC,SAAQ,YAAY;AACpB,SAAQ,YAAY;AAapB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGxD;AAAA,SAAQ,kBAAkC,CAAC;AAC3C,SAAQ,cAAc;AAGtB;AAAA,SAAQ,mBAAuG,oBAAI,IAAI;AACvH,SAAQ,YAAY;AAGlB,UAAM,aAAa,OAAO,cAAc;AAExC,QAAI,eAAe,OAAQ,eAAe,MAAO;AAC/C,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB;AAAA,MACA,WAAW,OAAO,aAAa;AAAA,MAC/B,uBAAuB,OAAO,yBAAyB;AAAA,IACzD;AAGA,SAAK,YAAY,eAAe,OAAQ,MAAM;AAC9C,SAAK,cAAc,eAAe,OAAQ,KAAK;AAG/C,SAAK,QAAQ,IAAI,aAAa,IAAI,IAAI,GAAG;AACzC,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAyB;AAC3B,WAAO,KAAK,YAAY,SAAS;AAAA,EACnC;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAQ,KAAK,YAAY,KAAK,OAAO,aAAc;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAuB;AAC7B,UAAM,OAAO,IAAI,KAAK,CAAC,aAAa,GAAG,EAAE,MAAM,yBAAyB,CAAC;AACzE,UAAM,UAAU,IAAI,gBAAgB,IAAI;AACxC,UAAM,SAAS,IAAI,OAAO,OAAO;AAGjC,QAAI,gBAAgB,OAAO;AAG3B,WAAO,YAAY,CAAC,UAAyC;AAC3D,WAAK,oBAAoB,MAAM,IAAI;AAAA,IACrC;AAGA,WAAO,UAAU,CAAC,UAAU;AAC1B,MAAAD,QAAO,MAAM,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAErD,iBAAW,CAAC,EAAE,QAAQ,KAAK,KAAK,kBAAkB;AAChD,iBAAS,OAAO,IAAI,MAAM,iBAAiB,MAAM,OAAO,EAAE,CAAC;AAAA,MAC7D;AACA,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,QAA+B;AAEzD,UAAM,WAAW,KAAK,iBAAiB,IAAI,OAAO,IAAI;AACtD,QAAI,UAAU;AACZ,WAAK,iBAAiB,OAAO,OAAO,IAAI;AACxC,UAAI,OAAO,SAAS,SAAS;AAC3B,iBAAS,OAAO,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACzC,OAAO;AACL,iBAAS,QAAQ,MAAM;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAe,SAA2B,cAAsB,WAA+B;AACrG,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAI,CAAC,KAAK,QAAQ;AAChB,eAAO,IAAI,MAAM,wBAAwB,CAAC;AAC1C;AAAA,MACF;AAGA,YAAM,YAAY,WAAW,MAAM;AACjC,aAAK,iBAAiB,OAAO,YAAY;AACzC,eAAO,IAAI,MAAM,oCAAoC,SAAS,IAAI,CAAC;AAAA,MACrE,GAAG,SAAS;AAGZ,WAAK,iBAAiB,IAAI,cAAc;AAAA,QACtC,SAAS,CAAC,UAAU;AAClB,uBAAa,SAAS;AACtB,kBAAQ,KAAU;AAAA,QACpB;AAAA,QACA,QAAQ,CAAC,UAAU;AACjB,uBAAa,SAAS;AACtB,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAGD,WAAK,iBAAiB,IAAI,SAAS;AAAA,QACjC,SAAS,MAAM;AAAA,QAAC;AAAA;AAAA,QAChB,QAAQ,CAAC,UAAU;AACjB,uBAAa,SAAS;AACtB,eAAK,iBAAiB,OAAO,YAAY;AACzC,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAGD,WAAK
,OAAO,YAAY,OAAO;AAAA,IACjC,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAoC;AACxC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,wBAAwB;AAAA,MACxD,aAAa,KAAK,OAAO;AAAA,MACzB,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AACF,MAAAA,QAAO,KAAK,wBAAwB;AAGpC,WAAK,SAAS,KAAK,aAAa;AAEhC,MAAAA,QAAO,KAAK,8BAA8B;AAAA,QACxC,UAAU,KAAK,OAAO;AAAA,QACtB,YAAY,KAAK,OAAO;AAAA,MAC1B,CAAC;AAGD,YAAM,SAAS,MAAM,KAAK;AAAA,QAMxB;AAAA,UACE,MAAM;AAAA,UACN,UAAU,KAAK,OAAO;AAAA,UACtB,YAAY,KAAK,OAAO;AAAA,UACxB,WAAWC;AAAA,QACb;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,WAAK,YAAY;AAEjB,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAD,QAAO,KAAK,kCAAkC;AAAA,QAC5C,SAAS;AAAA,QACT,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,kBAAkB,KAAK,MAAM,OAAO,UAAU;AAAA,QAC9C,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,OAAO;AAAA,MACzB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB;AAAA,QACjB,sBAAsB;AAAA,QACtB,6BAA6B,OAAO;AAAA,MACtC,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,YAAY,OAAO;AAAA,QACnB,aAAa,OAAO;AAAA,QACpB,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AAGD,UAAI,KAAK,QAAQ;AACf,aAAK,OAAO,UAAU;AACtB,aAAK,SAAS;AAAA,MAChB;AAEA,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK,QAAQ;AACnC,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAGA,UAAM,SAAS,MAAM,KAAK;AAAA,MACxB,EAAE,MAAM,QAAQ;AAAA,MAChB;AAAA,MACA;AAAA,IACF;AAGA,SAAK,QAAQ,OAAO;AACpB,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAChD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAQ,YAA8C;AAC1D,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK,QAAQ;AACnC,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,QAAI,WAAW,WAAW,KAAK,WAAW;AACxC,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,SAAS,iBAAiB,WAAW,MAAM;AAAA,MAEjF;AAAA,IACF;AAEA,WAAO,KAAK,eAAe,UAAU;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,YAA8C;AAEnE,UAAM,iBAAiB,IAAI,aAAa,UAAU;AAElD,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,2BAA2B;AAAA,UAC3D,qBAAqB;AAAA,UACrB,wBAAwB,KAAK;AAAA,QAC/B,CAAC;AAED,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAGlC,gBAAM,SAAS,MAAM,KAAK;AAAA,YAMxB;AAAA,cACE,MAAM;AAAA,cACN,OAAO;AAAA,cACP,OAAO,KAAK;AAAA,cACZ,SAAS,KAAK;AAAA,YAChB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAGA,eAAK,QAAQ,OAAO;AAGpB,eAAK,UAAU,eAAe,MAAM,CAAC,KAAK,WAAW;AAErD,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAC5C,gBAAM,WAAW,OAAO,cAAc,KAAK,OAAO;AAGlD,cAAI;AAEJ,cAAI,YAAY,CAAC,KAAK,aAAa;AAEjC,8BAAkB,CAAC,GAAG,KAAK,eAAe;AAC1C,iBAAK,kBAAkB,CAAC;AACxB,YAAAA,QAAO,MAAM,yCAAyC;AAAA,cACpD,iBAAiB,gBAAgB;AAAA,cACjC,YAAY,KAAK,MAAM,gBAAgB,SAAS,KAAK,mBAAmB,CAAC;AAAA,YAC3E,CAAC;AAAA,UACH,WAAW,CAAC,YAAY,CAAC,KAAK,aAAa;AAEzC,iBAAK,gBAAgB,KAAK,IAAI,aAAa,cAAc,CAAC;AAC1D,gBAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,mBAAK,gBAAgB,MAAM;AAAA,YAC7B;AAAA,UACF,WAAW,CAAC,YAAY,KAAK,aAAa;AAExC,iBAAK,kBAAkB,CAAC;AAAA,UAC1B;AAEA,eAAK,cAAc;AAEnB,UAAAA,QAAO,MAAM,kCAAkC;AAAA,YAC7C,aAAa,KAAK,MAAM,OAAO,cAAc,GAAI,IAAI;AAAA,YACrD;AAAA,YACA,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD,cAAc,KAAK,MAAM,OAAO,kBAAkB,GAAG,IAAI;AAAA,UAC3D,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,gCAAgC,OAAO;AAAA,YACvC,yBAAyB,OAAO;AAAA,YAChC,uBAAuB;AAAA,UACzB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS;AAAA,
UACX,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS;AAAA,YACT,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN,aAAa,OAAO;AAAA,YACpB;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS;AAAA,YACT,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,QAAQ;AACf,UAAI;AAEF,cAAM,KAAK,YAAY,EAAE,MAAM,UAAU,GAAG,YAAY,oBAAoB;AAAA,MAC9E,QAAQ;AAAA,MAER;AAGA,WAAK,OAAO,UAAU;AACtB,WAAK,SAAS;AAAA,IAChB;AAEA,SAAK,YAAY;AACjB,SAAK,QAAQ,IAAI,aAAa,IAAI,IAAI,GAAG;AACzC,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAChD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AACnB,SAAK,iBAAiB,MAAM;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAuB;AAC5B,WAAO,OAAO,WAAW;AAAA,EAC3B;AACF;;;ACztBA,IAAME,UAAS,aAAa,iBAAiB;AAkGtC,SAAS,oBAA6B;AAE3C,MAAI,OAAO,WAAW,aAAa;AACjC,IAAAA,QAAO,MAAM,oDAAoD;AACjE,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,QAAQ,eAAe,OAAO,IAAI,oBAAoB,aAAa;AAC5E,IAAAA,QAAO,MAAM,uDAAuD;AACpE,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,SAAS,aAAa;AAC/B,IAAAA,QAAO,MAAM,oDAAoD;AACjE,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAgCO,SAAS,gBAAgB,QAAkD;AAChF,QAAM,kBAAkB,OAAO,mBAAmB;AAGlD,MAAI;AAEJ,MAAI,OAAO,cAAc,QAAW;AAElC,gBAAY,OAAO;AACnB,IAAAA,QAAO,MAAM,oCAAoC,EAAE,UAAU,CAAC;AAAA,EAChE,OAAO;AAEL,UAAM,kBAAkB,kBAAkB;AAC1C,UAAM,WAAW,SAAS;AAI1B,gBAAY,mBAAmB,CAAC;AAEhC,IAAAA,QAAO,MAAM,mCAAmC;AAAA,MAC9C;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAGA,MAAI,WAAW;AACb,IAAAA,QAAO,KAAK,4CAA4C;AACxD,UAAM,SAAS,IAAI,gBAAgB;AAAA,MACjC,UAAU,OAAO;AAAA,MACjB,YAAY,OAAO;AAAA,MACnB,WAAW,OAAO;AAAA,MAClB,uBAAuB,OAAO;AAAA,IAChC,CAAC;AAED,QAAI,iBAAiB;AAEnB,aAAO,IAAI,sBAAsB,QAAQ,MAAM;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAEA,EAAAA,QAAO,KAAK,2CAA2C;AACvD,SAAO,IAAI,mBAAmB,MAAM;AACtC;AAQA,IAAM,wBAAN,MAAwD;AAAA,EAKtD,YAAY,QAAyB,QAAgC;AAFrE,SAAQ,gBAAgB;AAGtB,SAAK,iBAAiB;AACtB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AAEnC,QAAI,CAAC,KAAK,SAAU,QAAO;AAC3B,WAAO,KAAK,gBAAiB,KAAK,eAAsC,UAAU;AAAA,EACpF;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAmD;AACvD,QAAI;AACF,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC,SAAS,OAAO;AACd,MAAAA,QAAO,KAAK,mDAAmD;AAAA,QAC7D,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,CAAC;AAGD,UAAI;AACF,cAAM,KAAK,eAAe,QAAQ;AAAA,MACpC,QAAQ;AAAA,MAER;AAGA,WAAK,iBAAiB,IAAI,mBAAmB,KAAK,MAAM;AACxD,WAAK,gBAAgB;AAErB,MAAAA,QAAO,KAAK,2CAA2C;AACvD,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,YAA8C;AAC1D,WAAO,KAAK,eAAe,QAAQ,UAAU;AAAA,EAC/C;AAAA,EAEA,QAA8B;AAC5B,WAAO,KAAK,eAAe,MAAM;AAAA,EACnC;AAAA,EAEA,MAAM,UAAyB;AAC7B,WAAO,KAAK,eAAe,QAAQ;AAAA,EACrC;AAAA,EAEA,eAAuB;AACrB,WAAO,KAAK,eAAe,aAAa;AAAA,EAC1C;AAAA,EAEA,qBAA6B;AAC3B,WAAO,KAAK,eAAe,mBAAmB;AAAA,EAChD;AACF;;;ACtQA,IAAMC,WAAS,aAAa,aAAa;AAalC,IAAM,qBAAqB,CAAC,WAAW,SAAS,SAAS,KAAK;AAmE9D,IAAM,uBAAN,MAA2B;AAAA,EAQhC,YAAY,QAA2B;AAPvC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AACpB,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO,WAAW;AAAA,MAC3B,YAAY,OAAO,cAAc;AAAA,IACnC;AAAA,EACF;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAAsC;AAC1C,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UA
AM,OAAO,WAAW,UAAU,oBAAoB;AAAA,MACpD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO;AAAA,IACzC,CAAC;AAED,QAAI;AACF,MAAAA,SAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,QAAQ,CAAC;AAE1E,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,OAAO;AAC9E,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,SAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,MAAAA,SAAO,KAAK,yBAAyB;AACrC,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AACzC,MAAAA,SAAO,KAAK,wBAAwB,EAAE,UAAU,SAAS,CAAC;AAE1D,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,SAAO,KAAK,+BAA+B,EAAE,SAAS,CAAC;AACvD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AACvC,QAAAA,SAAO,KAAK,2BAA2B,EAAE,MAAM,YAAY,YAAY,UAAU,EAAE,CAAC;AAAA,MACtF,OAAO;AACL,QAAAA,SAAO,KAAK,kCAAkC,EAAE,SAAS,CAAC;AAC1D,sBAAc,MAAM,eAAe,QAAQ;AAC3C,QAAAA,SAAO,KAAK,4BAA4B,EAAE,MAAM,YAAY,YAAY,UAAU,EAAE,CAAC;AAAA,MACvF;AAEA,MAAAA,SAAO,KAAK,mEAAmE;AAC/E,MAAAA,SAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,cAAc;AAE1E,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,SAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,YAAY,KAAK,OAAO;AAAA,QACxB,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,QACzC,YAAY,KAAK,OAAO;AAAA,MAC1B;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAAM,OAAiD;AAC3D,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,WAAO,KAAK,eAAe,KAAK;AAAA,EAClC;AAAA,EAEQ,eAAe,OAAiD;AAItE,UAAM,YAAY,IAAI,aAAa,KAAK;AAExC,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,UACrD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B,UAAU;AAAA,QACvC,CAAC;AAED,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAIlC,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,UAAU,MAAM,CAAC;AAGpF,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,EAAE,OAAO,YAAY,CAAC;AAK9D,gBAAM,eAAe,QAAQ,QAAQ;AACrC,gBAAM,mBAAmB,QAAQ,eAAe;AAEhD,cAAI,CAAC,cAAc;AACjB,kBAAM,IAAI;AAAA,cACR,yDAAyD,OAAO,KAAK,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,YAC1F;AAAA,UACF;AAGA,gBAAM,aAAa,aAAa;AAChC,gBAAM,SAAS,IAAI,aAAa,UAAU;AAG1C,gBAAM,QAAQ,KAAK,QAAQ,MAAM;AAGjC,gBAAM,gBAAkD;AAAA,YACtD,SAAS,MAAM,CAAC;AAAA,YAChB,OAAO,MAAM,CAAC;AAAA,YACd,OAAO,MAAM,CAAC;AAAA,YACd,KAAK,MAAM,CAAC;AAAA,UACd;AAGA,cAAI,SAAS;AACb,cAAI,UAAU,MAAM,CAAC;AACrB,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAI,MAAM,CAAC,IAAI,SAAS;AACtB,wBAAU,MAAM,CAAC;AACjB,uBAAS;AAAA,YACX;AAAA,UACF;AAEA,gBAAM,WAAyB;AAAA,YAC7B,SAAS,mBAAmB,MAAM;AAAA,YAClC,YAAY;AAAA,YACZ;AAAA,UACF;AAGA,cAAI,aAA6B,CAAC;AAClC,cAAI,YAAY;AAEhB,cAAI,kBAAkB;AACpB,kBAAM,gBAAgB,iBAAiB;AACvC,kBAAM,OAAO,iBAAiB;AAE9B,gBAAI,KAAK,WAAW,GAAG;AAErB,0BAAY,KAAK,CAAC;AAClB,oBAAM,eAAe,KAAK,CAAC;AAE3B,uBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,sBAAM,QAAQ,IAAI;AAClB,2BAAW,KAAK,IAAI,aAAa,cAAc,MAAM,OAAO,QAAQ,YAAY,CAAC,CAAC;AAAA,cACpF;AAAA,YACF;AAAA,UACF;AAKA,gBAAM,SAAyB,CAAC;AAChC,mBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,mBAAO,KAAK;AAAA,cACV,SAAS,SAAS;AAAA,cAClB,YAAY,SAAS;AAAA,cACrB,eAAe,EAAE,GAAG,cAAc;AAAA,YACpC,CAAC;AAAA,UACH;AAEA,gBAAM,kBAAkB,YAA
Y,IAAI,IAAI;AAE5C,UAAAA,SAAO,MAAM,+BAA+B;AAAA,YAC1C;AAAA,YACA,UAAU,SAAS;AAAA,YACnB,YAAY,KAAK,MAAM,SAAS,aAAa,GAAG;AAAA,YAChD,iBAAiB,KAAK,MAAM,eAAe;AAAA,UAC7C,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,wBAAwB;AAAA,YACxB,8BAA8B,SAAS;AAAA,UACzC,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,QAAQ,QAAoC;AAElD,QAAI,MAAM,OAAO,CAAC;AAClB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,OAAO,CAAC,IAAI,IAAK,OAAM,OAAO,CAAC;AAAA,IACrC;AAGA,UAAM,MAAM,IAAI,aAAa,OAAO,MAAM;AAC1C,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,CAAC,IAAI,KAAK,IAAI,OAAO,CAAC,IAAI,GAAG;AACjC,aAAO,IAAI,CAAC;AAAA,IACd;AAGA,UAAM,QAAQ,IAAI,aAAa,OAAO,MAAM;AAC5C,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,CAAC,IAAI,IAAI,CAAC,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAAA;AArUa,qBAgCJ,oBAAoB;;;ACrI7B,IAAMC,WAAS,aAAa,cAAc;AAsGnC,IAAM,0BAAN,MAAM,yBAAwB;AAAA,EAenC,YAAY,SAA6B,CAAC,GAAG;AAb7C,SAAQ,cAAiD;AACzD,SAAQ,cAAc;AACtB,SAAQ,YAAY;AACpB,SAAQ,kBAAkB;AAG1B;AAAA,SAAQ,kBAA0C,CAAC;AACnD,SAAQ,iBAAwC,CAAC;AAGjD;AAAA,SAAQ,eAAmE;AAC3E,SAAQ,eAAgD;AAGtD,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO,YAAY;AAAA,MAC7B,YAAY,OAAO,cAAc;AAAA,MACjC,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,iBAAiB,OAAO,mBAAmB;AAAA,IAC7C;AAEA,IAAAA,SAAO,MAAM,mCAAmC;AAAA,MAC9C,UAAU,KAAK,OAAO;AAAA,MACtB,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAuB;AAC5B,WAAO,6BAA6B;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAAsC;AAC7C,SAAK,gBAAgB,KAAK,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,UAAqC;AAC3C,SAAK,eAAe,KAAK,QAAQ;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,UAAsC;AAC9C,UAAM,QAAQ,KAAK,gBAAgB,QAAQ,QAAQ;AACnD,QAAI,UAAU,IAAI;AAChB,WAAK,gBAAgB,OAAO,OAAO,CAAC;AAAA,IACtC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAAqC;AAC5C,UAAM,QAAQ,KAAK,eAAe,QAAQ,QAAQ;AAClD,QAAI,UAAU,IAAI;AAChB,WAAK,eAAe,OAAO,OAAO,CAAC;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAuB;AAC3B,QAAI,KAAK,aAAa;AACpB,MAAAA,SAAO,KAAK,mBAAmB;AAC/B;AAAA,IACF;AAEA,QAAI,CAAC,yBAAwB,YAAY,GAAG;AAC1C,YAAM,QAAQ,IAAI;AAAA,QAChB;AAAA,MAGF;AACA,WAAK,UAAU,KAAK;AACpB,YAAM;AAAA,IACR;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,MACtD,mBAAmB,KAAK,OAAO;AAAA,MAC/B,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AAEF,YAAM,yBAAyB,OAAO,qBAAqB,OAAO;AAClE,UAAI,CAAC,wBAAwB;AAC3B,cAAM,IAAI,MAAM,yCAAyC;AAAA,MAC3D;AAEA,WAAK,cAAc,IAAI,uBAAuB;AAC9C,WAAK,YAAY,aAAa,KAAK,OAAO;AAC1C,WAAK,YAAY,iBAAiB,KAAK,OAAO;AAC9C,WAAK,YAAY,OAAO,KAAK,OAAO;AACpC,WAAK,YAAY,kBAAkB,KAAK,OAAO;AAG/C,WAAK,mBAAmB;AAGxB,WAAK,YAAY,MAAM;AACvB,WAAK,cAAc;AACnB,WAAK,YAAY,YAAY,IAAI;AACjC,WAAK,kBAAkB;AAEvB,MAAAA,SAAO,KAAK,8BAA8B;AAAA,QACxC,UAAU,KAAK,OAAO;AAAA,MACxB,CAAC;AAED,YAAM,IAAI;AAAA,IACZ,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,WAAK,UAAU,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AACxE,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAyC;
AAC7C,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,aAAa;AAC1C,MAAAA,SAAO,KAAK,yBAAyB;AACrC,aAAO;AAAA,QACL,MAAM,KAAK;AAAA,QACX,UAAU,KAAK,OAAO;AAAA,QACtB,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,mBAAmB;AAErD,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,eAAe;AACpB,WAAK,eAAe;AAEpB,UAAI;AACF,aAAK,YAAa,KAAK;AAAA,MAEzB,SAAS,OAAO;AACd,cAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,aAAK,cAAc;AACnB,eAAO,KAAK;AAAA,MACd;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,KAAK,eAAe,KAAK,aAAa;AACxC,WAAK,YAAY,MAAM;AACvB,WAAK,cAAc;AACnB,MAAAA,SAAO,KAAK,4BAA4B;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WAAW,QAAwD;AACvE,UAAM,IAAI;AAAA,MACR;AAAA,IAGF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,QAAI,KAAK,aAAa;AACpB,UAAI,KAAK,aAAa;AACpB,aAAK,YAAY,MAAM;AAAA,MACzB;AACA,WAAK,cAAc;AAAA,IACrB;AACA,SAAK,cAAc;AACnB,SAAK,kBAAkB,CAAC;AACxB,SAAK,iBAAiB,CAAC;AACvB,IAAAA,SAAO,MAAM,kCAAkC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAA2B;AACjC,QAAI,CAAC,KAAK,YAAa;AAEvB,SAAK,YAAY,WAAW,CAAC,UAAkC;AAC7D,YAAM,YAAY,aAAa;AAC/B,YAAM,OAAO,WAAW,UAAU,uBAAuB;AAEzD,UAAI;AAEF,iBAAS,IAAI,MAAM,aAAa,IAAI,MAAM,QAAQ,QAAQ,KAAK;AAC7D,gBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,gBAAM,cAAc,OAAO,CAAC;AAE5B,cAAI,aAAa;AACf,kBAAM,OAAO,YAAY;AACzB,kBAAM,UAAU,OAAO;AAGvB,gBAAI,SAAS;AACX,mBAAK,mBAAmB,OAAO;AAAA,YACjC;AAEA,kBAAM,eAAwC;AAAA,cAC5C,MAAM,UAAU,KAAK,gBAAgB,KAAK,IAAI;AAAA,cAC9C,UAAU,KAAK,OAAO;AAAA,cACtB,iBAAiB,YAAY,IAAI,IAAI,KAAK;AAAA,cAC1C;AAAA,cACA,YAAY,YAAY;AAAA,YAC1B;AAGA,iBAAK,WAAW,YAAY;AAE5B,YAAAA,SAAO,MAAM,iBAAiB;AAAA,cAC5B,MAAM,KAAK,UAAU,GAAG,EAAE;AAAA,cAC1B;AAAA,cACA,YAAY,YAAY;AAAA,YAC1B,CAAC;AAAA,UACH;AAAA,QACF;AAEA,cAAM,IAAI;AAAA,MACZ,SAAS,OAAO;AACd,cAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,QAAAA,SAAO,MAAM,kCAAkC,EAAE,MAAM,CAAC;AAAA,MAC1D;AAAA,IACF;AAEA,SAAK,YAAY,UAAU,CAAC,UAAuC;AACjE,YAAM,QAAQ,IAAI,MAAM,6BAA6B,MAAM,KAAK,MAAM,MAAM,OAAO,EAAE;AACrF,MAAAA,SAAO,MAAM,4BAA4B,EAAE,OAAO,MAAM,OAAO,SAAS,MAAM,QAAQ,CAAC;AACvF,WAAK,UAAU,KAAK;AAEpB,UAAI,KAAK,cAAc;AACrB,aAAK,aAAa,KAAK;AACvB,aAAK,eAAe;AACpB,aAAK,eAAe;AAAA,MACtB;AAAA,IACF;AAEA,SAAK,YAAY,QAAQ,MAAM;AAC7B,WAAK,cAAc;AACnB,MAAAA,SAAO,KAAK,4BAA4B;AAAA,QACtC,WAAW,KAAK,gBAAgB;AAAA,QAChC,YAAY,YAAY,IAAI,IAAI,KAAK;AAAA,MACvC,CAAC;AAGD,UAAI,KAAK,cAAc;AACrB,cAAM,SAAkC;AAAA,UACtC,MAAM,KAAK,gBAAgB,KAAK;AAAA,UAChC,UAAU,KAAK,OAAO;AAAA,UACtB,iBAAiB,YAAY,IAAI,IAAI,KAAK;AAAA,UAC1C,SAAS;AAAA,QACX;AACA,aAAK,aAAa,MAAM;AACxB,aAAK,eAAe;AACpB,aAAK,eAAe;AAAA,MACtB;AAAA,IACF;AAEA,SAAK,YAAY,UAAU,MAAM;AAC/B,MAAAA,SAAO,MAAM,uCAAuC;AAAA,IACtD;AAEA,SAAK,YAAY,gBAAgB,MAAM;AACrC,MAAAA,SAAO,MAAM,iBAAiB;AAAA,IAChC;AAEA,SAAK,YAAY,cAAc,MAAM;AACnC,MAAAA,SAAO,MAAM,cAAc;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,QAAuC;AACxD,eAAW,YAAY,KAAK,iBAAiB;AAC3C,UAAI;AACF,iBAAS,MAAM;AAAA,MACjB,SAAS,OAAO;AACd,QAAAA,SAAO,MAAM,4BAA4B,EAAE,MAAM,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,OAAoB;AACpC,eAAW,YAAY,KAAK,gBAAgB;AAC1C,UAAI;AACF,iBAAS,KAAK;AAAA,MAChB,SAAS,eAAe;AACtB,QAAAA,SAAO,MAAM,2BAA2B,EAAE,OAAO,cAAc,CAAC;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AACF;;;ACxcO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAQO,IAAM,sBAAsB;AAgB5B,SAAS,oBAAoB,UAA0B,CAAC,GAAiB;AAC9E,QAAM,SAAS,IAAI,aAAa,mBAAmB;AAEnD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,UAAM,MAAM,cAAc,QAAQ,IAAmB;AACrD,QAAI,OAAO,GAAG;AACZ,aAAO,GAAG,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,IAAM,iBAAiB;AAAA;AAAA,EAE5B,SAAS,oBAAoB,CAAC,CAAC;
AAAA;AAAA,EAG/B,OAAO,oBAAoB,EAAE,KAAK,KAAK,WAAW,IAAI,CAAC;AAAA;AAAA,EAGvD,KAAK,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAGrD,OAAO,oBAAoB,EAAE,OAAO,KAAK,SAAS,IAAI,CAAC;AAAA;AAAA,EAGvD,WAAW,oBAAoB,EAAE,WAAW,KAAK,MAAM,IAAI,CAAC;AAAA;AAAA,EAG5D,QAAQ,oBAAoB,EAAE,MAAM,KAAK,MAAM,IAAI,CAAC;AAAA;AAAA,EAGpD,WAAW,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAG3D,SAAS,oBAAoB,EAAE,KAAK,KAAK,WAAW,KAAK,YAAY,IAAI,CAAC;AAAA;AAAA,EAG1E,OAAO,oBAAoB,EAAE,aAAa,KAAK,SAAS,IAAI,CAAC;AAAA;AAAA,EAG7D,SAAS,oBAAoB,EAAE,YAAY,KAAK,KAAK,IAAI,CAAC;AAAA;AAAA,EAG1D,QAAQ,oBAAoB,EAAE,MAAM,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAGrD,eAAe,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AACjE;AAOO,SAAS,iBAAiB,MAAuC;AACtE,SAAO,eAAe,IAAI,EAAE,MAAM;AACpC;AAgBO,SAAS,cACd,UACc;AACd,QAAM,SAAS,IAAI,aAAa,mBAAmB;AACnD,MAAI,cAAc;AAElB,aAAW,EAAE,QAAQ,OAAO,KAAK,UAAU;AACzC,mBAAe;AACf,aAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,aAAO,CAAC,MAAM,OAAO,CAAC,KAAK,KAAK;AAAA,IAClC;AAAA,EACF;AAGA,MAAI,cAAc,GAAG;AACnB,aAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,aAAO,CAAC,KAAK;AAAA,IACf;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,YACd,MACA,IACA,GACc;AACd,QAAM,SAAS,IAAI,aAAa,mBAAmB;AACnD,QAAM,WAAW,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,CAAC,CAAC;AAE3C,WAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,WAAO,CAAC,KAAK,KAAK,CAAC,KAAK,MAAM,IAAI,aAAa,GAAG,CAAC,KAAK,KAAK;AAAA,EAC/D;AAEA,SAAO;AACT;AAKO,IAAM,oBAAN,MAAwB;AAAA,EAAxB;AACL,SAAQ,iBAAiB,IAAI,aAAa,mBAAmB;AAC7D,SAAQ,gBAAgB,IAAI,aAAa,mBAAmB;AAC5D,SAAQ,qBAAqB;AAC7B,SAAQ,qBAAqB;AAC7B,SAAQ,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA,EAK9B,IAAI,UAAwB;AAC1B,QAAI,KAAK,sBAAsB,GAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAGA,WAAO,YAAY,KAAK,gBAAgB,KAAK,eAAe,KAAK,kBAAkB;AAAA,EACrF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAA+B;AACjC,UAAM,aAAa,oBAAoB,OAAO;AAC9C,SAAK,cAAc,IAAI,UAAU;AACjC,SAAK,eAAe,IAAI,UAAU;AAClC,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAAiC;AACzC,UAAM,aAAa,iBAAiB,MAAM;AAC1C,SAAK,cAAc,IAAI,UAAU;AACjC,SAAK,eAAe,IAAI,UAAU;AAClC,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,SAAyB,YAA0B;AAC9D,SAAK,eAAe,IAAI,KAAK,OAAO;AACpC,SAAK,cAAc,IAAI,oBAAoB,OAAO,CAAC;AACnD,SAAK,qBAAqB;AAC1B,SAAK,sBAAsB,YAAY,IAAI;AAC3C,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,QAA2B,YAA0B;AACtE,SAAK,eAAe,IAAI,KAAK,OAAO;AACpC,SAAK,cAAc,IAAI,iBAAiB,MAAM,CAAC;AAC/C,SAAK,qBAAqB;AAC1B,SAAK,sBAAsB,YAAY,IAAI;AAC3C,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,SAAe;AACb,QAAI,KAAK,sBAAsB,EAAK;AAEpC,UAAM,UAAU,YAAY,IAAI,IAAI,KAAK;AACzC,SAAK,qBAAqB,KAAK,IAAI,GAAK,UAAU,KAAK,kBAAkB;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,kBAA2B;AAC7B,WAAO,KAAK,qBAAqB;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,eAAe,KAAK,CAAC;AAC1B,SAAK,cAAc,KAAK,CAAC;AACzB,SAAK,qBAAqB;AAAA,EAC5B;AACF;;;AC1OO,IAAM,mBAAN,cAA+B,aAAmD;AAAA,EAkCvF,YAAY,QAAyB;AACnC,UAAM;AAlCR,SAAS,OAAO;AAEhB,SAAQ,SAAyB;AACjC,SAAQ,aAA4B;AACpC,SAAQ,eAAe;AAGvB;AAAA,SAAQ,UAAmC;AAC3C,SAAQ,MAAiC;AACzC,SAAQ,MAAgC;AAExC,SAAQ,WAAuC;AAG/C;AAAA,SAAQ,KAAuB;AAC/B,SAAQ,sBAAsB;AAC9B,SAAiB,uBAAuB;AAGxC;AAAA,SAAQ,cAA8B,CAAC;AAGvC;AAAA,SAAQ,UAAiC,CAAC;AAC1C,SAAQ,gBAAsC;AAI9C;AAAA,SAAQ,aAAa;AACrB,SAAQ,4BAAoD;AAG5D;AAAA,SAAQ,aAAa,oBAAI,IAAkD;AAIzE,SAAK,kBAAkB;AACvB,SAAK,oBAAoB,IAAI,kBAAkB;AAAA,EACjD;AAAA,EAEA,IAAI,QAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAA2B;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,QAAsC;AAClD,SAAK,gBAAgB;AACrB,SAAK,aAAa,OAAO;AAEzB,QAAI;AAEF,YAAM,YAAY,MAAM,KAAK,aAAa,OAAO,MAAM;AAGvD,YAAM,QAAQ,IAAI;AAAA,QAChB,KAAK,YAAY;AAAA,QACjB,KAAK,QAAQ;AAAA,MACf,CAAC;AAGD,YAAM,KAAK,iBAAiB,WAAW,MAAM;AAE7C,WAAK,eAAe;AACpB,WAAK,SAAS,MAAM;AAEpB,WAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,YAAY
,SAAS,KAAK,KAAK,CAAC;AAAA,IACnF,SAAS,OAAO;AACd,WAAK,SAAS,OAAO;AACrB,WAAK,KAAK,oBAAoB;AAAA,QAC5B;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAEhC,SAAK,2BAA2B,MAAM;AAGtC,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,QAAQ;AACtB,WAAK,WAAW;AAAA,IAClB;AAGA,QAAI,KAAK,IAAI;AACX,WAAK,GAAG,MAAM,KAAM,mBAAmB;AACvC,WAAK,KAAK;AAAA,IACZ;AAGA,UAAM,QAAQ,IAAI;AAAA,MAChB,KAAK,SAAS,QAAQ;AAAA,MACtB,KAAK,KAAK,QAAQ;AAAA,MAClB,KAAK,KAAK,QAAQ;AAAA,IACpB,CAAC;AAED,SAAK,eAAe;AACpB,SAAK,SAAS,cAAc;AAE5B,SAAK,KAAK,qBAAqB,EAAE,QAAQ,oBAAoB,CAAC;AAAA,EAChE;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAwC;AAChD,QAAI,CAAC,KAAK,aAAc;AAGxB,QAAI,KAAK,YAAY;AACnB,WAAK,oBAAoB,KAAK,EAAE,KAAK,CAAC,qBAAqB;AACzD,YAAI,kBAAkB;AACpB,eAAK,UAAU;AAAA,QACjB;AAAA,MACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,gBAAQ,MAAM,wDAAwD,KAAK;AAAA,MAC7E,CAAC;AAAA,IAEH;AAGA,UAAM,UAAU,iBAAiB,eAC7B,QACA,KAAK,eAAe,KAAK;AAG7B,SAAK,YAAY,KAAK,OAAO;AAG7B,SAAK,sBAAsB;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,MAA6B;AAC1C,QAAI,CAAC,KAAK,gBAAgB,CAAC,KAAK,IAAI;AAClC,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAGA,SAAK,aAAa;AAAA,MAChB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAED,SAAK,SAAS,UAAU;AACxB,SAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAGxD,SAAK,GAAG,KAAK,KAAK,UAAU;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK;AAAA,MAChB,SAAS;AAAA,MACT,SAAS;AAAA,QACP,SAAS,KAAK,QAAQ,MAAM,GAAG;AAAA;AAAA,QAC/B,SAAS,MAAM,KAAK,KAAK,kBAAkB,OAAO;AAAA,MACpD;AAAA,IACF,CAAC,CAAC;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,YAAkB;AAChB,QAAI,CAAC,KAAK,WAAY;AAEtB,SAAK,KAAK,yBAAyB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAG5D,SAAK,2BAA2B,MAAM;AACtC,SAAK,4BAA4B;AAGjC,QAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,WAAK,GAAG,KAAK,KAAK,UAAU;AAAA,QAC1B,MAAM;AAAA,QACN,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,IAAI;AAAA,MACtB,CAAC,CAAC;AAAA,IACJ;AAEA,SAAK,aAAa;AAClB,SAAK,SAAS,WAAW;AAEzB,SAAK,KAAK,wBAAwB,EAAE,WAAW,KAAK,IAAI,GAAG,QAAQ,OAAO,CAAC;AAAA,EAC7E;AAAA,EAEA,aAAoC;AAClC,WAAO,CAAC,GAAG,KAAK,OAAO;AAAA,EACzB;AAAA,EAEA,eAAqB;AACnB,SAAK,UAAU,CAAC;AAChB,SAAK,KAAK,kBAAkB,EAAE,cAAc,EAAE,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI,CAAC,KAAK,MAAM,KAAK,GAAG,eAAe,UAAU,MAAM;AACrD,aAAO;AAAA,IACT;AAEA,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,YAAM,UAAU,WAAW,MAAM,QAAQ,KAAK,GAAG,GAAI;AAErD,YAAM,UAAU,CAAC,UAAwB;AACvC,cAAM,OAAO,KAAK,MAAM,MAAM,IAAI;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,uBAAa,OAAO;AACpB,eAAK,IAAI,oBAAoB,WAAW,OAAO;AAC/C,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF;AAEA,WAAK,IAAI,iBAAiB,WAAW,OAAO;AAC5C,WAAK,IAAI,KAAK,KAAK,UAAU,EAAE,MAAM,OAAO,CAAC,CAAC;AAAA,IAChD,CAAC;AAAA,EACH;AAAA;AAAA,EAIQ,SAAS,OAA6B;AAC5C,UAAM,gBAAgB,KAAK;AAC3B,SAAK,SAAS;AACd,SAAK,KAAK,gBAAgB,EAAE,OAAO,cAAc,CAAC;AAAA,EACpD;AAAA,EAEA,MAAc,aAAa,QAAuC;AAChE,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO,QAAQ;AAClD,QAAI,UAAU,OAAO,YAAY,KAAK,IAAI,IAAI,KAAO;AACnD,aAAO,OAAO;AAAA,IAChB;AAGA,QAAI,OAAO,YAAY,WAAW;AAChC,aAAO,OAAO,YAAY;AAAA,IAC5B;AAIA,UAAM,WAAW,KAAK,gBAAgB;AACtC,QAAI,SAAS,WAAW,OAAO,KAAK,SAAS,SAAS,WAAW,GAAG;AAClE,aAAO;AAAA,IACT;AAGA,UAAM,eAAe,SAAS,QAAQ,UAAU,UAAU,EAAE,QAAQ,SAAS,SAAS;AACtF,UAAM,WAAW,MAAM,MAAM,GAAG,YAAY,eAAe;AAAA,MACzD,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,UAAU,OAAO;AAAA,QACjB,QAAQ,OAAO,YAAY;AAAA,MAC7B,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,gBAAgB,SAAS,UAAU,EAAE;AAAA,IACvD;AAEA,UAAM,EAAE,OAAO,UAAU,IAAI,MAAM,SAAS,KAAK;AAEjD,SAAK,WAAW,IAAI,OAAO,UAAU;AAAA,MACnC;AAAA,MACA,WAAW,KAAK,IAAI,IAAI,YAAY;AAAA,IACtC,CAAC;AAED,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,cAA6B;AAEzC,UAAM,QAAQ,IAAI;AAAA;AAAA,OAEf,YAAY;AACX,aAAK,UAAU,IAAI,iBAAiB;AAAA,UAClC,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ,CAAC;AACD,cAAM,KAAK,QAAQ,KAAK;AAAA,MAC1B,
GAAG;AAAA;AAAA,OAEF,YAAY;AACX,aAAK,MAAM,IAAI,mBAAmB;AAAA,UAChC,UAAU;AAAA,UACV,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,WAAW;AAAA,QACb,CAAC;AACD,cAAM,KAAK,IAAI,KAAK;AAAA,MACtB,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,UAAyB;AAGrC,UAAM,SAAS,KAAK,gBAAgB,QAAQ,UAAU;AAEtD,SAAK,MAAM,IAAI,kBAAkB;AAAA,MAC/B,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAED,UAAM,KAAK,IAAI,KAAK;AAGpB,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA,EAEA,MAAc,eAA8B;AAC1C,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI,MAAM,yCAAyC;AAAA,IAC3D;AAEA,SAAK,WAAW,IAAI,oBAAoB;AAAA,MACtC,KAAK,KAAK;AAAA,MACV,YAAY;AAAA,MACZ,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,KAAK,SAAS,WAAW;AAG/B,SAAK,SAAS,GAAG,eAAe,CAAC,UAAwB;AAEvD,WAAK,KAAK,aAAa;AAAA,QACrB,aAAa;AAAA,QACb,KAAK,CAAC,SAAiB;AACrB,gBAAM,MAAO,gBAAsC,QAAQ,IAAI;AAC/D,iBAAO,OAAO,IAAI,MAAM,GAAG,IAAI;AAAA,QACjC;AAAA,QACA,WAAW,KAAK,IAAI;AAAA;AAAA,QACpB,aAAa;AAAA;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAED,SAAK,SAAS,GAAG,qBAAqB,MAAM;AAC1C,WAAK,aAAa;AAClB,WAAK,SAAS,MAAM;AACpB,WAAK,KAAK,oBAAoB,EAAE,YAAY,EAAE,CAAC;AAAA,IACjD,CAAC;AAED,SAAK,SAAS,GAAG,SAAS,CAAC,UAAiB;AAC1C,cAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAK,KAAK,oBAAoB;AAAA,QAC5B;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,iBAAiB,WAAmB,QAAsC;AACtF,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAM,QAAQ,IAAI,IAAI,GAAG,KAAK,gBAAgB,SAAS,QAAQ,QAAQ,IAAI,CAAC,KAAK;AACjF,YAAM,aAAa,IAAI,aAAa,OAAO,SAAS;AACpD,YAAM,aAAa,IAAI,eAAe,OAAO,OAAO,WAAW;AAE/D,WAAK,KAAK,IAAI,UAAU,MAAM,SAAS,CAAC;AAExC,WAAK,GAAG,SAAS,MAAM;AAErB,aAAK,IAAI,KAAK,KAAK,UAAU;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,UAAU,OAAO,OAAO;AAAA,UACxB,cAAc,OAAO;AAAA,QACvB,CAAC,CAAC;AAAA,MACJ;AAEA,WAAK,GAAG,YAAY,CAAC,UAAU;AAC7B,aAAK,uBAAuB,KAAK,MAAM,MAAM,IAAI,CAAC;AAAA,MACpD;AAEA,WAAK,GAAG,UAAU,MAAM;AACtB,eAAO,IAAI,MAAM,6BAA6B,CAAC;AAAA,MACjD;AAEA,WAAK,GAAG,UAAU,CAAC,UAAU;AAC3B,aAAK,iBAAiB,KAAK;AAAA,MAC7B;AAGA,YAAM,cAAc,WAAW,MAAM;AACnC,eAAO,IAAI,MAAM,cAAc,CAAC;AAAA,MAClC,GAAG,GAAK;AAER,YAAM,cAAc,CAAC,UAAwB;AAC3C,cAAM,OAAO,KAAK,MAAM,MAAM,IAAI;AAClC,YAAI,KAAK,SAAS,gBAAgB;AAChC,uBAAa,WAAW;AACxB,eAAK,IAAI,oBAAoB,WAAW,WAAW;AACnD,kBAAQ;AAAA,QACV,WAAW,KAAK,SAAS,eAAe;AACtC,uBAAa,WAAW;AACxB,iBAAO,IAAI,MAAM,KAAK,OAAO,CAAC;AAAA,QAChC;AAAA,MACF;AAEA,WAAK,GAAG,iBAAiB,WAAW,WAAW;AAAA,IACjD,CAAC;AAAA,EACH;AAAA,EAEQ,uBAAuB,MAAqC;AAClE,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,aAAK,SAAS,UAAU;AACxB,aAAK,aAAa;AAClB,aAAK,KAAK,qBAAqB;AAAA,UAC7B,MAAM,KAAK;AAAA,UACX,SAAS,KAAK;AAAA,QAChB,CAAC;AAED,YAAI,KAAK,SAAS;AAChB,eAAK,kBAAkB;AAAA,YACrB,EAAE,CAAC,KAAK,OAAiB,GAAG,IAAI;AAAA,YAChC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,UAAU;AACjB,eAAK,SAAS,MAAM;AAAA,QACtB;AACA;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,qBAAqB;AAAA,UAC7B,MAAM,KAAK;AAAA,UACX,QAAQ,KAAK;AAAA,QACf,CAAC;AACD;AAAA,MAEF,KAAK;AAEH,YAAI,KAAK,SAAS,KAAK,UAAU;AAC/B,gBAAM,YAAY,KAAK,oBAAoB,KAAK,KAAe;AAC/D,gBAAM,QAAQ,IAAI,WAAW,SAAS;AACtC,eAAK,SAAS,aAAa,KAAK,EAAE,MAAM,CAAC,UAAU;AACjD,oBAAQ,MAAM,qCAAqC,KAAK;AAAA,UAC1D,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAEH,YAAI,KAAK,UAAU;AACjB,eAAK,SAAS,IAAI,EAAE,MAAM,CAAC,UAAU;AACnC,oBAAQ,MAAM,mCAAmC,KAAK;AAAA,UACxD,CAAC;AAAA,QACH;AAEA;AAAA,MAEF,KAAK;AACH,aAAK,aAAa;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,KAAK;AAAA,UACd,WAAW,KAAK,IAAI;AAAA,UACpB,SAAS,KAAK;AAAA,QAChB,CAAC;AACD,aAAK,KAAK,mBAAmB;AAAA,UAC3B,UAAU,KAAK;AAAA,UACf,YAAY,KAAK,cAAwB;AAAA,QAC3C,CAAC;AACD;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,kBAAkB;AAAA,UAC1B,cAAc,KAAK;AAAA,UACnB,YAAY,KAAK;AAAA,QACnB,CAAC;AACD;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,oBAAoB;AAAA,UAC5B,OAAO,IAAI,MAAM,KAAK,OAAiB;AAAA,UACvC,aAAc,KAAK,eAA2B;AAAA,QAChD,CAAC;AACD;AAAA,IACJ;AAAA,EACF;AAAA,EAEQ,wBAA8B;AAIpC,QAAI,KAAK,YAAY,WAAW,EAAG;AAGnC,UAAM,cAAc,KAAK,YAAY,OAAO,CAACC,MAAK,QAAQA,OAAM,IAAI,QAAQ,CAAC;AAI7E,QAAI,c
AAc,IAAM;AAExB,UAAM,QAAQ,IAAI,aAAa,WAAW;AAC1C,QAAI,SAAS;AACb,eAAW,OAAO,KAAK,aAAa;AAClC,YAAM,IAAI,KAAK,MAAM;AACrB,gBAAU,IAAI;AAAA,IAChB;AACA,SAAK,cAAc,CAAC;AAIpB,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,aAAO,MAAM,CAAC,IAAI,MAAM,CAAC;AAAA,IAC3B;AACA,UAAM,MAAM,KAAK,KAAK,MAAM,MAAM,MAAM;AAGxC,QAAI,MAAM,MAAM;AACd,cAAQ,MAAM,qCAAqC,EAAE,KAAK,SAAS,MAAM,OAAO,CAAC;AACjF;AAAA,IACF;AAGA,QAAI,KAAK,SAAS;AAChB,WAAK,SAAS,WAAW;AACzB,WAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAExD,WAAK,QAAQ,WAAW,KAAK,EAAE,KAAK,CAAC,WAAW;AAC9C,aAAK,KAAK,yBAAyB;AAAA,UACjC,MAAM,OAAO;AAAA,UACb,YAAY;AAAA,QACd,CAAC;AACD,aAAK,KAAK,mBAAmB,EAAE,WAAW,KAAK,IAAI,GAAG,YAAY,OAAO,gBAAgB,CAAC;AAG1F,cAAM,YAAY,OAAO,KAAK,KAAK;AACnC,YAAI,aAAa,CAAC,UAAU,SAAS,eAAe,GAAG;AACrD,eAAK,SAAS,SAAS,EAAE,MAAM,CAAC,UAAU;AACxC,oBAAQ,MAAM,gCAAgC,KAAK;AAAA,UACrD,CAAC;AAAA,QACH;AAAA,MACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,gBAAQ,MAAM,oCAAoC,KAAK;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,oBAAoB,OAAoD;AAEpF,UAAM,UAAU,iBAAiB,eAC7B,QACA,KAAK,eAAe,KAAK;AAG7B,QAAI,KAAK,KAAK;AAEZ,YAAM,YAAY,KAAK,IAAI,aAAa;AAGxC,eAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,QAAQ,KAAK,WAAW;AAC/D,cAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,SAAS;AAC5C,cAAM,SAAS,MAAM,KAAK,IAAI,QAAQ,KAAK;AAG3C,YAAI,OAAO,UAAU;AACnB,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAGA,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,aAAO,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,IAC/B;AACA,UAAM,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM;AAC1C,WAAO,MAAM;AAAA,EACf;AAAA,EAEQ,eAAe,OAAiC;AACtD,UAAM,UAAU,IAAI,aAAa,MAAM,MAAM;AAC7C,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAQ,CAAC,IAAI,MAAM,CAAC,IAAI;AAAA,IAC1B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,oBAAoB,QAA6B;AACvD,UAAM,eAAe,KAAK,MAAM;AAChC,UAAM,QAAQ,IAAI,WAAW,aAAa,MAAM;AAChD,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,CAAC,IAAI,aAAa,WAAW,CAAC;AAAA,IACtC;AACA,WAAO,MAAM;AAAA,EACf;AAAA,EAEQ,aAAa,SAAoC;AACvD,SAAK,QAAQ,KAAK,OAAO;AACzB,SAAK,KAAK,kBAAkB,EAAE,cAAc,KAAK,QAAQ,OAAO,CAAC;AAAA,EACnE;AAAA,EAEQ,iBAAiB,OAAyB;AAChD,SAAK,eAAe;AAEpB,QAAI,MAAM,SAAS,KAAM;AAEvB,UAAI,KAAK,sBAAsB,KAAK,sBAAsB;AACxD,aAAK;AACL,mBAAW,MAAM;AACf,cAAI,KAAK,eAAe;AACtB,iBAAK,QAAQ,KAAK,aAAa,EAAE,MAAM,MAAM;AAAA,YAE7C,CAAC;AAAA,UACH;AAAA,QACF,GAAG,KAAK,IAAI,GAAG,KAAK,mBAAmB,IAAI,GAAI;AAAA,MACjD,OAAO;AACL,aAAK,SAAS,OAAO;AACrB,aAAK,KAAK,oBAAoB;AAAA,UAC5B,OAAO,IAAI,MAAM,mCAAmC;AAAA,UACpD,aAAa;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,KAAK,qBAAqB,EAAE,QAAQ,MAAM,UAAU,oBAAoB,CAAC;AAAA,EAChF;AACF;;;AC7oBA,IAAM,0BAAN,MAA6D;AAAA,EAW3D,YACE,QACA,SACA;AARF,SAAQ,WAAkC,CAAC;AAC3C,SAAQ,WAAW,oBAAI,IAAoB;AAQzC,SAAK,YAAY,OAAO;AACxB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,YAAY,KAAK,IAAI;AAC1B,SAAK,kBAAkB,KAAK,IAAI;AAChC,SAAK,qBAAqB,IAAI,kBAAkB;AAEhD,QAAI,OAAO,SAAS;AAClB,WAAK,mBAAmB,UAAU,OAAO,OAAkE;AAAA,IAC7G;AAAA,EACF;AAAA,EAEA,IAAI,UAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,SAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,QAAwB;AAC1B,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,CAAC,GAAG,KAAK,QAAQ;AAAA,EAC1B;AAAA,EAEA,IAAI,UAA0B;AAC5B,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,IAAI,iBAAyB;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,SAAS,QAAQ,KAAK,OAAO;AACxC,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,MAAM,MAAqB;AACzB,UAAM,KAAK,SAAS,WAAW;AAAA,EACjC;AAAA,EAEA,UAAU,OAAwC;AAChD,SAAK,SAAS,UAAU,KAAK;AAC7B,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,MAAM,SAAS,MAA6B;AAC1C,UAAM,KAAK,SAAS,SAAS,IAAI;AACjC,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,YAAkB;AAChB,SAAK,SAAS,UAAU;AACxB,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,WAAW,SAA+B;AACxC,SAAK,mBAAmB,IAAI,OAAO;AAAA,EACrC;AAAA,EAEA,WAAW,KAAa,OAAqB;AAC3C,SAAK
,SAAS,IAAI,KAAK,KAAK;AAAA,EAC9B;AAAA,EAEA,cAAc,KAAmB;AAC/B,SAAK,SAAS,OAAO,GAAG;AAAA,EAC1B;AAAA,EAEA,aAAqC;AACnC,WAAO,OAAO,YAAY,KAAK,QAAQ;AAAA,EACzC;AAAA,EAEA,SAA0B;AACxB,WAAO;AAAA,MACL,WAAW,KAAK;AAAA,MAChB,UAAU,KAAK,QAAQ,OAAO;AAAA,MAC9B,aAAa,KAAK,QAAQ,OAAO;AAAA,MACjC,SAAS,KAAK;AAAA,MACd,SAAS,OAAO,YAAY,KAAK,QAAQ;AAAA,MACzC,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,gBAAgB,KAAK;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,OAAO,UAAiC;AACtC,SAAK,WAAW,CAAC,GAAG,SAAS,OAAO;AACpC,SAAK,WAAW,IAAI,IAAI,OAAO,QAAQ,SAAS,OAAO,CAAC;AACxD,SAAK,kBAAkB,SAAS;AAAA,EAClC;AAAA,EAEA,cAAoB;AAClB,SAAK,WAAW,KAAK,SAAS,WAAW;AAAA,EAC3C;AACF;AAKO,IAAM,2BAAN,cAAuC,aAAiC;AAAA,EAgB7E,YAAY,QAA4B;AACtC,UAAM;AAVR;AAAA,SAAQ,WAAW,oBAAI,IAAqC;AAG5D;AAAA,SAAQ,UAAU,oBAAI,IAA0B;AAGhD;AAAA,SAAQ,sBAA6D;AACrE,SAAiB,2BAA2B;AAI1C,SAAK,SAAS;AAAA,MACZ,qBAAqB;AAAA,MACrB,YAAY;AAAA,MACZ,GAAG;AAAA,IACL;AAGA,SAAK,UAAU,IAAI,iBAAiB,OAAO,OAAO;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAA4B;AACzC,SAAK,QAAQ,IAAI,OAAO,UAAU,MAAM;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,UAAwB;AACvC,SAAK,QAAQ,OAAO,QAAQ;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,UAA4C;AACpD,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,UACA,UAAkC,CAAC,GACL;AAC9B,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,qBAAqB,QAAQ,EAAE;AAAA,IACjD;AAEA,UAAM,YAAY,QAAQ,aAAa,KAAK,kBAAkB;AAE9D,UAAM,gBAA+B;AAAA,MACnC;AAAA,MACA;AAAA,MACA,cAAc,QAAQ;AAAA,MACtB,OAAO,QAAQ;AAAA,MACf,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ;AAAA,IACpB;AAEA,UAAM,UAAU,IAAI,wBAAwB,eAAe,KAAK,OAAO;AAEvE,SAAK,SAAS,IAAI,WAAW,OAAO;AAGpC,SAAK,qBAAqB,KAAK,SAAS,SAAS;AAGjD,UAAM,QAAQ,MAAM;AAEpB,SAAK,KAAK,mBAAmB,EAAE,WAAW,SAAS,CAAC;AAEpD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,WAAkC;AACjD,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,SAAS;AACX,YAAM,QAAQ,IAAI;AAClB,WAAK,SAAS,OAAO,SAAS;AAC9B,WAAK,KAAK,iBAAiB,EAAE,WAAW,QAAQ,mBAAmB,CAAC;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,WAAoD;AAC7D,WAAO,KAAK,SAAS,IAAI,SAAS;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAyC;AACzD,WAAO,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC,EACrC,OAAO,OAAK,EAAE,OAAO,OAAO,aAAa,QAAQ;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,wBAA8B;AAC5B,QAAI,KAAK,oBAAqB;AAE9B,SAAK,sBAAsB,YAAY,YAAY;AACjD,YAAM,KAAK,mBAAmB;AAAA,IAChC,GAAG,KAAK,wBAAwB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,uBAA6B;AAC3B,QAAI,KAAK,qBAAqB;AAC5B,oBAAc,KAAK,mBAAmB;AACtC,WAAK,sBAAsB;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,SAAK,qBAAqB;AAG1B,UAAM,cAAc,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,CAAC;AACvE,UAAM,QAAQ,IAAI,WAAW;AAC7B,SAAK,SAAS,MAAM;AAGpB,UAAM,KAAK,QAAQ,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,oBAA4B;AAClC,WAAO,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAAA,EACtE;AAAA,EAEQ,qBAAqB,SAAoB,WAAyB;AAExE,UAAM,SAAoC;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,SAAS,QAAQ;AAC1B,cAAQ,GAAG,OAAO,CAAC,SAAS;AAC1B,cAAM,YAAY;AAClB,aAAK,KAAK,OAAO,EAAE,GAAG,WAAW,UAAU,CAAkC;AAAA,MAC/E,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAc,qBAAoC;AAChD,QAAI;AACF,YAAM,KAAK,QAAQ,YAAY;AAAA,IACjC,QAAQ;AAAA,IAER;AAAA,EACF;AACF;;;AC9SO,IAAM,iBAAN,MAAM,eAAc;AAAA,EAApB;AACL,SAAQ,UAAU,oBAAI,IAA0B;AAChD,SAAQ,SAAS,oBAAI,IAAyB;AAC9C,SAAQ,QAAQ,oBAAI,IAAyB;AAC7C,SAAQ,wBAAwB,oBAAI,IAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAetE,SACE,QACA,QAAqB,eAAc,eACnC,sBACM;AACN,SAAK,QAAQ,IAAI,OAAO,UAAU,MAAM;AACxC,SAAK,OAAO,IAAI,OAAO,UAAU,KAAK;AACtC,SAAK,MAAM,IAAI,OAAO,UAAU;AAAA,MAC9B,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,iBAAiB,KAAK,IAAI;AAAA,MA
C1B,gBAAgB,KAAK,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,sBAAsB;AACxB,WAAK,sBAAsB,IAAI,OAAO,UAAU,oBAAoB;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAwB;AACjC,SAAK,QAAQ,OAAO,QAAQ;AAC5B,SAAK,OAAO,OAAO,QAAQ;AAC3B,SAAK,MAAM,OAAO,QAAQ;AAC1B,SAAK,sBAAsB,OAAO,QAAQ;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAA4C;AAC9C,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAA2B;AAC7B,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAyB;AACvB,WAAO,MAAM,KAAK,KAAK,QAAQ,KAAK,CAAC;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,UAA2B;AAC1C,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAE7B,WAAO,MAAM,kBAAkB,MAAM;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,UAA2B;AACxC,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAG7B,SAAK,iBAAiB,QAAQ;AAE9B,WAAO,MAAM,qBAAqB,MAAM;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,UAAkB,SAA0B;AACtD,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAG7B,SAAK,gBAAgB,QAAQ;AAE7B,WAAO,MAAM,oBAAoB,WAAW,MAAM;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAwB;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAwB;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,SAAS,MAAM,kBAAkB,GAAG;AACtC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,UAAwB;AACpC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,WAAK,iBAAiB,QAAQ;AAC9B,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,UAAkB,QAAsB;AACnD,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM,cAAc;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,UAAkB,SAAuB;AAC1D,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,WAAK,gBAAgB,QAAQ;AAC7B,YAAM,qBAAqB;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAAmC;AACpD,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,qBAAqB,QAAQ,EAAE;AAAA,IACjD;AAGA,UAAM,WAAW,KAAK,sBAAsB,IAAI,QAAQ;AACxD,QAAI,UAAU;AACZ,YAAM,QAAQ,MAAM,SAAS;AAC7B,aAAO,YAAY,YAAY;AAC/B,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,YAAY,WAAW;AAChC,aAAO,OAAO,YAAY;AAAA,IAC5B;AAEA,UAAM,IAAI,MAAM,uCAAuC,QAAQ,EAAE;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAkB,aAAyD;AAC3F,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,QAAQ;AACV,aAAO,cAAc,EAAE,GAAG,OAAO,aAAa,GAAG,YAAY;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAA2C;AAClD,WAAO,KAAK,MAAM,IAAI,QAAQ;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAA2C;AAClD,WAAO,KAAK,OAAO,IAAI,QAAQ;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,UAAkB,OAAmC;AAC/D,UAAM,WAAW,KAAK,OAAO,IAAI,QAAQ;AACzC,QAAI,UAAU;AACZ,WAAK,OAAO,IAAI,UAAU,EAAE,GAAG,UAAU,GAAG,MAAM,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAwB;AACjC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM,qBAAqB;AAC3B,YAAM,aAAa;AACnB,YAAM,oBAAoB;AAC1B,YAAM,kBAAkB,KAAK,IAAI;AACjC,YAAM,iBAAiB,KAAK,IAAI;AAAA,IAClC;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,UAAwB;AAC/C,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO;AAEZ,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,MAAM,MAAM,mBAAmB,KAAO;AACxC,YAAM,qBAAqB;AAC3B,YAAM,kBAAkB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,gBAAgB,UAAwB;AAC9C,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO;AAEZ,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,aAAa,KAAK,KAAK,KAAK;AAClC,QAAI,MAAM,MAAM,kBAAkB,YAAY;AAC5C,YAAM,oBAAoB;AAC1B,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAtQa,eASK,gBAA6B;AAAA,EAC3C,aAAa;AAAA,EACb,mBAAmB;AAAA,EACnB,0BAA0B;AAAA,EAC1B,uBAAuB;AACzB;AAdK,IAAM,gBAAN;;;ACbA,IAAM,mBAAN,cAA+B,aAA8B;AAAA,EAUlE,YAAY,SAA0B,CAAC,GAAG;AACxC,UAAM;AARR,SAAQ,iBAAiB;AACzB,SAAQ,gBAAgC,CAAC;AACzC,SAAQ,
YAAY;AACpB,SAAQ,eAAoC;AAC5C,SAAQ,oBAAoB;AAC5B,SAAQ,gBAAgB;AAItB,SAAK,SAAS;AAAA,MACZ,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,GAAG;AAAA,IACL;AAEA,SAAK,cAAc,IAAI,aAAa,KAAK,OAAO,UAAU;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,IAAI,aAAa,EAAE,YAAY,KAAK,OAAO,WAAW,CAAC;AAAA,IAC7E;AAEA,QAAI,KAAK,aAAa,UAAU,aAAa;AAC3C,YAAM,KAAK,aAAa,OAAO;AAAA,IACjC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAA2B;AAEnC,SAAK,cAAc,KAAK,KAAK;AAG7B,SAAK,mBAAmB,KAAK;AAG7B,QAAI,CAAC,KAAK,aAAa,KAAK,cAAc,SAAS,GAAG;AACpD,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAA2B;AACpD,QAAI,SAAS;AAEb,WAAO,SAAS,MAAM,QAAQ;AAC5B,YAAM,YAAY,KAAK,OAAO,aAAa,KAAK;AAChD,YAAM,SAAS,KAAK,IAAI,WAAW,MAAM,SAAS,MAAM;AAExD,WAAK,YAAY,IAAI,MAAM,SAAS,QAAQ,SAAS,MAAM,GAAG,KAAK,cAAc;AACjF,WAAK,kBAAkB;AACvB,gBAAU;AAGV,UAAI,KAAK,kBAAkB,KAAK,OAAO,YAAY;AACjD,aAAK,KAAK,gBAAgB,EAAE,OAAO,IAAI,aAAa,KAAK,WAAW,EAAE,CAAC;AAGvE,cAAM,eAAe,KAAK,OAAO,aAAa,KAAK,OAAO;AAC1D,aAAK,YAAY,WAAW,GAAG,YAAY;AAC3C,aAAK,iBAAiB,KAAK,OAAO;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAA+B;AAC3C,QAAI,CAAC,KAAK,gBAAgB,KAAK,UAAW;AAE1C,SAAK,YAAY;AACjB,SAAK,oBAAoB,KAAK,aAAa;AAC3C,SAAK,gBAAgB;AAErB,SAAK,KAAK,kBAAkB,CAAC,CAAC;AAE9B,UAAM,KAAK,qBAAqB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBAAsC;AAClD,QAAI,CAAC,KAAK,aAAc;AAExB,WAAO,KAAK,cAAc,SAAS,GAAG;AACpC,YAAM,QAAQ,KAAK,cAAc,MAAM;AAGvC,YAAM,SAAS,KAAK,aAAa,aAAa,GAAG,MAAM,QAAQ,KAAK,OAAO,UAAU;AACrF,aAAO,cAAc,OAAO,CAAC;AAE7B,YAAM,SAAS,KAAK,aAAa,mBAAmB;AACpD,aAAO,SAAS;AAChB,aAAO,QAAQ,KAAK,aAAa,WAAW;AAG5C,YAAM,WAAW,KAAK,oBAAoB,KAAK,gBAAgB,KAAK,OAAO;AAC3E,aAAO,MAAM,QAAQ;AAErB,WAAK,iBAAiB,MAAM;AAG5B,WAAK,WAAW;AAGhB,YAAM,IAAI,QAAQ,aAAW;AAC3B,eAAO,UAAU;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,SAAK,YAAY;AACjB,SAAK,KAAK,gBAAgB,CAAC,CAAC;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAmB;AACzB,QAAI,CAAC,KAAK,aAAc;AAExB,UAAM,eAAe,KAAK,oBAAoB,KAAK,gBAAgB,KAAK,OAAO;AAC/E,UAAM,aAAa,KAAK,aAAa;AACrC,UAAM,WAAW,aAAa,gBAAgB;AAE9C,QAAI,KAAK,IAAI,OAAO,IAAI,KAAK,OAAO,YAAY;AAC9C,WAAK,KAAK,cAAc,EAAE,QAAQ,CAAC;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,gBAAgB,CAAC;AACtB,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,CAAC;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAa;AACX,SAAK,WAAW;AAChB,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,sBAA8B;AAC5B,QAAI,CAAC,KAAK,aAAc,QAAO;AAC/B,WAAO,KAAK,aAAa,cAAc,KAAK;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,eAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,KAAK;AACV,SAAK,cAAc,MAAM;AACzB,SAAK,eAAe;AAAA,EACtB;AACF;;;ACpLO,IAAM,sBAAN,cAAkC,aAAiC;AAAA,EAWxE,YAAY,SAA6B,CAAC,GAAG;AAC3C,UAAM;AAVR,SAAQ,aAAa;AACrB,SAAQ,kBAAkB;AAC1B,SAAQ,iBAAiB;AACzB,SAAQ,eAAqD;AAC7D,SAAQ,eAAe;AAGvB;AAAA,SAAQ,mCAAmC;AAIzC,SAAK,SAAS;AAAA,MACZ,cAAc;AAAA;AAAA,MACd,qBAAqB;AAAA;AAAA,MACrB,kBAAkB;AAAA;AAAA,MAClB,SAAS;AAAA,MACT,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,gBAAwB,cAAsB,GAAS;AACtE,QAAI,CAAC,KAAK,OAAO,QAAS;AAE1B,QAAI,iBAAiB,KAAK,OAAO,cAAc;AAC7C,WAAK,iBAAiB,eAAe,cAAc;AAAA,IACrD,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aAAa,SAA0C;AACrD,QAAI,CAAC,KAAK,OAAO,QAAS;AAE1B,UAAM,MAAM,KAAK,aAAa,OAAO;AAIrC,UAAM,iBAAiB,KAAK,IAAI,MAAM,MAAM,CAAG;AAE/C,QAAI,iBAAiB,KAAK,OAAO,cAAc;AAC7C,WAAK,iBAAiB,GAAG;AAAA,IAC3B,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,UAAyB;AACrC,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAAwB;AACjC,SAAK,OAAO,UAAU;AACtB,QAAI,CAAC,SAAS;AACZ,WAAK,MAAM;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,QAA2C;AACtD,SAAK,SAAS,EAAE,GAAG,KAAK,QAAQ,GAAG,OAAO;AAAA,EAC5C;AAAA;A
AAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,iBAAiB;AACtB,SAAK,mCAAmC;AACxC,QAAI,KAAK,cAAc;AACrB,mBAAa,KAAK,YAAY;AAC9B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAA8D;AAC5D,WAAO;AAAA,MACL,YAAY,KAAK;AAAA,MACjB,kBAAkB,KAAK,aAAa,KAAK,IAAI,IAAI,KAAK,kBAAkB;AAAA,IAC1E;AAAA,EACF;AAAA;AAAA,EAIQ,aAAa,SAA4C;AAC/D,QAAI,MAAM;AACV,UAAM,QAAQ,mBAAmB,aAAa,QAAQ;AAEtD,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,SAAS,QAAQ,CAAC,IAAI;AAC5B,aAAO,SAAS;AAAA,IAClB;AAEA,WAAO,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACvC;AAAA,EAEQ,iBAAiB,KAAmB;AAC1C,UAAM,MAAM,KAAK,IAAI;AACrB,SAAK,iBAAiB;AAGtB,QAAI,KAAK,cAAc;AACrB,mBAAa,KAAK,YAAY;AAC9B,WAAK,eAAe;AAAA,IACtB;AAGA,QAAI,CAAC,KAAK,YAAY;AACpB,WAAK,aAAa;AAClB,WAAK,kBAAkB;AACvB,WAAK,KAAK,mBAAmB,EAAE,IAAI,CAAC;AAAA,IACtC;AAGA,QAAI,KAAK,gBAAgB,CAAC,KAAK,kCAAkC;AAC/D,YAAM,iBAAiB,MAAM,KAAK;AAClC,UAAI,kBAAkB,KAAK,OAAO,qBAAqB;AACrD,aAAK,mCAAmC;AACxC,aAAK,KAAK,0BAA0B,EAAE,KAAK,YAAY,eAAe,CAAC;AAAA,MACzE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,oBAA0B;AAChC,QAAI,CAAC,KAAK,WAAY;AAGtB,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,WAAW,MAAM;AACnC,cAAM,aAAa,KAAK,iBAAiB,KAAK;AAC9C,aAAK,aAAa;AAClB,aAAK,eAAe;AAEpB,aAAK,mCAAmC;AACxC,aAAK,KAAK,gBAAgB,EAAE,WAAW,CAAC;AAAA,MAC1C,GAAG,KAAK,OAAO,gBAAgB;AAAA,IACjC;AAAA,EACF;AACF;;;ACjMO,IAAM,kBACX;AA8BK,SAAS,oBAAoB,KAAwC;AAE1E,QAAM,UAAU;AAChB,QAAM,QAAQ,IAAI,MAAM,OAAO;AAE/B,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,KAAK,MAAM,CAAC;AAAA,IACZ,OAAO,MAAM,CAAC;AAAA,IACd,QAAQ,MAAM,CAAC;AAAA,IACf,MAAM,MAAM,CAAC;AAAA,EACf;AACF;AAsBA,eAAsB,0BAA0B,UAAkB,iBAAmC;AACnG,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,SAAS;AAAA,MACpC,QAAQ;AAAA,MACR,OAAO;AAAA;AAAA,IACT,CAAC;AAED,WAAO,SAAS;AAAA,EAClB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;;;AC/EA,IAAMC,WAAS,aAAa,mBAAmB;AAY/C,eAAsB,uBAAuB,SAKvB;AACpB,QAAM,UAAU,SAAS,WAAW;AACpC,QAAM,qBAAqB,SAAS,sBAAsB,CAAC;AAE3D,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,UAAM,gBAA0B,CAAC;AAEjC,UAAM,WAAW;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL;AAEA,eAAW,aAAa,YAAY;AAClC,YAAM,eAAe,SAAS;AAAA,QAAK,aACjC,UAAU,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC;AAAA,MACxD;AAEA,UAAI,cAAc;AAChB,YAAI,SAAS;AACX,UAAAA,SAAO,KAAK,kBAAkB,EAAE,UAAU,CAAC;AAAA,QAC7C;AACA,cAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,YAAI,SAAS;AACX,wBAAc,KAAK,SAAS;AAAA,QAC9B,WAAW,SAAS;AAClB,UAAAA,SAAO,KAAK,0BAA0B,EAAE,UAAU,CAAC;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS;AACX,MAAAA,SAAO,KAAK,2BAA2B;AAAA,QACrC,aAAa,WAAW;AAAA,QACxB,cAAc,cAAc;AAAA,QAC5B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,yBAAyB,EAAE,MAAM,CAAC;AAC/C,UAAM;AAAA,EACR;AACF;AAQA,eAAsB,mBAAmB,WAAqC;AAC5E,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,IAAAA,SAAO,KAAK,0BAA0B,EAAE,WAAW,QAAQ,CAAC;AAC5D,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,wBAAwB,EAAE,WAAW,MAAM,CAAC;AACzD,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,aAAgC;AACpD,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,IAAAA,SAAO,MAAM,oBAAoB,EAAE,WAAW,CAAC;AAC/C,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,wBAAwB,EAAE,MAAM,CAAC;AAC9C,WAAO,CAAC;AAAA,EACV;AACF;AASA,eAAsB,uBACpB,WACA,YAOC;AACD,MAAI,EAAE,YAAY,SAAS;AACzB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI;AACF,UAAM,QAAQ,MAAM,OAAO,KAAK,SAAS;AACzC,UAAM,WAAW,MAAM,MAAM,MAAM,UAAU;AAE7C,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,aAAa;AAAA,QACb,QAAQ;AAAA,QACR,QAAQ;AAAA,MACV;AAAA,IACF;AAEA,UAAM,
cAAc,SAAS,QAAQ,IAAI,cAAc;AACvD,UAAM,SACJ,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,YAAY;AAGpC,UAAM,iBAAiB,SAAS,MAAM;AACtC,UAAM,OAAO,MAAM,eAAe,KAAK;AACvC,UAAM,gBAAgB,KAAK,KAAK,EAAE,WAAW,GAAG,KAAK,KAAK,SAAS,WAAW;AAE9E,UAAM,QAAQ;AAAA,MACZ,SAAS,WAAW,OACpB,CAAC,UACD,CAAC,iBACD,gBACC,YAAY,SAAS,kBAAkB,KACtC,YAAY,SAAS,0BAA0B,KAC/C,YAAY,SAAS,QAAQ;AAAA,IACjC;AAEA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,QAAQ,UAAU;AAAA,MAClB,QAAQ,QACJ,mBACA,mBAAmB,SAAS,MAAM,iBAAiB,WAAW,YAAY,UAAU,aAAa;AAAA,IACvG;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,oCAAoC,EAAE,WAAW,YAAY,MAAM,CAAC;AACjF,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,QAAQ,UAAU,KAAK;AAAA,IACzB;AAAA,EACF;AACF;AAOA,eAAsB,uBAQnB;AACD,MAAI,EAAE,YAAY,SAAS;AACzB,WAAO,EAAE,aAAa,GAAG,gBAAgB,GAAG,gBAAgB,CAAC,EAAE;AAAA,EACjE;AAEA,QAAM,iBAA4E,CAAC;AACnF,MAAI,iBAAiB;AAErB,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AAErC,eAAW,aAAa,YAAY;AAClC,UAAI,CAAC,UAAU,YAAY,EAAE,SAAS,cAAc,GAAG;AACrD;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,OAAO,KAAK,SAAS;AACzC,YAAM,WAAW,MAAM,MAAM,KAAK;AAElC,iBAAW,WAAW,UAAU;AAC9B;AACA,cAAM,MAAM,QAAQ;AAEpB,cAAM,aAAa,MAAM,uBAAuB,WAAW,GAAG;AAE9D,YAAI,WAAW,UAAU,CAAC,WAAW,OAAO;AAC1C,yBAAe,KAAK;AAAA,YAClB;AAAA,YACA;AAAA,YACA,QAAQ,WAAW,UAAU;AAAA,UAC/B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,IAAAA,SAAO,KAAK,uBAAuB;AAAA,MACjC,aAAa,WAAW;AAAA,MACxB;AAAA,MACA,cAAc,eAAe;AAAA,IAC/B,CAAC;AAED,WAAO;AAAA,MACL,aAAa,WAAW;AAAA,MACxB;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,yBAAyB,EAAE,MAAM,CAAC;AAC/C,UAAM;AAAA,EACR;AACF;AAWA,eAAsB,kBAAkB,oBAAoB,OAAwB;AAClF,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,QAAI,eAAe;AAEnB,eAAW,aAAa,YAAY;AAClC,YAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,UAAI,SAAS;AACX;AAAA,MACF;AAAA,IACF;AAEA,IAAAA,SAAO,KAAK,8BAA8B;AAAA,MACxC,cAAc;AAAA,IAChB,CAAC;AAED,QAAI,mBAAmB;AAErB,YAAM,EAAE,KAAAC,KAAI,IAAI,MAAM,OAAO,iCAA2B;AACxD,MAAAA,KAAI,kBAAkB;AACtB,MAAAD,SAAO,KAAK,+DAA+D;AAAA,IAC7E;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,uBAAuB,EAAE,MAAM,CAAC;AAC7C,UAAM;AAAA,EACR;AACF;;;ACtHO,IAAM,2BAAiD;AAAA,EAC5D,cAAc;AAAA,EACd,sBAAsB;AAAA;AAAA,EACtB,kBAAkB;AAAA,EAClB,kBAAkB;AAAA,EAElB,QAAQ;AAAA,IACN;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,cAAc;AAAA,MAC1B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA,IACX;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,gBAAgB;AAAA,MAC5B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,oBAAoB,oBAAoB;AAAA,MACpD,aAAa,CAAC,KAAK,GAAG;AAAA,MACtB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,cAAc;AAAA,MAC1B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EAEA,aAAa;AAAA;AAAA,IAEX,EAAE,MAAM,QAAQ,IAAI,aAAa,SAAS,qBAAqB,UAAU,IAAI;AAAA,IAC7E,EAAE,MAAM,YAAY,IAAI,aAAa,SAAS,qBAAqB,UAAU,IAAI;AAAA;AAAA;AAAA,IAGjF,EAAE,MAAM,aAAa,IAAI,YAAY,SAAS,oBAAoB,UAAU,IAAI;AAAA;AAAA,IAGhF,EAAE,MAAM,YAAY,IAAI,YAAY,SAAS,kBAAkB,UAAU,IAAI;AAAA,IAC7E,EAAE,MAAM,QAAQ,IAAI,YAAY,SAAS,kBAAkB,UAAU,IAAI;AAAA;AAAA,IAGzE,EAAE,MAAM,YAAY,IAAI,QAAQ,SAAS,mBAAmB,UAAU,IAAI;AAAA;AAAA,IAG1E,EAAE,MAAM,aAAa,IAAI,QAAQ,SAAS,WAAW,UAAU,IAAI;AAAA,IACnE,EAAE,MAAM,YAAY,IAAI,QAAQ,SAAS,WAAW,UAAU,IAAI;AAAA;AAAA,IAGlE,EAAE,MAAM,YAAY,IAAI,aAAa,SAAS,aAAa,UAAU,IAAI;AAAA,EAC3E;AAAA,EAEA,iBAAiB;AAAA,IACf,EAAE,SAAS,SAAS,MAAM,iBAAiB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC3E,EAAE,SAAS,OAAO,MAAM,eAAe,WAAW,KAAK,YAAY,IAAI;AAAA,IACvE,EAAE,SAAS,SAAS,MAAM,iBAAiB,WAAW,KAAK,YAAY,IAAI;AAAA,IAC3E,EA
AE,SAAS,WAAW,MAAM,gBAAgB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC5E,EAAE,SAAS,aAAa,MAAM,qBAAqB,WAAW,KAAK,YAAY,IAAI;AAAA,IACnF,EAAE,SAAS,QAAQ,MAAM,gBAAgB,WAAW,KAAK,YAAY,EAAI;AAAA,IACzE,EAAE,SAAS,WAAW,MAAM,mBAAmB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC/E,EAAE,SAAS,WAAW,MAAM,mBAAmB,WAAW,GAAK,YAAY,EAAI;AAAA,EACjF;AAAA,EAEA,cAAc,CAAC,sBAAsB,uBAAuB,oBAAoB;AAClF;;;AC5NO,IAAM,iBAAN,cAA6B,aAAmC;AAAA,EA6BrE,YAAY,SAAwC,CAAC,GAAG;AACtD,UAAM;AA3BR,SAAQ,gBAAuC;AAG/C;AAAA,SAAQ,kBAA2B;AACnC,SAAQ,qBAA6B;AACrC,SAAQ,qBAA6B;AACrC,SAAQ,sBAA8B;AAGtC;AAAA,SAAQ,iBAAsC;AAC9C,SAAQ,oBAA4B;AACpC,SAAQ,qBAA6B;AACrC,SAAQ,sBAA8B;AAGtC;AAAA,SAAQ,cAAsB;AAC9B,SAAQ,gBAAwB;AAChC,SAAQ,qBAA6B;AAGrC;AAAA,SAAQ,iBAAyB;AACjC,SAAQ,iBAAyB;AAO/B,SAAK,SAAS,EAAE,GAAG,0BAA0B,GAAG,OAAO;AAGvD,UAAM,eAAe,KAAK,OAAO,OAAO;AAAA,MACtC,CAAC,MAAM,EAAE,SAAS,KAAK,OAAO;AAAA,IAChC;AACA,QAAI,CAAC,cAAc;AACjB,YAAM,IAAI,MAAM,kBAAkB,KAAK,OAAO,YAAY,aAAa;AAAA,IACzE;AACA,SAAK,eAAe;AACpB,SAAK,iBAAiB,KAAK,IAAI;AAC/B,SAAK,iBAAiB,KAAK,IAAI;AAG/B,SAAK,eAAe,KAAK,cAAc;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,QAA4B;AAC9B,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAA0B;AAC5B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,OAAkC;AAExC,UAAM,aAAa,KAAK,OAAO,YAAY;AAAA,MACzC,CAAC,MACC,EAAE,SAAS,KAAK,aAAa,QAC7B,EAAE,YAAY,UACb,CAAC,EAAE,aAAa,EAAE,UAAU;AAAA,IACjC;AAEA,QAAI,CAAC,YAAY;AACf,aAAO;AAAA,IACT;AAEA,SAAK,gBAAgB,YAAY,KAAK;AACtC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAAuB,YAA0B;AAC1D,UAAM,cAAc,KAAK;AAEzB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,UAAU,CAAC;AAG5D,UAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,MAC1C,CAAC,MAAM,EAAE,YAAY;AAAA,IACvB;AACA,QAAI,WAAW,KAAK,aAAa,qBAAqB;AACpD,WAAK,sBAAsB,QAAQ,YAAY,KAAK;AAAA,IACtD,OAAO;AACL,WAAK,sBAAsB;AAAA,IAC7B;AAEA,QAAI,gBAAgB,SAAS;AAC3B,WAAK,KAAK,kBAAkB,EAAE,SAAS,WAAW,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eAAqB;AACnB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB;AACzB,SAAK,sBAAsB;AAC3B,SAAK,KAAK,kBAAkB,EAAE,SAAS,MAAM,YAAY,EAAE,CAAC;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAAsB;AACnC,SAAK,cAAc,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,MAAM,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,WAA+B,gBAAwB,KAAW;AACzE,UAAM,cAAc,KAAK,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACvE,QAAI,CAAC,aAAa;AAChB,cAAQ,KAAK,2BAA2B,SAAS,aAAa;AAC9D;AAAA,IACF;AAEA,QAAI,YAAY,SAAS,KAAK,aAAa,QAAQ,CAAC,KAAK,iBAAiB;AACxE;AAAA,IACF;AAGA,UAAM,mBAA+B;AAAA,MACnC,MAAM,KAAK,aAAa;AAAA,MACxB,IAAI;AAAA,MACJ,SAAS;AAAA;AAAA,MACT,UAAU;AAAA,IACZ;AAEA,SAAK,gBAAgB,kBAAkB,SAAS;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,SAAmC;AACxC,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,KAAK,WAAW,MAAM,KAAK;AACjC,SAAK,iBAAiB;AAEtB,UAAM,YAAY,KAAK;AAGvB,QAAI,KAAK,iBAAiB;AACxB,WAAK,iBAAiB,SAAS;AAAA,IACjC;AAGA,SAAK,aAAa,GAAG;AAGrB,SAAK,mBAAmB,SAAS;AAGjC,SAAK,cAAc,SAAS;AAG5B,SAAK,eAAe,KAAK,cAAc;AACvC,SAAK,KAAK,iBAAiB,KAAK,YAAY;AAE5C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,UAAM,eAAe,KAAK,OAAO,OAAO;AAAA,MACtC,CAAC,MAAM,EAAE,SAAS,KAAK,OAAO;AAAA,IAChC;AACA,QAAI,cAAc;AAChB,WAAK,eAAe;AACpB,WAAK,gBAAgB;AACrB,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,iBAAiB,KAAK,IAAI;AAC/B,WAAK,qBAAqB;AAC1B,WAAK,gBAAgB;AACrB,WAAK,eAAe,KAAK,cAAc;AAAA,IACzC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAA6B;AAC3B,UAAM,QAAQ,oBAAI,IAAY;AAG9B,eAAW,SAAS,KAAK,OAAO,QAAQ;AACtC,iBAAW,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,eAAW,WAAW,KAAK,OAAO,iBAAiB;AACjD,YAAM,IAAI,QAAQ,IAAI;AAAA,IACxB;AAGA,eAAW,QAAQ,KAAK,OAAO,cAAc;AAC3C,YAAM,IAAI,IAAI;AAAA,IAChB;AAEA,WAAO,MAAM,KAAK,KAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAMQ,gBAAgB,YAAwB,OAA+B;AAC7E,UAAM,cAAc,KAAK,OAAO,OAAO;AAAA,MACrC,CAAC,MAAM,EAAE,SAAS,WAAW;AAAA,IAC/B;AACA,QAAI,CAAC,aAAa;AAChB,c
AAQ,KAAK,kCAAkC,WAAW,EAAE,aAAa;AACzE;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,aAAa;AAEpC,SAAK,gBAAgB,KAAK;AAC1B,SAAK,eAAe;AACpB,SAAK,kBAAkB;AACvB,SAAK,qBAAqB;AAC1B,SAAK,qBAAqB,WAAW;AACrC,SAAK,sBAAsB,KAAK,IAAI;AACpC,SAAK,iBAAiB,KAAK,IAAI;AAG/B,QAAI,CAAC,KAAK,aAAa,qBAAqB;AAC1C,WAAK,sBAAsB;AAAA,IAC7B;AAEA,SAAK,KAAK,gBAAgB;AAAA,MACxB,MAAM;AAAA,MACN,IAAI,YAAY;AAAA,MAChB,SAAS;AAAA,IACX,CAAC;AAED,SAAK,KAAK,oBAAoB;AAAA,MAC5B,MAAM;AAAA,MACN,IAAI,YAAY;AAAA,MAChB,UAAU,WAAW;AAAA,IACvB,CAAC;AAAA,EACH;AAAA,EAEQ,iBAAiB,WAAyB;AAChD,QAAI,CAAC,KAAK,mBAAmB,KAAK,sBAAsB,GAAG;AACzD,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B;AAAA,IACF;AAGA,UAAM,UAAU,KAAK,IAAI,IAAI,KAAK;AAClC,SAAK,qBAAqB,KAAK,IAAI,GAAG,UAAU,KAAK,kBAAkB;AAEvE,QAAI,KAAK,sBAAsB,GAAG;AAChC,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,gBAAgB;AACrB,WAAK,KAAK,kBAAkB,EAAE,OAAO,KAAK,aAAa,KAAK,CAAC;AAAA,IAC/D;AAAA,EACF;AAAA,EAEQ,aAAa,KAAmB;AACtC,QAAI,KAAK,gBAAiB;AAC1B,QAAI,KAAK,aAAa,WAAW,EAAG;AAEpC,UAAM,UAAU,MAAM,KAAK;AAC3B,QAAI,WAAW,KAAK,aAAa,SAAS;AACxC,WAAK,QAAQ,SAAS;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,mBAAmB,WAAyB;AAClD,QAAI,CAAC,KAAK,gBAAgB;AAExB,WAAK,qBAAqB,KAAK;AAAA,QAC7B;AAAA,QACA,KAAK,qBAAqB,YAAY;AAAA,MACxC;AACA;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,MAC1C,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,IAC5B;AACA,UAAM,aAAa,SAAS,cAAc;AAG1C,UAAM,OAAO,KAAK,sBAAsB,KAAK;AAC7C,UAAM,YAAY,aAAa;AAE/B,QAAI,KAAK,IAAI,IAAI,KAAK,WAAW;AAC/B,WAAK,qBAAqB,KAAK;AAAA,IACjC,OAAO;AACL,WAAK,sBAAsB,KAAK,KAAK,IAAI,IAAI;AAAA,IAC/C;AAAA,EACF;AAAA,EAEQ,cAAc,WAAyB;AAC7C,QAAI,CAAC,KAAK,aAAa,qBAAqB;AAC1C,WAAK,gBAAgB,KAAK,IAAI,GAAG,KAAK,gBAAgB,YAAY,CAAG;AACrE;AAAA,IACF;AAGA,UAAM,gBACJ,KAAK,cAAc,KAAK,OAAO,mBAC3B,KAAK,cAAc,KAAK,OAAO,mBAC/B;AAGN,UAAM,OAAO,gBAAgB,KAAK;AAClC,UAAM,aAAa;AACnB,UAAM,YAAY,aAAa;AAE/B,QAAI,KAAK,IAAI,IAAI,KAAK,WAAW;AAC/B,WAAK,gBAAgB;AAAA,IACvB,OAAO;AACL,WAAK,iBAAiB,KAAK,KAAK,IAAI,IAAI;AAAA,IAC1C;AAGA,UAAM,YAAY,KAAK,OAAO,aAAa;AAC3C,QAAI,YAAY,GAAG;AACjB,WAAK,qBAAqB,KAAK;AAAA,QAC7B,YAAY;AAAA,QACZ,KAAK,MAAM,KAAK,gBAAgB,SAAS;AAAA,MAC3C;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAiC;AACvC,UAAM,eAA8B,CAAC;AAGrC,UAAM,IAAI,KAAK;AACf,UAAM,mBAAmB,IAAI,KAAK,IAAI,IAAI;AAG1C,QAAI,KAAK,iBAAiB,KAAK,iBAAiB;AAC9C,YAAM,UAAU,IAAI;AACpB,eAAS,IAAI,GAAG,IAAI,KAAK,cAAc,UAAU,QAAQ,KAAK;AAC5D,cAAM,OAAO,KAAK,cAAc,UAAU,CAAC;AAC3C,cAAM,aAAa,KAAK,cAAc,YAAY,CAAC,KAAK;AACxD,qBAAa,KAAK;AAAA,UAChB;AAAA,UACA,QAAQ,aAAa;AAAA,UACrB,OAAO;AAAA,UACP,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,kBAAkB,mBAAmB;AACzD,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,UAAU,QAAQ,KAAK;AAC3D,YAAM,OAAO,KAAK,aAAa,UAAU,CAAC;AAC1C,YAAM,aAAa,KAAK,aAAa,YAAY,CAAC,KAAK;AACvD,mBAAa,KAAK;AAAA,QAChB;AAAA,QACA,QAAQ,aAAa;AAAA,QACrB,OAAO;AAAA,QACP,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,KAAK,kBAAkB,KAAK,qBAAqB,MAAM;AACzD,YAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,QAC1C,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,MAC5B;AACA,UAAI,SAAS;AACX,qBAAa,KAAK;AAAA,UAChB,MAAM,QAAQ;AAAA,UACd,QAAQ,KAAK;AAAA,UACb,OAAO;AAAA,UACP,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,KAAK,gBAAgB,QAAQ,KAAK,OAAO,aAAa,SAAS,GAAG;AACpE,YAAM,cAAc,KAAK,OAAO,aAAa,KAAK,kBAAkB;AACpE,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,QAAQ,KAAK;AAAA,QACb,OAAO,IAAM,KAAK,cAAc;AAAA;AAAA,QAChC,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,OAAO,KAAK,aAAa;AAAA,MACzB;AAAA,MACA,eAAe,KAAK,qBAAqB,OAAO,KAAK,iBAAiB;AAAA,MACtE,kBAAkB,KAAK;AAAA,MACvB,iBAAiB,KAAK;AAAA,MACtB,oBAAoB,KAAK;AAAA,IAC3B;AAAA,EACF;AACF;;;ACndO,SAAS,aAAa,SAA+B;AAC1D,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,MAAI,aAAa;AACjB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,kBAAc,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,EACtC;AAEA,SAAO,KAAK,KAAK,aAAa,QAAQ,MAAM;AAC9C;AAOO,SAAS,cAAc,SAA+B;AAC3D,MAAI,OAAO;AACX,WAAS,
IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,MAAM,KAAK,IAAI,QAAQ,CAAC,CAAC;AAC/B,QAAI,MAAM,KAAM,QAAO;AAAA,EACzB;AACA,SAAO;AACT;AAKO,IAAM,sBAAN,MAA0B;AAAA;AAAA;AAAA;AAAA;AAAA,EAU/B,YAAY,kBAA0B,MAAM,aAAqB,MAAM;AATvE,SAAQ,cAAsB;AAC9B,SAAQ,eAAuB;AAS7B,SAAK,kBAAkB,KAAK,IAAI,GAAG,KAAK,IAAI,MAAM,eAAe,CAAC;AAClE,SAAK,aAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,SAAsE;AAC5E,UAAM,aAAa,aAAa,OAAO;AACvC,UAAM,cAAc,cAAc,OAAO;AAGzC,UAAM,WAAW,aAAa,KAAK,aAAa,aAAa;AAC7D,UAAM,YAAY,cAAc,KAAK,aAAa,cAAc;AAIhE,QAAI,WAAW,KAAK,aAAa;AAE/B,WAAK,cACH,KAAK,cAAc,MAAM,WAAW;AAAA,IACxC,OAAO;AAEL,WAAK,cACH,KAAK,cAAc,KAAK,kBACxB,YAAY,IAAI,KAAK;AAAA,IACzB;AAEA,QAAI,YAAY,KAAK,cAAc;AACjC,WAAK,eAAe,KAAK,eAAe,MAAM,YAAY;AAAA,IAC5D,OAAO;AACL,WAAK,eACH,KAAK,eAAe,KAAK,kBACzB,aAAa,IAAI,KAAK;AAAA,IAC1B;AAIA,UAAM,SAAS,KAAK,cAAc,MAAM,KAAK,eAAe;AAE5D,WAAO;AAAA,MACL,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,QAAQ,KAAK,IAAI,GAAG,SAAS,CAAC;AAAA;AAAA,IAChC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,cAAc;AACnB,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,MAAc;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAAe;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAOO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA,EAS5B,YAAY,cAAsB,IAAI,oBAA4B,MAAM;AARxE,SAAQ,gBAA0B,CAAC;AASjC,SAAK,cAAc;AACnB,SAAK,oBAAoB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,QAAmE;AACzE,SAAK,cAAc,KAAK,MAAM;AAC9B,QAAI,KAAK,cAAc,SAAS,KAAK,aAAa;AAChD,WAAK,cAAc,MAAM;AAAA,IAC3B;AAEA,QAAI,KAAK,cAAc,SAAS,GAAG;AACjC,aAAO,EAAE,YAAY,OAAO,kBAAkB,EAAE;AAAA,IAClD;AAGA,UAAM,aAAa,KAAK,cAAc,MAAM,GAAG,EAAE;AACjD,UAAM,UAAU,WAAW,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC,IAAI,WAAW;AAGnE,UAAM,WAAW,SAAS;AAC1B,UAAM,aAAa,WAAW,KAAK;AAEnC,WAAO;AAAA,MACL;AAAA,MACA,kBAAkB,aAAa,KAAK,IAAI,GAAG,WAAW,GAAG,IAAI;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,gBAAgB,CAAC;AAAA,EACxB;AACF;","names":["data","options","session","logger","logger","logger","logger","logger","logger","logger","WASM_CDN_PATH","logger","logger","logger","sum","logger","env"]}
1 | +
{"version":3,"sources":["../src/audio/MicrophoneCapture.ts","../src/audio/RingBuffer.ts","../src/audio/AudioScheduler.ts","../src/audio/AudioChunkCoalescer.ts","../src/audio/LAMPipeline.ts","../src/audio/SyncedAudioPipeline.ts","../src/animation/EmotionToBlendshapeMapper.ts","../src/telemetry/exporters/console.ts","../src/telemetry/exporters/otlp.ts","../src/telemetry/OmoteTelemetry.ts","../src/telemetry/types.ts","../src/cache/ModelCache.ts","../src/utils/runtime.ts","../src/inference/onnxLoader.ts","../src/inference/blendshapeUtils.ts","../src/inference/Wav2Vec2Inference.ts","../src/audio/FullFacePipeline.ts","../src/inference/WhisperInference.ts","../src/inference/Wav2ArkitCpuInference.ts","../src/inference/createLipSync.ts","../src/inference/SileroVADInference.ts","../src/inference/SileroVADWorker.ts","../src/inference/createSileroVAD.ts","../src/inference/Emotion2VecInference.ts","../src/inference/SafariSpeechRecognition.ts","../src/emotion/Emotion.ts","../src/ai/adapters/AgentCoreAdapter.ts","../src/ai/orchestration/ConversationOrchestrator.ts","../src/ai/tenancy/TenantManager.ts","../src/ai/utils/AudioSyncManager.ts","../src/ai/utils/InterruptionHandler.ts","../src/cache/huggingFaceCDN.ts","../src/utils/transformersCacheClear.ts","../src/animation/types.ts","../src/animation/AnimationGraph.ts","../src/animation/audioEnergy.ts"],"sourcesContent":["/**\n * Microphone capture - renderer-agnostic audio input\n *\n * Captures audio from the microphone and emits PCM chunks.\n * Works in any JavaScript environment with Web Audio API.\n *\n * @category Audio\n */\n\nimport { EventEmitter, type OmoteEvents } from '../events';\n\nexport interface MicrophoneCaptureConfig {\n /** Target sample rate (default: 16000 for speech processing) */\n sampleRate?: number;\n /** Chunk size in samples (default: 1600 = 100ms at 16kHz) */\n chunkSize?: number;\n}\n\nexport class MicrophoneCapture {\n private config: Required<MicrophoneCaptureConfig>;\n private stream: MediaStream | null = null;\n private context: AudioContext | null = null;\n private processor: ScriptProcessorNode | null = null;\n private buffer: Float32Array = new Float32Array(0);\n private _isRecording = false;\n private _loggedFirstChunk = false;\n\n constructor(\n private events: EventEmitter<OmoteEvents>,\n config: MicrophoneCaptureConfig = {}\n ) {\n this.config = {\n sampleRate: config.sampleRate ?? 16000,\n chunkSize: config.chunkSize ?? 
1600,\n };\n }\n\n get isRecording(): boolean {\n return this._isRecording;\n }\n\n get isSupported(): boolean {\n return typeof navigator !== 'undefined' && !!navigator.mediaDevices?.getUserMedia;\n }\n\n async start(): Promise<void> {\n if (!this.isSupported) {\n this.events.emit('error', {\n code: 'MICROPHONE_NOT_SUPPORTED',\n message: 'Microphone not supported in this browser',\n });\n return;\n }\n\n if (this._isRecording) return;\n\n try {\n this.stream = await navigator.mediaDevices.getUserMedia({\n audio: {\n sampleRate: { ideal: this.config.sampleRate },\n channelCount: 1,\n echoCancellation: true,\n noiseSuppression: true,\n autoGainControl: true,\n },\n });\n\n this.context = new AudioContext({ sampleRate: this.config.sampleRate });\n\n // Resume AudioContext if suspended (browser autoplay policy)\n if (this.context.state === 'suspended') {\n await this.context.resume();\n }\n\n const source = this.context.createMediaStreamSource(this.stream);\n\n // Use ScriptProcessor for broad compatibility\n this.processor = this.context.createScriptProcessor(4096, 1, 1);\n\n this.processor.onaudioprocess = (e) => {\n const input = e.inputBuffer.getChannelData(0);\n\n // Calculate audio level\n let rms = 0;\n let peak = 0;\n for (let i = 0; i < input.length; i++) {\n const abs = Math.abs(input[i]);\n rms += input[i] * input[i];\n if (abs > peak) peak = abs;\n }\n rms = Math.sqrt(rms / input.length);\n\n this.events.emit('audio.level', { rms, peak });\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.buffer.length + input.length);\n newBuffer.set(this.buffer);\n newBuffer.set(input, this.buffer.length);\n this.buffer = newBuffer;\n\n // Emit chunks\n let chunkCount = 0;\n while (this.buffer.length >= this.config.chunkSize) {\n const chunk = this.buffer.slice(0, this.config.chunkSize);\n this.buffer = this.buffer.slice(this.config.chunkSize);\n\n const pcm = this.floatToPCM16(chunk);\n this.events.emit('audio.chunk', {\n pcm,\n timestamp: performance.now(),\n });\n chunkCount++;\n }\n // Log first emission for debugging\n if (chunkCount > 0 && !this._loggedFirstChunk) {\n console.log('[MicrophoneCapture] Emitting audio chunks:', chunkCount);\n this._loggedFirstChunk = true;\n }\n };\n\n source.connect(this.processor);\n this.processor.connect(this.context.destination);\n\n this._isRecording = true;\n console.log('[MicrophoneCapture] Started recording, context state:', this.context.state);\n } catch (err) {\n this.events.emit('error', {\n code: 'MICROPHONE_ERROR',\n message: (err as Error).message,\n details: err,\n });\n }\n }\n\n stop(): void {\n if (this.processor) {\n this.processor.disconnect();\n this.processor = null;\n }\n\n if (this.context) {\n this.context.close();\n this.context = null;\n }\n\n if (this.stream) {\n this.stream.getTracks().forEach((t) => t.stop());\n this.stream = null;\n }\n\n this.buffer = new Float32Array(0);\n this._isRecording = false;\n }\n\n private floatToPCM16(float32: Float32Array): Int16Array {\n const pcm = new Int16Array(float32.length);\n for (let i = 0; i < float32.length; i++) {\n const s = Math.max(-1, Math.min(1, float32[i]));\n pcm[i] = s < 0 ? 
s * 0x8000 : s * 0x7fff;\n }\n return pcm;\n }\n}\n","/**\n * Ring buffer for audio sample accumulation\n *\n * Efficiently accumulates audio samples and provides\n * contiguous buffers for inference without memory allocation churn.\n *\n * @category Audio\n */\n\nexport class RingBuffer {\n private buffer: Float32Array;\n private writeIndex = 0;\n private isFull = false;\n\n constructor(private readonly size: number) {\n this.buffer = new Float32Array(size);\n }\n\n /**\n * Write samples to the ring buffer\n * Converts Int16Array PCM to Float32\n */\n write(pcm: Int16Array): void {\n for (let i = 0; i < pcm.length; i++) {\n this.buffer[this.writeIndex] = pcm[i] / 32768.0;\n this.writeIndex = (this.writeIndex + 1) % this.size;\n\n if (this.writeIndex === 0) {\n this.isFull = true;\n }\n }\n }\n\n /**\n * Write float samples directly\n */\n writeFloat(samples: Float32Array): void {\n for (let i = 0; i < samples.length; i++) {\n this.buffer[this.writeIndex] = samples[i];\n this.writeIndex = (this.writeIndex + 1) % this.size;\n\n if (this.writeIndex === 0) {\n this.isFull = true;\n }\n }\n }\n\n /**\n * Get a contiguous copy of the buffer contents in chronological order\n * Returns null if buffer isn't full yet\n */\n read(): Float32Array | null {\n if (!this.isFull) return null;\n\n const output = new Float32Array(this.size);\n\n // Copy from writeIndex to end (oldest samples)\n const firstPart = this.buffer.subarray(this.writeIndex);\n output.set(firstPart, 0);\n\n // Copy from 0 to writeIndex (newest samples)\n const secondPart = this.buffer.subarray(0, this.writeIndex);\n output.set(secondPart, firstPart.length);\n\n return output;\n }\n\n /**\n * Check if buffer has enough samples\n */\n get hasData(): boolean {\n return this.isFull;\n }\n\n /**\n * Get current fill level (0-1)\n */\n get fillLevel(): number {\n if (this.isFull) return 1;\n return this.writeIndex / this.size;\n }\n\n /**\n * Reset the buffer\n */\n reset(): void {\n this.buffer.fill(0);\n this.writeIndex = 0;\n this.isFull = false;\n }\n}\n","/**\n * AudioScheduler - Enterprise-grade Web Audio API scheduling\n *\n * Implements the lookahead scheduling pattern from Chris Wilson's\n * \"A Tale of Two Clocks\" - the authoritative guide on Web Audio timing.\n *\n * Key Features:\n * - Uses AudioContext.currentTime (hardware clock) for sample-accurate timing\n * - Pre-schedules audio chunks for gapless playback\n * - Tracks scheduled sources for cleanup\n * - Provides playback state monitoring\n *\n * @see https://web.dev/articles/audio-scheduling\n * @category Audio\n */\n\nexport interface AudioSchedulerOptions {\n /** Sample rate in Hz (default: 16000 for speech) */\n sampleRate?: number\n /** Number of audio channels (default: 1 for mono) */\n channels?: number\n}\n\nexport class AudioScheduler {\n private context: AudioContext | null = null\n private nextPlayTime = 0\n private scheduledSources: Array<{ source: AudioBufferSourceNode; gainNode: GainNode }> = []\n private isPlaying = false\n\n constructor(private readonly options: AudioSchedulerOptions = {}) {}\n\n /**\n * Initialize AudioContext with specified sample rate\n *\n * Note: This is now a no-op. 
AudioContext is created lazily on first schedule()\n * to avoid browser autoplay policy issues (requires user gesture).\n */\n async initialize(): Promise<void> {\n // No-op - context will be created lazily in ensureContext()\n console.log('[AudioScheduler] Ready for lazy initialization')\n }\n\n /**\n * Eagerly create and warm up the AudioContext\n *\n * Call this when a playback session starts (e.g., when AI response begins).\n * The AudioContext needs time to initialize the audio hardware — on Windows\n * this can take 50-100ms. By warming up early (before audio data arrives),\n * the context is fully ready when schedule() is first called.\n *\n * Must be called after a user gesture (click/tap) for autoplay policy.\n */\n async warmup(): Promise<void> {\n await this.ensureContext()\n }\n\n /**\n * Ensure AudioContext is created and ready\n * Called lazily on first schedule() - requires user gesture\n */\n private async ensureContext(): Promise<AudioContext> {\n if (this.context && this.context.state !== 'closed') {\n return this.context\n }\n\n const sampleRate = this.options.sampleRate ?? 16000\n this.context = new AudioContext({ sampleRate })\n\n // Resume if suspended (browser autoplay policy)\n if (this.context.state === 'suspended') {\n await this.context.resume()\n }\n\n console.log(`[AudioScheduler] AudioContext initialized at ${sampleRate}Hz`)\n return this.context\n }\n\n /**\n * Schedule an audio chunk for playback\n *\n * Uses Web Audio's hardware-accurate clock for sample-perfect timing.\n * Chunks are scheduled immediately, not when they should play - this\n * ensures gapless playback even if main thread stalls.\n *\n * @param audioData - Float32Array of audio samples\n * @returns Scheduled playback time in AudioContext seconds\n */\n async schedule(audioData: Float32Array): Promise<number> {\n // Lazy initialization (requires user gesture)\n const ctx = await this.ensureContext()\n const channels = this.options.channels ?? 
1\n\n // Initialize playback timing on first chunk\n // Add 50ms lookahead so the node is fully enqueued before playback begins\n // (prevents skipping the first few ms when AudioContext was just created)\n if (!this.isPlaying) {\n this.nextPlayTime = ctx.currentTime + 0.05\n this.isPlaying = true\n }\n\n // Create audio buffer\n const audioBuffer = ctx.createBuffer(channels, audioData.length, ctx.sampleRate)\n audioBuffer.getChannelData(0).set(audioData)\n\n // Create gain node for fade control\n const gainNode = ctx.createGain()\n gainNode.gain.value = 1.0\n gainNode.connect(ctx.destination)\n\n // Create and schedule source\n const source = ctx.createBufferSource()\n source.buffer = audioBuffer\n source.connect(gainNode) // Route through gain node for fade control\n\n // Schedule at precise time for gapless playback\n const scheduleTime = this.nextPlayTime\n source.start(scheduleTime)\n\n // Track scheduled source with its gain node\n this.scheduledSources.push({ source, gainNode })\n\n // Update next play time\n const duration = audioData.length / ctx.sampleRate\n this.nextPlayTime = scheduleTime + duration\n\n return scheduleTime\n }\n\n /**\n * Get current audio clock time\n *\n * This is the hardware-accurate time, NOT JavaScript time.\n * Use this for synchronizing visual animations to audio.\n *\n * @returns Current time in AudioContext seconds\n */\n getCurrentTime(): number {\n if (!this.context) return 0\n return this.context.currentTime\n }\n\n /**\n * Get scheduled playback end time\n */\n getPlaybackEndTime(): number {\n return this.nextPlayTime\n }\n\n /**\n * Check if all scheduled audio has finished playing\n */\n isComplete(): boolean {\n if (!this.context || !this.isPlaying) return false\n return this.context.currentTime >= this.nextPlayTime\n }\n\n /**\n * Cancel all scheduled audio with smooth fade-out\n *\n * Applies a linear fade-out to all playing sources and stops them gracefully.\n * Prevents audio clicks/pops by ramping gain to zero before stopping.\n *\n * @param fadeOutMs - Fade-out duration in milliseconds (default: 50ms)\n * @returns Promise that resolves when fade-out completes\n */\n async cancelAll(fadeOutMs: number = 50): Promise<void> {\n if (!this.context || this.scheduledSources.length === 0) {\n return\n }\n\n const ctx = this.context\n const currentTime = ctx.currentTime\n const fadeOutSec = fadeOutMs / 1000\n\n // Apply fade-out to all scheduled sources\n for (const { source, gainNode } of this.scheduledSources) {\n try {\n // Ramp gain from current value to zero\n gainNode.gain.setValueAtTime(gainNode.gain.value, currentTime)\n gainNode.gain.linearRampToValueAtTime(0.0, currentTime + fadeOutSec)\n\n // Stop source after fade completes\n source.stop(currentTime + fadeOutSec)\n } catch (err) {\n // Source may have already stopped naturally - ignore error\n }\n }\n\n // Clear tracking arrays\n this.scheduledSources = []\n this.isPlaying = false\n this.nextPlayTime = 0\n\n // Wait for fade-out to complete\n await new Promise(resolve => setTimeout(resolve, fadeOutMs))\n }\n\n /**\n * Reset scheduler state for new playback session\n * Stops any orphaned sources that weren't cleaned up by cancelAll()\n */\n reset(): void {\n // Stop any still-playing sources before clearing\n if (this.context) {\n const now = this.context.currentTime\n for (const { source, gainNode } of this.scheduledSources) {\n try {\n gainNode.gain.setValueAtTime(0, now)\n source.stop(now)\n } catch {\n // Already stopped\n }\n }\n }\n this.nextPlayTime = 0\n this.isPlaying = false\n 
this.scheduledSources = []\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n if (this.context) {\n this.context.close()\n this.context = null\n }\n this.scheduledSources = []\n this.isPlaying = false\n }\n}\n","/**\n * AudioChunkCoalescer - Combine small network chunks into optimal buffers\n *\n * Network streaming often delivers audio in small chunks (e.g., 32ms from TTS APIs).\n * Creating an AudioBufferSourceNode for each tiny chunk is inefficient and can cause\n * overhead from object creation/GC.\n *\n * This class implements a double-buffering pattern: accumulate small chunks in a\n * temporary buffer, then flush to playback queue when threshold is reached.\n *\n * Benefits:\n * - Reduces AudioBufferSourceNode overhead (fewer nodes = less GC pressure)\n * - Configurable buffer size for optimal playback chunk duration\n * - Maintains sample-accurate timing despite buffering\n *\n * Based on patterns from HLS.js and production streaming implementations.\n *\n * @category Audio\n */\n\nexport interface AudioChunkCoalescerOptions {\n /**\n * Target duration in milliseconds for combined chunks\n * Default: 200ms (balances latency vs overhead)\n *\n * Smaller values = lower latency, more overhead\n * Larger values = higher latency, less overhead\n */\n targetDurationMs?: number\n\n /**\n * Sample rate in Hz\n * Default: 16000 (speech quality)\n */\n sampleRate?: number\n}\n\nexport class AudioChunkCoalescer {\n private tempBuffer: Uint8Array[] = []\n private readonly targetBytes: number\n\n constructor(private readonly options: AudioChunkCoalescerOptions = {}) {\n const targetMs = options.targetDurationMs ?? 200\n const sampleRate = options.sampleRate ?? 16000\n\n // Calculate target bytes: (duration_s) * (samples/s) * (2 bytes per Int16 sample)\n this.targetBytes = (targetMs / 1000) * sampleRate * 2\n }\n\n /**\n * Add a chunk to the temporary buffer\n *\n * @param chunk - Uint8Array containing Int16 PCM audio\n * @returns Combined buffer if threshold reached, null otherwise\n */\n add(chunk: Uint8Array): ArrayBuffer | null {\n // Add to temporary buffer\n this.tempBuffer.push(chunk)\n\n // Calculate total bytes buffered\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n\n // If we've reached the threshold, combine and return\n if (totalBytes >= this.targetBytes) {\n return this.flush()\n }\n\n return null\n }\n\n /**\n * Flush remaining buffered data\n *\n * Call this when the stream ends to ensure all audio is processed,\n * even if it doesn't reach the target threshold.\n *\n * @returns Combined buffer, or null if buffer is empty\n */\n flush(): ArrayBuffer | null {\n if (this.tempBuffer.length === 0) {\n return null\n }\n\n // Calculate total size\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n\n // Combine all chunks into single buffer\n const combined = new Uint8Array(totalBytes)\n let offset = 0\n for (const chunk of this.tempBuffer) {\n combined.set(chunk, offset)\n offset += chunk.length\n }\n\n // Clear temp buffer\n this.tempBuffer = []\n\n return combined.buffer\n }\n\n /**\n * Get current buffer fill level (0-1)\n */\n get fillLevel(): number {\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n return Math.min(1, totalBytes / this.targetBytes)\n }\n\n /**\n * Get current buffered duration in milliseconds\n */\n getBufferedDurationMs(): number {\n const sampleRate = this.options.sampleRate ?? 
16000\n const totalBytes = this.tempBuffer.reduce((sum, c) => sum + c.length, 0)\n const samples = totalBytes / 2 // Int16 = 2 bytes per sample\n return (samples / sampleRate) * 1000\n }\n\n /**\n * Get number of chunks currently buffered\n */\n get chunkCount(): number {\n return this.tempBuffer.length\n }\n\n /**\n * Reset the coalescer\n */\n reset(): void {\n this.tempBuffer = []\n }\n}\n","/**\n * LAMPipeline - Coordinate LAM (Wav2Vec2) inference with frame synchronization\n *\n * Manages the buffering and processing pipeline for LAM lip sync:\n * 1. Accumulates audio samples in a ring buffer\n * 2. Triggers LAM inference when buffer reaches required size (16000 samples @ 16kHz = 1.0s)\n * 3. Queues resulting blendshape frames with precise timestamps\n * 4. Provides frames synchronized to AudioContext clock\n *\n * Key Design Decisions:\n * - Ring buffer pattern for efficient sample accumulation (no allocation churn)\n * - Frame queue with timestamps for deterministic playback\n * - Timestamp-based frame retrieval (not callback) for renderer flexibility\n *\n * Based on patterns from Chrome Audio Worklet design and Web Audio clock management.\n *\n * @see https://developer.chrome.com/blog/audio-worklet-design-pattern\n * @category Audio\n */\n\nimport { RingBuffer } from './RingBuffer'\nimport type { LipSyncBackend } from '../inference/LipSyncBackend'\n\nexport interface LAMFrame {\n /** 52 ARKit blendshape weights */\n frame: Float32Array\n /** AudioContext time when this frame should be displayed */\n timestamp: number\n}\n\nexport interface LAMPipelineOptions {\n /**\n * Sample rate in Hz (must match audio playback)\n * Default: 16000\n */\n sampleRate?: number\n\n /**\n * LAM inference callback\n * Called each time LAM processes a buffer\n */\n onInference?: (frameCount: number) => void\n\n /**\n * Error callback for inference failures\n */\n onError?: (error: Error) => void\n}\n\nexport class LAMPipeline {\n private readonly REQUIRED_SAMPLES = 16000 // 1.0s at 16kHz (LAM requirement)\n private readonly FRAME_RATE = 30 // LAM outputs 30fps\n\n private buffer: Float32Array = new Float32Array(0)\n private bufferStartTime = 0\n private frameQueue: LAMFrame[] = []\n\n /**\n * Last successfully retrieved frame\n * Used as fallback when no new frame is available to prevent avatar freezing\n */\n private lastFrame: Float32Array | null = null\n\n constructor(private readonly options: LAMPipelineOptions = {}) {}\n\n /**\n * Push audio samples into the pipeline\n *\n * Accumulates samples and triggers LAM inference when buffer is full.\n * Multiple calls may be needed to accumulate enough samples.\n *\n * @param samples - Float32Array of audio samples\n * @param timestamp - AudioContext time when these samples start playing\n * @param lam - LAM inference engine\n */\n async push(samples: Float32Array, timestamp: number, lam: LipSyncBackend): Promise<void> {\n // Track buffer start time when empty\n if (this.buffer.length === 0) {\n this.bufferStartTime = timestamp\n }\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.buffer.length + samples.length)\n newBuffer.set(this.buffer, 0)\n newBuffer.set(samples, this.buffer.length)\n this.buffer = newBuffer\n\n // Process ALL complete chunks (not just one)\n // Critical for AgentCore which delivers entire sentences at once (30-50K+ samples)\n // Without the while loop, samples pile up and LAM falls behind audio playback\n while (this.buffer.length >= this.REQUIRED_SAMPLES) {\n await this.processBuffer(lam)\n }\n }\n\n /**\n * 
Process accumulated buffer through LAM inference\n */\n private async processBuffer(lam: LipSyncBackend): Promise<void> {\n try {\n // Extract exactly REQUIRED_SAMPLES for inference\n const toProcess = this.buffer.slice(0, this.REQUIRED_SAMPLES)\n const processedStartTime = this.bufferStartTime\n\n // Keep remaining samples for next inference\n this.buffer = this.buffer.slice(this.REQUIRED_SAMPLES)\n\n // Update start time for remaining buffer\n const processedDuration = this.REQUIRED_SAMPLES / (this.options.sampleRate ?? 16000)\n this.bufferStartTime = processedStartTime + processedDuration\n\n // Run LAM inference\n const result = await lam.infer(toProcess)\n\n // Queue frames with timestamps\n const frameDuration = 1 / this.FRAME_RATE\n for (let i = 0; i < result.blendshapes.length; i++) {\n const frame = result.blendshapes[i]\n const timestamp = processedStartTime + (i * frameDuration)\n this.frameQueue.push({ frame, timestamp })\n }\n\n // Notify callback\n this.options.onInference?.(result.blendshapes.length)\n } catch (error) {\n this.options.onError?.(error as Error)\n\n // Clear buffer on error to prevent repeated failures\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n }\n }\n\n /**\n * Get the frame that should be displayed at the current time\n *\n * Automatically removes frames that have already been displayed.\n * This prevents memory leaks from accumulating old frames.\n *\n * Discard Window (prevents premature frame discarding):\n * - WebGPU: 0.5s (LAM inference 20-100ms + RAF jitter + React stalls)\n * - WASM: 1.0s (LAM inference 50-500ms + higher variability)\n *\n * Last-Frame-Hold: Returns last valid frame instead of null to prevent\n * avatar freezing when between frames (RAF at 60fps vs LAM at 30fps).\n *\n * @param currentTime - Current AudioContext time\n * @param lam - LAM inference engine (optional, for backend detection)\n * @returns Current frame, or last frame as fallback, or null if no frames yet\n */\n getFrameForTime(currentTime: number, lam?: { backend: 'webgpu' | 'wasm' | null }): Float32Array | null {\n // Dynamic discard window based on backend performance characteristics\n const discardWindow = lam?.backend === 'wasm' ? 1.0 : 0.5\n\n // Remove frames that are too old (already displayed)\n let discardedCount = 0\n while (this.frameQueue.length > 0 && this.frameQueue[0].timestamp < currentTime - discardWindow) {\n const discarded = this.frameQueue.shift()!\n discardedCount++\n\n // Log frame discards for debugging sync issues\n if (discardedCount === 1) {\n const ageMs = ((currentTime - discarded.timestamp) * 1000).toFixed(0)\n console.warn('[LAM] Frame(s) discarded as too old', {\n ageMs,\n discardWindowMs: discardWindow * 1000,\n queueLength: this.frameQueue.length,\n backend: lam?.backend ?? 
'unknown'\n })\n }\n }\n\n // Return the frame that should be playing now\n if (this.frameQueue.length > 0 && this.frameQueue[0].timestamp <= currentTime) {\n const { frame } = this.frameQueue.shift()!\n this.lastFrame = frame // Cache for fallback\n return frame\n }\n\n // Last-frame-hold: Return cached frame instead of null to prevent freezing\n // This handles RAF running at 60fps while LAM produces 30fps\n return this.lastFrame\n }\n\n /**\n * Get all frames in the queue (for debugging/monitoring)\n */\n getQueuedFrames(): LAMFrame[] {\n return [...this.frameQueue]\n }\n\n /**\n * Get current buffer fill level (0-1)\n */\n get fillLevel(): number {\n return Math.min(1, this.buffer.length / this.REQUIRED_SAMPLES)\n }\n\n /**\n * Get number of frames queued\n */\n get queuedFrameCount(): number {\n return this.frameQueue.length\n }\n\n /**\n * Get buffered audio duration in seconds\n */\n get bufferedDuration(): number {\n return this.buffer.length / (this.options.sampleRate ?? 16000)\n }\n\n /**\n * Flush remaining buffered audio\n *\n * Processes any remaining audio in the buffer, even if less than REQUIRED_SAMPLES.\n * This ensures the final audio chunk generates blendshape frames.\n *\n * Should be called when audio stream ends to prevent losing the last 0-1 seconds.\n *\n * @param lam - LAM inference engine\n */\n async flush(lam: LipSyncBackend): Promise<void> {\n if (this.buffer.length === 0) {\n return // Nothing to flush\n }\n\n // Pad buffer to REQUIRED_SAMPLES (LAM expects exactly 16000 samples)\n const padded = new Float32Array(this.REQUIRED_SAMPLES)\n padded.set(this.buffer, 0)\n // Remaining samples are already zero (Float32Array default)\n\n // Process the padded buffer\n const processedStartTime = this.bufferStartTime\n\n try {\n // Run LAM inference\n const result = await lam.infer(padded)\n\n // Queue frames with timestamps\n // Only queue frames that correspond to actual audio (not padding)\n const actualDuration = this.buffer.length / (this.options.sampleRate ?? 16000)\n const frameDuration = 1 / this.FRAME_RATE\n const actualFrameCount = Math.ceil(actualDuration * this.FRAME_RATE)\n\n for (let i = 0; i < Math.min(actualFrameCount, result.blendshapes.length); i++) {\n const frame = result.blendshapes[i]\n const timestamp = processedStartTime + (i * frameDuration)\n this.frameQueue.push({ frame, timestamp })\n }\n\n // Clear buffer after flushing\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n\n // Notify callback\n this.options.onInference?.(Math.min(actualFrameCount, result.blendshapes.length))\n } catch (error) {\n this.options.onError?.(error as Error)\n\n // Clear buffer on error\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n }\n }\n\n /**\n * Adjust all queued frame timestamps by an offset\n *\n * Used for synchronization when audio scheduling time differs from\n * the estimated time used during LAM processing.\n *\n * @param offset - Time offset in seconds to add to all timestamps\n */\n adjustTimestamps(offset: number): void {\n for (const frame of this.frameQueue) {\n frame.timestamp += offset\n }\n }\n\n /**\n * Reset the pipeline\n */\n reset(): void {\n this.buffer = new Float32Array(0)\n this.bufferStartTime = 0\n this.frameQueue = []\n this.lastFrame = null // Clear last-frame-hold cache\n }\n}\n","/**\n * SyncedAudioPipeline - Audio playback + LAM lip sync coordinator\n *\n * Orchestrates the complete pipeline for synchronized audio playback and lip sync:\n * 1. Network chunks → Coalescer → Optimized buffers\n * 2. 
Audio buffers → Scheduler → Gapless playback (immediate, never blocks)\n * 3. Audio buffers → LAM Pipeline → Blendshape frames (background, fire-and-forget)\n * 4. Frames synchronized to AudioContext clock → Renderer\n *\n * Key Architecture Pattern: Audio-First, LAM-Background\n * - Audio chunks are scheduled for playback immediately (never waits for LAM)\n * - LAM inference runs in background without blocking the audio path\n * - Lip sync starts ~1 second after audio (LAM needs 16000 samples to infer)\n * - Once LAM catches up, frames stay synchronized to AudioContext clock\n *\n * This decoupled design prevents LAM inference (50-300ms) from blocking audio\n * scheduling, which caused audible stuttering when audio arrived as a continuous\n * stream (e.g., single-call TTS from ElevenLabs via AgentCore).\n *\n * @see https://web.dev/articles/audio-scheduling (Web Audio clock patterns)\n * @category Audio\n */\n\nimport { AudioScheduler } from './AudioScheduler'\nimport { AudioChunkCoalescer } from './AudioChunkCoalescer'\nimport { LAMPipeline } from './LAMPipeline'\nimport { EventEmitter } from '../events/EventEmitter'\nimport type { LipSyncBackend } from '../inference/LipSyncBackend'\n\n/**\n * Safely convert an ArrayBuffer of PCM16 bytes to Float32 samples.\n * Handles odd-length buffers by truncating to the nearest even byte boundary.\n */\nfunction pcm16ToFloat32(buffer: ArrayBuffer): Float32Array {\n // Int16Array requires even byte length — truncate if odd\n const byteLen = buffer.byteLength & ~1\n const int16 = byteLen === buffer.byteLength\n ? new Int16Array(buffer)\n : new Int16Array(buffer, 0, byteLen / 2)\n const float32 = new Float32Array(int16.length)\n for (let i = 0; i < int16.length; i++) {\n float32[i] = int16[i] / 32768\n }\n return float32\n}\n\nexport interface SyncedAudioPipelineOptions {\n /** Sample rate in Hz (default: 16000) */\n sampleRate?: number\n /** Target chunk duration in ms for coalescing (default: 200) */\n chunkTargetMs?: number\n /** LAM inference engine */\n lam: LipSyncBackend\n}\n\nexport interface SyncedAudioPipelineEvents {\n /** New frame ready for display */\n frame_ready: Float32Array\n /** Playback has completed */\n playback_complete: void\n /** First audio chunk scheduled, playback starting */\n playback_start: number\n /** Error occurred */\n error: Error\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown\n}\n\nexport class SyncedAudioPipeline extends EventEmitter<SyncedAudioPipelineEvents> {\n private scheduler: AudioScheduler\n private coalescer: AudioChunkCoalescer\n private lamPipeline: LAMPipeline\n\n private playbackStarted = false\n private monitorInterval: number | null = null\n private frameAnimationId: number | null = null\n\n constructor(private readonly options: SyncedAudioPipelineOptions) {\n super()\n\n const sampleRate = options.sampleRate ?? 16000\n\n this.scheduler = new AudioScheduler({ sampleRate })\n this.coalescer = new AudioChunkCoalescer({\n sampleRate,\n targetDurationMs: options.chunkTargetMs ?? 
200,\n })\n this.lamPipeline = new LAMPipeline({\n sampleRate,\n onError: (error) => {\n this.emit('error', error)\n },\n })\n }\n\n /**\n * Initialize the pipeline\n */\n async initialize(): Promise<void> {\n await this.scheduler.initialize()\n }\n\n /**\n * Start a new playback session\n *\n * Resets all state and prepares for incoming audio chunks.\n * Audio will be scheduled immediately as chunks arrive (no buffering).\n */\n start(): void {\n // Stop any active session first (prevents duplicate frame loops/monitors)\n this.stopMonitoring()\n\n this.scheduler.reset()\n this.coalescer.reset()\n this.lamPipeline.reset()\n this.playbackStarted = false\n\n // Eagerly warm up AudioContext so audio hardware is ready when\n // first audio chunk arrives. Without this, AudioContext creation\n // happens at schedule time and the first 50-100ms of audio stutters\n // while Windows WASAPI initializes.\n this.scheduler.warmup()\n\n // Start frame animation loop\n this.startFrameLoop()\n\n // Start playback monitoring\n this.startMonitoring()\n }\n\n /**\n * Receive audio chunk from network\n *\n * Audio-first design: schedules audio immediately, LAM runs in background.\n * This prevents LAM inference (50-300ms) from blocking audio scheduling,\n * which caused audible stuttering with continuous audio streams.\n *\n * @param chunk - Uint8Array containing Int16 PCM audio\n */\n async onAudioChunk(chunk: Uint8Array): Promise<void> {\n // Coalesce small chunks into optimal buffers\n const combined = this.coalescer.add(chunk)\n if (!combined) {\n return // Not enough data yet\n }\n\n // Convert PCM16 bytes to Float32 samples (handles odd-length buffers safely)\n const float32 = pcm16ToFloat32(combined)\n\n // Schedule audio immediately — never wait for LAM\n const scheduleTime = await this.scheduler.schedule(float32)\n\n // Emit playback_start on first scheduled chunk\n if (!this.playbackStarted) {\n this.playbackStarted = true\n this.emit('playback_start', scheduleTime)\n }\n\n // LAM runs in background — never blocks audio scheduling.\n // lam.infer() takes 50-300ms when it triggers (every 16000 samples).\n // If we awaited here, the NDJSON processing loop in useVoice.tsx would\n // stall, preventing new audio chunks from being scheduled. 
The already-\n // scheduled audio plays out and runs dry → gap → audible stutter.\n this.lamPipeline.push(float32, scheduleTime, this.options.lam).catch(err => {\n this.emit('error', err)\n })\n }\n\n /**\n * End of audio stream\n *\n * Flushes any remaining buffered data.\n */\n async end(): Promise<void> {\n // Flush remaining coalesced data\n const remaining = this.coalescer.flush()\n if (remaining) {\n const chunk = new Uint8Array(remaining)\n await this.onAudioChunk(chunk)\n }\n\n // Flush remaining LAM buffer to process final audio chunk\n // This ensures blendshapes are generated for the last 0-1 seconds of audio\n await this.lamPipeline.flush(this.options.lam)\n }\n\n /**\n * Stop playback immediately with smooth fade-out\n *\n * Gracefully cancels all audio playback and LAM processing:\n * - Fades out audio over specified duration (default: 50ms)\n * - Cancels pending LAM inferences\n * - Clears all buffers and queues\n * - Emits 'playback_complete' event\n *\n * Use this for interruptions (e.g., user barge-in during AI speech).\n *\n * @param fadeOutMs - Fade-out duration in milliseconds (default: 50ms)\n * @returns Promise that resolves when fade-out completes\n */\n async stop(fadeOutMs: number = 50): Promise<void> {\n // Stop monitoring and frame loop\n this.stopMonitoring()\n\n // Cancel audio playback with fade-out\n await this.scheduler.cancelAll(fadeOutMs)\n\n // Clear all buffers\n this.coalescer.reset()\n this.lamPipeline.reset()\n this.playbackStarted = false\n\n // Emit completion event\n this.emit('playback_complete', undefined as any)\n }\n\n /**\n * Start frame animation loop\n *\n * Uses requestAnimationFrame to check for new LAM frames.\n * Synchronized to AudioContext clock (not visual refresh rate).\n *\n * Frame Emission Strategy:\n * - LAMPipeline uses last-frame-hold to prevent null returns\n * - Always emit frames (even repeated frames) to maintain smooth animation\n * - Renderer is responsible for detecting duplicate frames if needed\n */\n private startFrameLoop(): void {\n const updateFrame = () => {\n const currentTime = this.scheduler.getCurrentTime()\n const frame = this.lamPipeline.getFrameForTime(currentTime, this.options.lam)\n\n if (frame) {\n this.emit('frame_ready', frame)\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame)\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame)\n }\n\n /**\n * Start monitoring for playback completion\n */\n private startMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval)\n }\n\n this.monitorInterval = window.setInterval(() => {\n if (this.scheduler.isComplete() && this.lamPipeline.queuedFrameCount === 0) {\n this.emit('playback_complete', undefined as any)\n this.stopMonitoring()\n }\n }, 100)\n }\n\n /**\n * Stop monitoring\n */\n private stopMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval)\n this.monitorInterval = null\n }\n\n if (this.frameAnimationId) {\n cancelAnimationFrame(this.frameAnimationId)\n this.frameAnimationId = null\n }\n }\n\n /**\n * Get current pipeline state (for debugging/monitoring)\n */\n getState() {\n return {\n playbackStarted: this.playbackStarted,\n coalescerFill: this.coalescer.fillLevel,\n lamFill: this.lamPipeline.fillLevel,\n queuedFrames: this.lamPipeline.queuedFrameCount,\n currentTime: this.scheduler.getCurrentTime(),\n playbackEndTime: this.scheduler.getPlaybackEndTime(),\n }\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n this.stopMonitoring()\n 
this.scheduler.dispose()\n this.coalescer.reset()\n this.lamPipeline.reset()\n }\n}\n","/**\n * Emotion to ARKit Blendshape Mapper\n *\n * Converts Emotion2VecInference output to upper face ARKit blendshapes for\n * expressive avatar animation. Maps 4 emotion categories (neutral, happy, angry, sad)\n * to 11 upper face blendshapes (brows, eyes, cheeks).\n *\n * Supports two blend modes:\n * - 'dominant': Uses only the strongest emotion (simpler, more stable)\n * - 'weighted': Blends all emotions by probability (more nuanced, e.g., bittersweet)\n *\n * Also supports energy modulation to scale emotion intensity by audio energy,\n * making expressions stronger during emphasized speech.\n *\n * @example Basic usage\n * ```typescript\n * import { EmotionToBlendshapeMapper } from '@omote/core';\n * import { Emotion2VecInference } from '@omote/core';\n *\n * const emotion = new Emotion2VecInference({ modelUrl: '/models/emotion.onnx' });\n * const mapper = new EmotionToBlendshapeMapper();\n *\n * // Process emotion frame\n * const result = await emotion.infer(audioSamples);\n * const blendshapes = mapper.mapFrame(result.dominant);\n *\n * // Apply to avatar\n * for (const [name, value] of Object.entries(blendshapes)) {\n * avatar.setBlendshape(name, value);\n * }\n * ```\n *\n * @example Weighted blending for nuanced expressions\n * ```typescript\n * const mapper = new EmotionToBlendshapeMapper({\n * blendMode: 'weighted',\n * minBlendProbability: 0.1,\n * });\n *\n * // Frame with mixed emotions: { happy: 0.6, sad: 0.3, neutral: 0.1 }\n * // Result: bittersweet expression (smiling but worried brow)\n * const blendshapes = mapper.mapFrame(emotionFrame);\n * ```\n *\n * @example Energy-modulated emotion\n * ```typescript\n * import { AudioEnergyAnalyzer } from '@omote/core';\n *\n * const energyAnalyzer = new AudioEnergyAnalyzer();\n * const mapper = new EmotionToBlendshapeMapper({ energyModulation: true });\n *\n * // In animation loop\n * function animate(audioChunk: Float32Array, emotionFrame: EmotionFrame) {\n * const { energy } = energyAnalyzer.analyze(audioChunk);\n * mapper.mapFrame(emotionFrame, energy); // Louder = stronger emotion\n * mapper.update(16);\n * applyToAvatar(mapper.getCurrentBlendshapes());\n * }\n * ```\n *\n * @module animation\n */\n\nimport type { EmotionFrame, Emotion2VecLabel } from '../inference/Emotion2VecInference';\n\n/**\n * Upper face ARKit blendshape names (11 total)\n *\n * These blendshapes control the upper face (brows, eyes, cheeks) and are\n * driven by emotion detection, complementing the mouth blendshapes from\n * LAM lip sync.\n */\nexport const UPPER_FACE_BLENDSHAPES = [\n // Brows (5)\n 'browDownLeft',\n 'browDownRight',\n 'browInnerUp',\n 'browOuterUpLeft',\n 'browOuterUpRight',\n // Eyes (4)\n 'eyeSquintLeft',\n 'eyeSquintRight',\n 'eyeWideLeft',\n 'eyeWideRight',\n // Cheeks (2)\n 'cheekSquintLeft',\n 'cheekSquintRight',\n] as const;\n\nexport type UpperFaceBlendshapeName = (typeof UPPER_FACE_BLENDSHAPES)[number];\n\n/**\n * Upper face blendshape values (0-1 for each)\n */\nexport type UpperFaceBlendshapes = Record<UpperFaceBlendshapeName, number>;\n\n/**\n * Blend mode for combining emotions\n * - 'dominant': Use only the strongest emotion (default, more stable)\n * - 'weighted': Blend all emotions by probability (more nuanced)\n */\nexport type EmotionBlendMode = 'dominant' | 'weighted';\n\n/**\n * Emotion to ARKit blendshape mapping\n *\n * Based on Paul Ekman's FACS (Facial Action Coding System) research:\n *\n * - Happy (AU6+AU12): Cheek raise + 
lip corner pull (Duchenne smile)\n * Upper face: cheekSquint (AU6) + slight eyeSquint from genuine smile\n *\n * - Angry (AU4+AU5+AU7+AU23): Brow lower + eye wide + lid tighten + lip press\n * Upper face: browDown (AU4) + eyeWide (AU5) + eyeSquint (AU7) creates the \"glare\"\n *\n * - Sad (AU1+AU4+AU15): Inner brow raise + brow furrow + lip corner depress\n * Upper face: browInnerUp (AU1) + browDown (AU4) creates the worried/sad brow\n *\n * - Neutral: All zeros (no expression overlay)\n *\n * @see https://imotions.com/blog/learning/research-fundamentals/facial-action-coding-system/\n * @see https://melindaozel.com/arkit-to-facs-cheat-sheet/\n */\nexport const EMOTION_ARKIT_MAP: Record<Emotion2VecLabel, Partial<UpperFaceBlendshapes>> = {\n happy: {\n // AU6 - Cheek raiser (primary Duchenne smile marker)\n cheekSquintLeft: 0.5,\n cheekSquintRight: 0.5,\n // Slight eye squint from genuine smile (orbicularis oculi activation)\n eyeSquintLeft: 0.2,\n eyeSquintRight: 0.2,\n },\n angry: {\n // AU4 - Brow lowerer (intense, primary anger marker)\n browDownLeft: 0.7,\n browDownRight: 0.7,\n // AU5 - Upper lid raiser (wide eyes, part of the \"glare\")\n eyeWideLeft: 0.4,\n eyeWideRight: 0.4,\n // AU7 - Lid tightener (tense stare, combines with AU5 for angry glare)\n eyeSquintLeft: 0.3,\n eyeSquintRight: 0.3,\n },\n sad: {\n // AU1 - Inner brow raiser (primary sadness marker)\n browInnerUp: 0.6,\n // AU4 - Brow lowerer (brows drawn together)\n browDownLeft: 0.3,\n browDownRight: 0.3,\n },\n neutral: {}, // All zeros - no expression overlay\n};\n\n/**\n * Configuration for EmotionToBlendshapeMapper\n */\nexport interface EmotionBlendshapeConfig {\n /**\n * Smoothing factor for exponential moving average (0-1)\n * Lower = slower, smoother transitions\n * Higher = faster, more responsive\n * @default 0.15\n */\n smoothingFactor?: number;\n\n /**\n * Minimum confidence threshold for emotion to take effect\n * Emotions below this confidence are treated as neutral\n * @default 0.3\n */\n confidenceThreshold?: number;\n\n /**\n * Global intensity multiplier for all blendshapes (0-2)\n * @default 1.0\n */\n intensity?: number;\n\n /**\n * Blend mode for combining emotions\n * - 'dominant': Use only the strongest emotion (default)\n * - 'weighted': Blend all emotions by probability\n * @default 'dominant'\n */\n blendMode?: EmotionBlendMode;\n\n /**\n * Minimum probability for an emotion to contribute in weighted blend mode\n * Emotions with probability below this are ignored\n * @default 0.1\n */\n minBlendProbability?: number;\n\n /**\n * Enable energy modulation - scale emotion intensity by audio energy\n * When enabled, louder speech produces stronger expressions\n * @default false\n */\n energyModulation?: boolean;\n\n /**\n * Minimum energy scale when energy modulation is enabled (0-1)\n * At zero audio energy, emotion intensity is scaled by this factor\n * @default 0.3\n */\n minEnergyScale?: number;\n\n /**\n * Maximum energy scale when energy modulation is enabled (0-2)\n * At maximum audio energy, emotion intensity is scaled by this factor\n * @default 1.0\n */\n maxEnergyScale?: number;\n}\n\nconst DEFAULT_CONFIG: Required<EmotionBlendshapeConfig> = {\n smoothingFactor: 0.15,\n confidenceThreshold: 0.3,\n intensity: 1.0,\n blendMode: 'dominant',\n minBlendProbability: 0.1,\n energyModulation: false,\n minEnergyScale: 0.3,\n maxEnergyScale: 1.0,\n};\n\n/**\n * Creates a zeroed UpperFaceBlendshapes object\n */\nfunction createZeroBlendshapes(): UpperFaceBlendshapes {\n const result = {} as 
UpperFaceBlendshapes;\n for (const name of UPPER_FACE_BLENDSHAPES) {\n result[name] = 0;\n }\n return result;\n}\n\n/**\n * Clamp value between 0 and 1\n */\nfunction clamp01(value: number): number {\n return Math.max(0, Math.min(1, value));\n}\n\n/**\n * EmotionToBlendshapeMapper\n *\n * Converts emotion detection output to upper face ARKit blendshapes.\n * Provides smooth transitions between emotion states using exponential\n * moving average interpolation.\n *\n * Supports two blend modes:\n * - 'dominant': Uses only the strongest emotion\n * - 'weighted': Blends all emotions by probability for nuanced expressions\n *\n * Also supports energy modulation to scale emotion intensity by audio energy.\n */\nexport class EmotionToBlendshapeMapper {\n private config: Required<EmotionBlendshapeConfig>;\n private targetBlendshapes: UpperFaceBlendshapes;\n private currentBlendshapes: UpperFaceBlendshapes;\n private currentEnergy: number = 1.0;\n\n /**\n * Create a new EmotionToBlendshapeMapper\n *\n * @param config - Optional configuration\n */\n constructor(config?: EmotionBlendshapeConfig) {\n this.config = {\n ...DEFAULT_CONFIG,\n ...config,\n };\n this.targetBlendshapes = createZeroBlendshapes();\n this.currentBlendshapes = createZeroBlendshapes();\n }\n\n /**\n * Map an emotion frame to target blendshapes\n *\n * This sets the target values that the mapper will smoothly interpolate\n * towards. Call update() each frame to apply smoothing.\n *\n * @param frame - Emotion frame from Emotion2VecInference\n * @param audioEnergy - Optional audio energy (0-1) for energy modulation\n * @returns Target upper face blendshapes (before smoothing)\n */\n mapFrame(frame: EmotionFrame, audioEnergy?: number): UpperFaceBlendshapes {\n // Reset target to zeros\n this.targetBlendshapes = createZeroBlendshapes();\n\n // Store energy for modulation\n if (audioEnergy !== undefined) {\n this.currentEnergy = clamp01(audioEnergy);\n }\n\n // Check for valid frame\n if (!frame) {\n return { ...this.targetBlendshapes };\n }\n\n // Route to appropriate blend method\n if (this.config.blendMode === 'weighted') {\n this.mapFrameWeighted(frame);\n } else {\n this.mapFrameDominant(frame);\n }\n\n // Apply energy modulation if enabled\n if (this.config.energyModulation) {\n this.applyEnergyModulation();\n }\n\n return { ...this.targetBlendshapes };\n }\n\n /**\n * Map using dominant emotion only (original behavior)\n */\n private mapFrameDominant(frame: EmotionFrame): void {\n // Check confidence threshold\n if (frame.confidence < this.config.confidenceThreshold) {\n return;\n }\n\n // Get emotion mapping\n const emotion = frame.emotion as Emotion2VecLabel;\n const mapping = EMOTION_ARKIT_MAP[emotion];\n\n if (!mapping) {\n return;\n }\n\n // Apply mapping with intensity and confidence scaling\n const scale = this.config.intensity * frame.confidence;\n\n for (const [name, value] of Object.entries(mapping)) {\n const blendshapeName = name as UpperFaceBlendshapeName;\n if (value !== undefined) {\n this.targetBlendshapes[blendshapeName] = clamp01(value * scale);\n }\n }\n }\n\n /**\n * Map using weighted blend of all emotions by probability\n * Creates more nuanced expressions (e.g., bittersweet = happy + sad)\n */\n private mapFrameWeighted(frame: EmotionFrame): void {\n if (!frame.probabilities) {\n // Fall back to dominant if no probabilities\n this.mapFrameDominant(frame);\n return;\n }\n\n // Blend all emotions by their probability\n for (const [emotion, probability] of Object.entries(frame.probabilities)) {\n // Skip 
emotions below minimum probability\n if (probability < this.config.minBlendProbability) {\n continue;\n }\n\n const mapping = EMOTION_ARKIT_MAP[emotion as Emotion2VecLabel];\n if (!mapping) {\n continue;\n }\n\n // Add this emotion's contribution weighted by probability\n const scale = this.config.intensity * probability;\n\n for (const [name, value] of Object.entries(mapping)) {\n const blendshapeName = name as UpperFaceBlendshapeName;\n if (value !== undefined) {\n // Additive blending - sum contributions\n this.targetBlendshapes[blendshapeName] += value * scale;\n }\n }\n }\n\n // Clamp all values to 0-1 after blending\n for (const name of UPPER_FACE_BLENDSHAPES) {\n this.targetBlendshapes[name] = clamp01(this.targetBlendshapes[name]);\n }\n }\n\n /**\n * Apply energy modulation to scale emotion intensity by audio energy\n * Louder speech = stronger expressions\n */\n private applyEnergyModulation(): void {\n const { minEnergyScale, maxEnergyScale } = this.config;\n\n // Linear interpolation: energy 0 -> minScale, energy 1 -> maxScale\n const energyScale = minEnergyScale + this.currentEnergy * (maxEnergyScale - minEnergyScale);\n\n for (const name of UPPER_FACE_BLENDSHAPES) {\n this.targetBlendshapes[name] = clamp01(this.targetBlendshapes[name] * energyScale);\n }\n }\n\n /**\n * Apply smoothing to interpolate current values towards target\n *\n * Uses exponential moving average:\n * current = current + smoothingFactor * (target - current)\n *\n * @param _deltaMs - Delta time in milliseconds (reserved for future time-based smoothing)\n */\n update(_deltaMs: number): void {\n const factor = this.config.smoothingFactor;\n\n for (const name of UPPER_FACE_BLENDSHAPES) {\n const target = this.targetBlendshapes[name];\n const current = this.currentBlendshapes[name];\n this.currentBlendshapes[name] = clamp01(current + factor * (target - current));\n }\n }\n\n /**\n * Get current smoothed blendshape values\n *\n * @returns Current upper face blendshapes (after smoothing)\n */\n getCurrentBlendshapes(): UpperFaceBlendshapes {\n return { ...this.currentBlendshapes };\n }\n\n /**\n * Reset mapper to neutral state\n *\n * Sets both target and current blendshapes to zero.\n */\n reset(): void {\n this.targetBlendshapes = createZeroBlendshapes();\n this.currentBlendshapes = createZeroBlendshapes();\n this.currentEnergy = 1.0;\n }\n\n /**\n * Get current configuration\n */\n getConfig(): Required<EmotionBlendshapeConfig> {\n return { ...this.config };\n }\n\n /**\n * Update configuration\n *\n * @param config - Partial configuration to update\n */\n setConfig(config: Partial<EmotionBlendshapeConfig>): void {\n this.config = {\n ...this.config,\n ...config,\n };\n }\n}\n","/**\n * Console Exporter\n *\n * Exports telemetry data to the browser console for development/debugging.\n *\n * @category Telemetry\n */\n\nimport type { SpanAttributes } from '../types';\n\n/**\n * Span data structure for export\n */\nexport interface SpanData {\n name: string;\n traceId: string;\n spanId: string;\n parentSpanId?: string;\n startTime: number;\n endTime: number;\n durationMs: number;\n status: 'ok' | 'error';\n attributes: SpanAttributes;\n error?: Error;\n}\n\n/**\n * Metric data structure for export\n */\nexport interface MetricData {\n name: string;\n type: 'counter' | 'histogram';\n value: number;\n attributes: Record<string, string | number | boolean>;\n timestamp: number;\n}\n\n/**\n * Exporter interface that all exporters must implement\n */\nexport interface TelemetryExporterInterface {\n /** Export a 
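As a concrete check of the weighted path, take the bittersweet frame from the module example (happy 0.6, sad 0.3, neutral 0.1) at the default intensity of 1.0. The arithmetic below follows directly from EMOTION_ARKIT_MAP; the declared frame stands in for real Emotion2VecInference output, and re-export of EmotionFrame from the package root is an assumption.

import { EmotionToBlendshapeMapper } from '@omote/core';
import type { EmotionFrame } from '@omote/core'; // root re-export assumed

declare const frame: EmotionFrame; // probabilities: { happy: 0.6, sad: 0.3, neutral: 0.1 }

const mapper = new EmotionToBlendshapeMapper({ blendMode: 'weighted' });
const target = mapper.mapFrame(frame);
// happy (0.6): cheekSquintLeft/Right = 0.5 * 0.6 = 0.30, eyeSquintLeft/Right = 0.2 * 0.6 = 0.12
// sad   (0.3): browInnerUp = 0.6 * 0.3 = 0.18, browDownLeft/Right = 0.3 * 0.3 = 0.09
// neutral maps to {} and contributes nothing; no sum exceeds 1, so the final clamp is a no-op.

// Per render tick, update() eases the displayed values toward `target` by
// smoothingFactor (default 0.15) per call.
mapper.update(16);
const displayed = mapper.getCurrentBlendshapes();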
completed span */\n exportSpan(span: SpanData): void;\n /** Export a metric */\n exportMetric(metric: MetricData): void;\n /** Flush any buffered data */\n flush(): Promise<void>;\n /** Shutdown the exporter */\n shutdown(): Promise<void>;\n}\n\n/**\n * Console exporter for development/debugging\n *\n * Outputs spans and metrics to the browser console with formatting.\n */\nexport class ConsoleExporter implements TelemetryExporterInterface {\n private enabled: boolean;\n private prefix: string;\n\n constructor(options: { enabled?: boolean; prefix?: string } = {}) {\n this.enabled = options.enabled ?? true;\n this.prefix = options.prefix ?? '[Omote Telemetry]';\n }\n\n exportSpan(span: SpanData): void {\n if (!this.enabled) return;\n\n const statusIcon = span.status === 'ok' ? '✓' : '✗';\n const statusColor = span.status === 'ok' ? 'color: green' : 'color: red';\n\n console.groupCollapsed(\n `%c${this.prefix} %c${statusIcon} ${span.name} %c(${span.durationMs.toFixed(2)}ms)`,\n 'color: gray',\n statusColor,\n 'color: gray'\n );\n\n console.log('Trace ID:', span.traceId);\n console.log('Span ID:', span.spanId);\n if (span.parentSpanId) {\n console.log('Parent Span ID:', span.parentSpanId);\n }\n console.log('Duration:', `${span.durationMs.toFixed(2)}ms`);\n console.log('Status:', span.status);\n\n if (Object.keys(span.attributes).length > 0) {\n console.log('Attributes:', span.attributes);\n }\n\n if (span.error) {\n console.error('Error:', span.error);\n }\n\n console.groupEnd();\n }\n\n exportMetric(metric: MetricData): void {\n if (!this.enabled) return;\n\n const typeIcon = metric.type === 'counter' ? '↑' : '📊';\n\n console.log(\n `%c${this.prefix} %c${typeIcon} ${metric.name}: %c${metric.value}`,\n 'color: gray',\n 'color: blue',\n 'color: black; font-weight: bold',\n metric.attributes\n );\n }\n\n async flush(): Promise<void> {\n // Console exporter doesn't buffer, nothing to flush\n }\n\n async shutdown(): Promise<void> {\n this.enabled = false;\n }\n}\n","/**\n * OTLP Exporter\n *\n * Exports telemetry data to OTLP-compatible backends (Jaeger, Tempo, etc.)\n * using the OTLP/HTTP JSON protocol.\n *\n * @category Telemetry\n */\n\nimport type { OTLPExporterConfig } from '../types';\nimport type { SpanData, MetricData, TelemetryExporterInterface } from './console';\n\n/**\n * OTLP span status codes\n */\nconst StatusCode = {\n UNSET: 0,\n OK: 1,\n ERROR: 2,\n} as const;\n\n/**\n * Convert internal span to OTLP format\n */\nfunction spanToOTLP(span: SpanData, serviceName: string, serviceVersion: string) {\n const attributes = Object.entries(span.attributes)\n .filter(([, v]) => v !== undefined)\n .map(([key, value]) => ({\n key,\n value: typeof value === 'string'\n ? { stringValue: value }\n : typeof value === 'number'\n ? Number.isInteger(value)\n ? 
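Because TelemetryExporterInterface is only four methods, a throwaway implementation is straightforward, e.g. for capturing spans in tests. A minimal sketch, assuming the exporter types are re-exported from the package root; note that OmoteTelemetry itself only instantiates the built-in 'console' and 'otlp' exporters.

import type { TelemetryExporterInterface, SpanData, MetricData } from '@omote/core';

class InMemoryExporter implements TelemetryExporterInterface {
  readonly spans: SpanData[] = [];
  readonly metrics: MetricData[] = [];

  exportSpan(span: SpanData): void {
    this.spans.push(span); // completed spans arrive one at a time
  }

  exportMetric(metric: MetricData): void {
    this.metrics.push(metric); // counters and aggregated histograms
  }

  async flush(): Promise<void> {
    // nothing is buffered externally, so there is nothing to flush
  }

  async shutdown(): Promise<void> {
    this.spans.length = 0;
    this.metrics.length = 0;
  }
}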
{ intValue: value }\n : { doubleValue: value }\n : { boolValue: value },\n }));\n\n return {\n resourceSpans: [{\n resource: {\n attributes: [\n { key: 'service.name', value: { stringValue: serviceName } },\n { key: 'service.version', value: { stringValue: serviceVersion } },\n { key: 'telemetry.sdk.name', value: { stringValue: 'omote-sdk' } },\n { key: 'telemetry.sdk.language', value: { stringValue: 'javascript' } },\n ],\n },\n scopeSpans: [{\n scope: {\n name: 'omote-sdk',\n version: serviceVersion,\n },\n spans: [{\n traceId: span.traceId,\n spanId: span.spanId,\n parentSpanId: span.parentSpanId || '',\n name: span.name,\n kind: 1, // INTERNAL\n startTimeUnixNano: String(span.startTime * 1_000_000),\n endTimeUnixNano: String(span.endTime * 1_000_000),\n attributes,\n status: {\n code: span.status === 'ok' ? StatusCode.OK : StatusCode.ERROR,\n message: span.error?.message || '',\n },\n }],\n }],\n }],\n };\n}\n\n/**\n * Convert internal metric to OTLP format\n */\nfunction metricToOTLP(metric: MetricData, serviceName: string, serviceVersion: string) {\n const attributes = Object.entries(metric.attributes)\n .filter(([, v]) => v !== undefined)\n .map(([key, value]) => ({\n key,\n value: typeof value === 'string'\n ? { stringValue: value }\n : typeof value === 'number'\n ? Number.isInteger(value)\n ? { intValue: value }\n : { doubleValue: value }\n : { boolValue: value },\n }));\n\n const dataPoint = {\n attributes,\n timeUnixNano: String(metric.timestamp * 1_000_000),\n ...(metric.type === 'counter'\n ? { asInt: metric.value }\n : { asDouble: metric.value }),\n };\n\n return {\n resourceMetrics: [{\n resource: {\n attributes: [\n { key: 'service.name', value: { stringValue: serviceName } },\n { key: 'service.version', value: { stringValue: serviceVersion } },\n ],\n },\n scopeMetrics: [{\n scope: {\n name: 'omote-sdk',\n version: serviceVersion,\n },\n metrics: [{\n name: metric.name,\n ...(metric.type === 'counter'\n ? 
{\n sum: {\n dataPoints: [dataPoint],\n aggregationTemporality: 2, // CUMULATIVE\n isMonotonic: true,\n },\n }\n : {\n gauge: {\n dataPoints: [dataPoint],\n },\n }),\n }],\n }],\n }],\n };\n}\n\n/**\n * OTLP exporter for production telemetry\n *\n * Sends spans and metrics to OTLP-compatible backends like:\n * - Jaeger\n * - Grafana Tempo\n * - Honeycomb\n * - Datadog\n * - AWS X-Ray (with collector)\n */\nexport class OTLPExporter implements TelemetryExporterInterface {\n private config: Required<OTLPExporterConfig>;\n private serviceName: string;\n private serviceVersion: string;\n private spanBuffer: SpanData[] = [];\n private metricBuffer: MetricData[] = [];\n private flushIntervalId: ReturnType<typeof setInterval> | null = null;\n private readonly BUFFER_SIZE = 100;\n private readonly FLUSH_INTERVAL_MS = 5000;\n private isShutdown = false;\n\n constructor(\n config: OTLPExporterConfig,\n serviceName: string = 'omote-sdk',\n serviceVersion: string = '0.1.0'\n ) {\n this.config = {\n timeoutMs: 10000,\n headers: {},\n ...config,\n };\n this.serviceName = serviceName;\n this.serviceVersion = serviceVersion;\n\n // Start periodic flush\n this.flushIntervalId = setInterval(() => {\n this.flush().catch(console.error);\n }, this.FLUSH_INTERVAL_MS);\n }\n\n exportSpan(span: SpanData): void {\n if (this.isShutdown) return;\n\n this.spanBuffer.push(span);\n\n if (this.spanBuffer.length >= this.BUFFER_SIZE) {\n this.flush().catch(console.error);\n }\n }\n\n exportMetric(metric: MetricData): void {\n if (this.isShutdown) return;\n\n this.metricBuffer.push(metric);\n\n if (this.metricBuffer.length >= this.BUFFER_SIZE) {\n this.flush().catch(console.error);\n }\n }\n\n async flush(): Promise<void> {\n if (this.isShutdown) return;\n\n const spans = this.spanBuffer.splice(0);\n const metrics = this.metricBuffer.splice(0);\n\n const promises: Promise<void>[] = [];\n\n // Export spans\n if (spans.length > 0) {\n promises.push(this.exportSpans(spans));\n }\n\n // Export metrics\n if (metrics.length > 0) {\n promises.push(this.exportMetrics(metrics));\n }\n\n await Promise.all(promises);\n }\n\n async shutdown(): Promise<void> {\n if (this.flushIntervalId) {\n clearInterval(this.flushIntervalId);\n this.flushIntervalId = null;\n }\n\n // Final flush before marking shutdown\n await this.flush();\n\n this.isShutdown = true;\n }\n\n private async exportSpans(spans: SpanData[]): Promise<void> {\n // Combine all spans into a single request\n const resourceSpans = spans.map(span =>\n spanToOTLP(span, this.serviceName, this.serviceVersion).resourceSpans[0]\n );\n\n const body = { resourceSpans };\n const endpoint = this.config.endpoint.replace(/\\/$/, '') + '/v1/traces';\n\n await this.sendRequest(endpoint, body);\n }\n\n private async exportMetrics(metrics: MetricData[]): Promise<void> {\n // Combine all metrics into a single request\n const resourceMetrics = metrics.map(metric =>\n metricToOTLP(metric, this.serviceName, this.serviceVersion).resourceMetrics[0]\n );\n\n const body = { resourceMetrics };\n const endpoint = this.config.endpoint.replace(/\\/$/, '') + '/v1/metrics';\n\n await this.sendRequest(endpoint, body);\n }\n\n private async sendRequest(endpoint: string, body: unknown): Promise<void> {\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), this.config.timeoutMs);\n\n try {\n const response = await fetch(endpoint, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers,\n },\n body: JSON.stringify(body),\n 
signal: controller.signal,\n });\n\n if (!response.ok) {\n console.warn(`[OTLP] Export failed: ${response.status} ${response.statusText}`);\n }\n } catch (error) {\n if ((error as Error).name === 'AbortError') {\n console.warn('[OTLP] Export timed out');\n } else {\n console.warn('[OTLP] Export error:', error);\n }\n } finally {\n clearTimeout(timeoutId);\n }\n }\n}\n","/**\n * Muse Telemetry\n *\n * Main orchestrator for SDK telemetry. Manages spans, metrics, and exporters.\n *\n * @category Telemetry\n */\n\nimport type { TelemetryConfig, SpanAttributes, SamplingConfig } from './types';\nimport type { SpanData, MetricData, TelemetryExporterInterface } from './exporters/console';\nimport { ConsoleExporter } from './exporters/console';\nimport { OTLPExporter } from './exporters/otlp';\n\n/**\n * Generate a random hex ID\n */\nfunction generateId(length: number = 16): string {\n const bytes = new Uint8Array(length);\n crypto.getRandomValues(bytes);\n return Array.from(bytes)\n .map(b => b.toString(16).padStart(2, '0'))\n .join('');\n}\n\n/**\n * Span context for tracing\n */\ninterface SpanContext {\n traceId: string;\n spanId: string;\n parentSpanId?: string;\n}\n\n/**\n * Active span handle returned by startSpan\n */\nexport interface ActiveSpan {\n /** End the span with success status */\n end(): void;\n /** End the span with error status */\n endWithError(error: Error): void;\n /** Add attributes to the span */\n setAttributes(attrs: Partial<SpanAttributes>): void;\n /** Get the span context */\n getContext(): SpanContext;\n}\n\n/**\n * Global telemetry instance\n */\nlet globalTelemetry: OmoteTelemetry | null = null;\n\n/**\n * Configure global telemetry\n *\n * @example\n * ```typescript\n * // Development\n * configureTelemetry({\n * enabled: true,\n * serviceName: 'omote-dev',\n * exporter: 'console',\n * });\n *\n * // Production\n * configureTelemetry({\n * enabled: true,\n * serviceName: 'omote-prod',\n * exporter: 'otlp',\n * exporterConfig: {\n * endpoint: 'https://tempo.example.com',\n * },\n * sampling: { ratio: 0.1 },\n * });\n * ```\n */\nexport function configureTelemetry(config: TelemetryConfig): OmoteTelemetry {\n if (globalTelemetry) {\n globalTelemetry.shutdown();\n }\n globalTelemetry = new OmoteTelemetry(config);\n return globalTelemetry;\n}\n\n/**\n * Get the global telemetry instance\n */\nexport function getTelemetry(): OmoteTelemetry | null {\n return globalTelemetry;\n}\n\n/**\n * Main telemetry class\n *\n * Manages spans, metrics, and exports to configured backends.\n */\nexport class OmoteTelemetry {\n private config: Required<Omit<TelemetryConfig, 'exporterConfig'>> & { exporterConfig?: TelemetryConfig['exporterConfig'] };\n private exporter: TelemetryExporterInterface | null = null;\n private activeTraceId: string | null = null;\n private metricsIntervalId: ReturnType<typeof setInterval> | null = null;\n\n // Metric accumulators\n private counters: Map<string, { value: number; attributes: Record<string, string | number | boolean> }> = new Map();\n private histograms: Map<string, { values: number[]; attributes: Record<string, string | number | boolean> }> = new Map();\n\n constructor(config: TelemetryConfig) {\n this.config = {\n enabled: config.enabled ?? false,\n serviceName: config.serviceName ?? 'omote-sdk',\n serviceVersion: config.serviceVersion ?? '0.1.0',\n exporter: config.exporter ?? 'none',\n exporterConfig: config.exporterConfig,\n sampling: config.sampling ?? { ratio: 1.0, alwaysSampleErrors: true },\n metricsEnabled: config.metricsEnabled ?? 
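OTLPExporter can also be constructed directly when the configureTelemetry path is not used; spans and metrics are buffered and sent once 100 items accumulate or every 5 seconds, whichever comes first. A sketch with placeholder endpoint and token; re-export of OTLPExporter and SpanData from the package root is an assumption.

import { OTLPExporter } from '@omote/core';
import type { SpanData } from '@omote/core';

declare const span: SpanData; // shape defined above (name, traceId, durationMs, status, ...)

const exporter = new OTLPExporter(
  {
    endpoint: 'https://tempo.example.com',        // '/v1/traces' and '/v1/metrics' are appended
    headers: { Authorization: 'Bearer <token>' }, // sent with every export request
    timeoutMs: 10000,
  },
  'omote-prod', // service.name resource attribute
  '0.2.2'       // service.version resource attribute
);

exporter.exportSpan(span); // buffered; flushed on the batch/interval thresholds above
await exporter.flush();    // force-send anything still pending
await exporter.shutdown(); // stops the periodic timer and performs a final flush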
true,\n metricsIntervalMs: config.metricsIntervalMs ?? 60000,\n };\n\n if (this.config.enabled) {\n this.initExporter();\n this.startMetricsCollection();\n }\n }\n\n /**\n * Initialize the configured exporter\n */\n private initExporter(): void {\n switch (this.config.exporter) {\n case 'console':\n this.exporter = new ConsoleExporter({ enabled: true });\n break;\n case 'otlp':\n if (!this.config.exporterConfig) {\n console.warn('[Telemetry] OTLP exporter requires exporterConfig with endpoint');\n return;\n }\n this.exporter = new OTLPExporter(\n this.config.exporterConfig,\n this.config.serviceName,\n this.config.serviceVersion\n );\n break;\n case 'none':\n default:\n this.exporter = null;\n }\n }\n\n /**\n * Start periodic metrics collection\n */\n private startMetricsCollection(): void {\n if (!this.config.metricsEnabled || !this.exporter) return;\n\n this.metricsIntervalId = setInterval(() => {\n this.flushMetrics();\n }, this.config.metricsIntervalMs);\n }\n\n /**\n * Check if this operation should be sampled\n */\n private shouldSample(isError: boolean = false): boolean {\n if (!this.config.enabled) return false;\n\n const sampling = this.config.sampling as SamplingConfig;\n if (isError && sampling.alwaysSampleErrors) return true;\n\n const ratio = sampling.ratio ?? 1.0;\n return Math.random() < ratio;\n }\n\n /**\n * Start a new span\n *\n * @example\n * ```typescript\n * const span = telemetry.startSpan('Wav2Vec2.infer', {\n * 'inference.input_samples': samples.length,\n * 'model.backend': 'webgpu',\n * });\n *\n * try {\n * const result = await doInference();\n * span.setAttributes({ 'inference.output_frames': result.frames });\n * span.end();\n * } catch (error) {\n * span.endWithError(error);\n * }\n * ```\n */\n startSpan(name: string, attributes: Partial<SpanAttributes> = {}, parentContext?: SpanContext): ActiveSpan {\n const traceId = parentContext?.traceId ?? this.activeTraceId ?? 
generateId(16);\n const spanId = generateId(8);\n const parentSpanId = parentContext?.spanId;\n const startTime = performance.now();\n\n // Set active trace if this is a root span\n if (!parentContext && !this.activeTraceId) {\n this.activeTraceId = traceId;\n }\n\n let spanAttributes = { ...attributes };\n let ended = false;\n let sampled = this.shouldSample();\n\n const context: SpanContext = { traceId, spanId, parentSpanId };\n\n const endSpan = (status: 'ok' | 'error', error?: Error): void => {\n if (ended) return;\n ended = true;\n\n const endTime = performance.now();\n const durationMs = endTime - startTime;\n\n // Re-check sampling for errors\n if (status === 'error' && !sampled) {\n sampled = this.shouldSample(true);\n }\n\n if (!sampled || !this.exporter) return;\n\n const spanData: SpanData = {\n name,\n traceId,\n spanId,\n parentSpanId,\n startTime,\n endTime,\n durationMs,\n status,\n attributes: spanAttributes as SpanAttributes,\n error,\n };\n\n this.exporter.exportSpan(spanData);\n\n // Clear active trace if this was the root span\n if (!parentSpanId && this.activeTraceId === traceId) {\n this.activeTraceId = null;\n }\n };\n\n return {\n end: () => endSpan('ok'),\n endWithError: (error: Error) => endSpan('error', error),\n setAttributes: (attrs: Partial<SpanAttributes>) => {\n spanAttributes = { ...spanAttributes, ...attrs };\n },\n getContext: () => context,\n };\n }\n\n /**\n * Wrap an async function with a span\n *\n * @example\n * ```typescript\n * const result = await telemetry.withSpan('Model.load', async (span) => {\n * const model = await loadModel();\n * span.setAttributes({ 'model.size_bytes': model.size });\n * return model;\n * });\n * ```\n */\n async withSpan<T>(\n name: string,\n fn: (span: ActiveSpan) => Promise<T>,\n attributes: Partial<SpanAttributes> = {},\n parentContext?: SpanContext\n ): Promise<T> {\n const span = this.startSpan(name, attributes, parentContext);\n\n try {\n const result = await fn(span);\n span.end();\n return result;\n } catch (error) {\n span.endWithError(error as Error);\n throw error;\n }\n }\n\n /**\n * Increment a counter metric\n *\n * @example\n * ```typescript\n * telemetry.incrementCounter('omote.inference.total', 1, {\n * model: 'wav2vec2',\n * backend: 'webgpu',\n * status: 'success',\n * });\n * ```\n */\n incrementCounter(\n name: string,\n value: number = 1,\n attributes: Record<string, string | number | boolean> = {}\n ): void {\n if (!this.config.enabled || !this.config.metricsEnabled) return;\n\n const key = this.getMetricKey(name, attributes);\n const existing = this.counters.get(key);\n\n if (existing) {\n existing.value += value;\n } else {\n this.counters.set(key, { value, attributes });\n }\n }\n\n /**\n * Record a histogram value\n *\n * @example\n * ```typescript\n * telemetry.recordHistogram('omote.inference.latency', durationMs, {\n * model: 'wav2vec2',\n * backend: 'webgpu',\n * });\n * ```\n */\n recordHistogram(\n name: string,\n value: number,\n attributes: Record<string, string | number | boolean> = {}\n ): void {\n if (!this.config.enabled || !this.config.metricsEnabled) return;\n\n const key = this.getMetricKey(name, attributes);\n const existing = this.histograms.get(key);\n\n if (existing) {\n existing.values.push(value);\n } else {\n this.histograms.set(key, { values: [value], attributes });\n }\n }\n\n /**\n * Generate unique key for metric with attributes\n */\n private getMetricKey(name: string, attributes: Record<string, string | number | boolean>): string {\n const sortedAttrs = 
Object.entries(attributes)\n .sort(([a], [b]) => a.localeCompare(b))\n .map(([k, v]) => `${k}=${v}`)\n .join(',');\n return `${name}|${sortedAttrs}`;\n }\n\n /**\n * Flush accumulated metrics to exporter\n */\n private flushMetrics(): void {\n if (!this.exporter) return;\n\n const timestamp = performance.now();\n\n // Export counters\n for (const [key, data] of this.counters) {\n const name = key.split('|')[0];\n const metric: MetricData = {\n name,\n type: 'counter',\n value: data.value,\n attributes: data.attributes,\n timestamp,\n };\n this.exporter.exportMetric(metric);\n }\n\n // Export histogram aggregates\n for (const [key, data] of this.histograms) {\n const name = key.split('|')[0];\n if (data.values.length === 0) continue;\n\n // Calculate average for histogram\n const sum = data.values.reduce((a, b) => a + b, 0);\n const avg = sum / data.values.length;\n\n const metric: MetricData = {\n name,\n type: 'histogram',\n value: avg,\n attributes: {\n ...data.attributes,\n count: data.values.length,\n sum,\n min: Math.min(...data.values),\n max: Math.max(...data.values),\n },\n timestamp,\n };\n this.exporter.exportMetric(metric);\n\n // Clear values after export\n data.values = [];\n }\n }\n\n /**\n * Force flush all pending data\n */\n async flush(): Promise<void> {\n this.flushMetrics();\n await this.exporter?.flush();\n }\n\n /**\n * Shutdown telemetry\n */\n async shutdown(): Promise<void> {\n if (this.metricsIntervalId) {\n clearInterval(this.metricsIntervalId);\n this.metricsIntervalId = null;\n }\n\n await this.flush();\n await this.exporter?.shutdown();\n this.exporter = null;\n }\n\n /**\n * Check if telemetry is enabled\n */\n isEnabled(): boolean {\n return this.config.enabled;\n }\n\n /**\n * Get current configuration\n */\n getConfig(): TelemetryConfig {\n return { ...this.config };\n }\n}\n","/**\n * Telemetry Types\n *\n * Configuration and type definitions for OpenTelemetry instrumentation.\n *\n * @category Telemetry\n */\n\n/**\n * Supported telemetry exporters\n */\nexport type TelemetryExporter = 'console' | 'otlp' | 'none';\n\n/**\n * Sampling configuration\n */\nexport interface SamplingConfig {\n /** Sampling ratio (0.0 - 1.0). Default: 1.0 (sample everything) */\n ratio?: number;\n /** Always sample errors regardless of ratio */\n alwaysSampleErrors?: boolean;\n}\n\n/**\n * OTLP exporter configuration\n */\nexport interface OTLPExporterConfig {\n /** OTLP endpoint URL (e.g., 'https://tempo.example.com/v1/traces') */\n endpoint: string;\n /** Optional headers for authentication */\n headers?: Record<string, string>;\n /** Request timeout in ms. Default: 10000 */\n timeoutMs?: number;\n}\n\n/**\n * Main telemetry configuration\n */\nexport interface TelemetryConfig {\n /** Enable/disable telemetry. Default: false */\n enabled?: boolean;\n /** Service name for spans. Default: 'omote-sdk' */\n serviceName?: string;\n /** Service version. Default: SDK version */\n serviceVersion?: string;\n /** Exporter type. Default: 'none' */\n exporter?: TelemetryExporter;\n /** OTLP exporter config (required if exporter is 'otlp') */\n exporterConfig?: OTLPExporterConfig;\n /** Sampling configuration */\n sampling?: SamplingConfig;\n /** Enable metrics collection. Default: true when telemetry enabled */\n metricsEnabled?: boolean;\n /** Metrics export interval in ms. 
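Putting incrementCounter and recordHistogram together with the console exporter shows what the accumulators emit on flush. A sketch assuming configureTelemetry and MetricNames are re-exported from the package root; because attribute keys are sorted when the internal series key is built, attribute order never splits a series.

import { configureTelemetry, MetricNames } from '@omote/core';

const telemetry = configureTelemetry({ enabled: true, exporter: 'console' });

telemetry.incrementCounter(MetricNames.INFERENCE_TOTAL, 1, { model: 'wav2vec2', backend: 'wasm' });
telemetry.recordHistogram(MetricNames.INFERENCE_LATENCY, 42, { model: 'wav2vec2' });
telemetry.recordHistogram(MetricNames.INFERENCE_LATENCY, 58, { model: 'wav2vec2' });

// On the next flush (every metricsIntervalMs, default 60 s, or via flush()):
// - counter   omote.inference.total   -> value 1, attributes { model, backend }
// - histogram omote.inference.latency -> value 50 (the average), attributes augmented with
//   count: 2, sum: 100, min: 42, max: 58; histogram samples are cleared after export
await telemetry.flush();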
Default: 60000 */\n metricsIntervalMs?: number;\n}\n\n/**\n * Span attributes for model operations\n */\nexport interface ModelSpanAttributes {\n /** Model URL or identifier */\n 'model.url'?: string;\n /** Model name (e.g., 'whisper', 'lam', 'silero-vad') */\n 'model.name'?: string;\n /** Inference backend used */\n 'model.backend'?: 'webgpu' | 'wasm';\n /** Whether model was loaded from cache */\n 'model.cached'?: boolean;\n /** Model size in bytes */\n 'model.size_bytes'?: number;\n}\n\n/**\n * Span attributes for inference operations\n */\nexport interface InferenceSpanAttributes extends ModelSpanAttributes {\n /** Number of input audio samples */\n 'inference.input_samples'?: number;\n /** Input duration in ms */\n 'inference.input_duration_ms'?: number;\n /** Number of output frames (for LAM) */\n 'inference.output_frames'?: number;\n /** Inference duration in ms */\n 'inference.duration_ms'?: number;\n /** Whether inference succeeded */\n 'inference.success'?: boolean;\n /** Error type if failed */\n 'inference.error_type'?: string;\n}\n\n/**\n * Span attributes for cache operations\n */\nexport interface CacheSpanAttributes {\n /** Cache key (URL) */\n 'cache.key'?: string;\n /** Whether it was a cache hit */\n 'cache.hit'?: boolean;\n /** Size of cached item in bytes */\n 'cache.size_bytes'?: number;\n /** Cache operation type */\n 'cache.operation'?: 'get' | 'set' | 'delete';\n}\n\n/**\n * Combined span attributes type\n */\nexport type SpanAttributes =\n | ModelSpanAttributes\n | InferenceSpanAttributes\n | CacheSpanAttributes\n | Record<string, string | number | boolean | undefined>;\n\n/**\n * Metric names used by the SDK\n */\nexport const MetricNames = {\n /** Histogram: Inference latency in ms */\n INFERENCE_LATENCY: 'omote.inference.latency',\n /** Histogram: Model load time in ms */\n MODEL_LOAD_TIME: 'omote.model.load_time',\n /** Counter: Total inference operations */\n INFERENCE_TOTAL: 'omote.inference.total',\n /** Counter: Total errors */\n ERRORS_TOTAL: 'omote.errors.total',\n /** Counter: Cache hits */\n CACHE_HITS: 'omote.cache.hits',\n /** Counter: Cache misses */\n CACHE_MISSES: 'omote.cache.misses',\n} as const;\n\n/**\n * Histogram buckets for inference latency (ms)\n */\nexport const INFERENCE_LATENCY_BUCKETS = [1, 5, 10, 25, 50, 100, 250, 500, 1000, 2500, 5000];\n\n/**\n * Histogram buckets for model load time (ms)\n */\nexport const MODEL_LOAD_TIME_BUCKETS = [100, 500, 1000, 2500, 5000, 10000, 30000, 60000];\n","/**\n * Model Cache\n *\n * Caches ONNX models in IndexedDB for faster subsequent loads.\n * IndexedDB can handle large files (100s of MBs) unlike localStorage.\n *\n * @category Cache\n */\n\nimport { getTelemetry } from '../telemetry';\n\nconst DB_NAME = 'omote-model-cache';\nconst DB_VERSION = 2;\nconst STORE_NAME = 'models';\n\n/** Default cache size limit: 1GB */\nconst DEFAULT_MAX_SIZE_BYTES = 1024 * 1024 * 1024;\n\n/**\n * Configuration for cache size limits and eviction behavior\n */\nexport interface CacheConfig {\n /** Maximum total cache size in bytes (default: 1GB) */\n maxSizeBytes?: number;\n /** Maximum age in milliseconds before eviction (default: none) */\n maxAgeMs?: number;\n /** Callback when storage quota exceeds warning threshold */\n onQuotaWarning?: (info: QuotaInfo) => void;\n}\n\n/**\n * Storage quota information\n */\nexport interface QuotaInfo {\n /** Total bytes used across all origins */\n usedBytes: number;\n /** Total available quota in bytes */\n quotaBytes: number;\n /** Percentage of quota used (0-100) */\n 
percentUsed: number;\n /** Bytes used by omote cache specifically */\n cacheBytes: number;\n}\n\n/** Global cache configuration */\nlet globalCacheConfig: CacheConfig = {\n maxSizeBytes: DEFAULT_MAX_SIZE_BYTES,\n};\n\n/**\n * Configure cache size limits and eviction behavior\n *\n * @param config - Cache configuration options\n *\n * @example\n * ```typescript\n * import { configureCacheLimit } from '@omote/core';\n *\n * // Set 500MB limit with 24-hour max age\n * configureCacheLimit({\n * maxSizeBytes: 500 * 1024 * 1024,\n * maxAgeMs: 24 * 60 * 60 * 1000,\n * onQuotaWarning: (info) => {\n * console.warn(`Storage ${info.percentUsed.toFixed(1)}% used`);\n * }\n * });\n * ```\n */\nexport function configureCacheLimit(config: CacheConfig): void {\n globalCacheConfig = {\n ...globalCacheConfig,\n ...config,\n };\n\n // Trigger immediate cleanup if over limit\n const cache = getModelCache();\n cache.enforceLimit().catch((err) => {\n console.warn('[ModelCache] Failed to enforce limit after config change:', err);\n });\n}\n\n/**\n * Get current cache configuration\n */\nexport function getCacheConfig(): CacheConfig {\n return { ...globalCacheConfig };\n}\n\ninterface CachedModel {\n url: string;\n data: ArrayBuffer;\n size: number;\n cachedAt: number;\n /** Last time this model was accessed (for LRU eviction) */\n lastAccessedAt: number;\n etag?: string;\n version?: string;\n}\n\n/**\n * Result from getWithValidation() method\n */\nexport interface ValidationResult {\n /** The cached data, or null if not found */\n data: ArrayBuffer | null;\n /** True if the cached data is stale (etag mismatch) */\n stale: boolean;\n}\n\n/**\n * Generate a version-aware cache key\n *\n * @param url - The model URL\n * @param version - Optional version string\n * @returns The cache key (url#vX.X.X if version provided, url otherwise)\n *\n * @example\n * ```typescript\n * getCacheKey('http://example.com/model.onnx', '1.0.0')\n * // Returns: 'http://example.com/model.onnx#v1.0.0'\n *\n * getCacheKey('http://example.com/model.onnx')\n * // Returns: 'http://example.com/model.onnx'\n * ```\n */\nexport function getCacheKey(url: string, version?: string): string {\n if (version) {\n return `${url}#v${version}`;\n }\n return url;\n}\n\ninterface CacheStats {\n totalSize: number;\n modelCount: number;\n models: { url: string; size: number; cachedAt: Date }[];\n}\n\n/**\n * ModelCache - IndexedDB-based cache for ONNX models\n */\nexport class ModelCache {\n private db: IDBDatabase | null = null;\n private dbPromise: Promise<IDBDatabase> | null = null;\n\n /**\n * Initialize the cache database\n */\n private async getDB(): Promise<IDBDatabase> {\n if (this.db) return this.db;\n if (this.dbPromise) return this.dbPromise;\n\n // Request persistent storage for more generous quota on iOS/mobile browsers\n // This increases available storage from ~50MB to potentially GBs\n if (navigator.storage && navigator.storage.persist) {\n try {\n const isPersisted = await navigator.storage.persist();\n if (isPersisted) {\n console.log('[ModelCache] Persistent storage granted - increased quota available');\n } else {\n console.log('[ModelCache] Persistent storage denied - using default quota');\n }\n\n // Log current quota usage (helpful for debugging iOS limits)\n if (navigator.storage.estimate) {\n const estimate = await navigator.storage.estimate();\n const usedMB = ((estimate.usage || 0) / 1024 / 1024).toFixed(2);\n const quotaMB = ((estimate.quota || 0) / 1024 / 1024).toFixed(2);\n console.log(`[ModelCache] Storage: ${usedMB}MB / 
${quotaMB}MB quota`);\n }\n } catch (err) {\n console.warn('[ModelCache] Failed to request persistent storage:', err);\n }\n }\n\n this.dbPromise = new Promise((resolve, reject) => {\n const request = indexedDB.open(DB_NAME, DB_VERSION);\n\n request.onerror = () => {\n console.error('[ModelCache] Failed to open IndexedDB:', request.error);\n reject(request.error);\n };\n\n request.onsuccess = () => {\n this.db = request.result;\n resolve(this.db);\n };\n\n request.onupgradeneeded = (event) => {\n const db = (event.target as IDBOpenDBRequest).result;\n const oldVersion = (event as IDBVersionChangeEvent).oldVersion;\n const tx = (event.target as IDBOpenDBRequest).transaction;\n\n if (oldVersion < 1) {\n // Initial schema: create store with url as key\n const store = db.createObjectStore(STORE_NAME, { keyPath: 'url' });\n store.createIndex('lastAccessedAt', 'lastAccessedAt', { unique: false });\n } else if (oldVersion < 2 && tx) {\n // Migrate from v1 to v2: add lastAccessedAt index and backfill existing entries\n const store = tx.objectStore(STORE_NAME);\n\n // Create index if it doesn't exist\n if (!store.indexNames.contains('lastAccessedAt')) {\n store.createIndex('lastAccessedAt', 'lastAccessedAt', { unique: false });\n }\n\n // Migrate existing entries: set lastAccessedAt = cachedAt\n const cursorRequest = store.openCursor();\n cursorRequest.onsuccess = (cursorEvent) => {\n const cursor = (cursorEvent.target as IDBRequest<IDBCursorWithValue>).result;\n if (cursor) {\n const value = cursor.value;\n if (value.lastAccessedAt === undefined) {\n value.lastAccessedAt = value.cachedAt || Date.now();\n cursor.update(value);\n }\n cursor.continue();\n }\n };\n }\n };\n });\n\n return this.dbPromise;\n }\n\n /**\n * Check if a model is cached\n */\n async has(url: string): Promise<boolean> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.count(url);\n request.onsuccess = () => resolve(request.result > 0);\n request.onerror = () => resolve(false);\n });\n } catch {\n return false;\n }\n }\n\n /**\n * Get a cached model\n *\n * Updates lastAccessedAt timestamp for LRU tracking on cache hit.\n */\n async get(url: string): Promise<ArrayBuffer | null> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.get', { 'cache.url': url });\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n // Use readwrite to update lastAccessedAt on hit\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n const request = store.get(url);\n request.onsuccess = () => {\n const cached = request.result as CachedModel | undefined;\n const hit = cached?.data != null;\n span?.setAttributes({ 'cache.hit': hit });\n if (cached) {\n span?.setAttributes({ 'cache.size_bytes': cached.size });\n // Update lastAccessedAt for LRU tracking\n cached.lastAccessedAt = Date.now();\n store.put(cached);\n }\n span?.end();\n if (hit) {\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n } else {\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n }\n resolve(cached?.data ?? 
null);\n };\n request.onerror = () => {\n span?.setAttributes({ 'cache.hit': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n resolve(null);\n };\n });\n } catch {\n span?.endWithError(new Error('Cache get failed'));\n return null;\n }\n }\n\n /**\n * Get a cached model with ETag validation\n *\n * Validates the cached data against the server's current ETag.\n * If the cached ETag differs from the server's, the data is marked as stale.\n *\n * @param url - The cache key\n * @param originalUrl - The original URL for HEAD request (if different from cache key)\n * @returns ValidationResult with data and stale flag\n *\n * @example\n * ```typescript\n * const result = await cache.getWithValidation('http://example.com/model.onnx');\n * if (result.data && !result.stale) {\n * // Use cached data\n * } else if (result.stale) {\n * // Refetch and update cache\n * }\n * ```\n */\n async getWithValidation(url: string, originalUrl?: string): Promise<ValidationResult> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.getWithValidation', { 'cache.url': url });\n\n try {\n const db = await this.getDB();\n const cached = await new Promise<CachedModel | undefined>((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.get(url);\n request.onsuccess = () => resolve(request.result as CachedModel | undefined);\n request.onerror = () => resolve(undefined);\n });\n\n // Cache miss\n if (!cached?.data) {\n span?.setAttributes({ 'cache.hit': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.misses', 1, {});\n return { data: null, stale: false };\n }\n\n span?.setAttributes({ 'cache.hit': true, 'cache.size_bytes': cached.size });\n\n // No etag stored - can't validate, return as fresh\n if (!cached.etag) {\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n\n // Validate via HEAD request\n const fetchUrl = originalUrl || url;\n try {\n const response = await fetch(fetchUrl, { method: 'HEAD' });\n if (!response.ok) {\n // Server error - assume cache is still valid\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n\n const serverEtag = response.headers.get('etag');\n const isStale = serverEtag !== null && serverEtag !== cached.etag;\n\n span?.setAttributes({\n 'cache.validated': true,\n 'cache.stale': isStale,\n 'cache.server_etag': serverEtag || 'none',\n 'cache.cached_etag': cached.etag,\n });\n span?.end();\n\n if (isStale) {\n telemetry?.incrementCounter('omote.cache.stale', 1, {});\n console.log(`[ModelCache] Stale cache detected for ${url}`);\n } else {\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n }\n\n return { data: cached.data, stale: isStale };\n } catch (fetchError) {\n // HEAD request failed (network error, CORS, etc.)\n // Return cached data as non-stale - better than failing completely\n console.warn('[ModelCache] HEAD validation failed, using cached data:', fetchError);\n span?.setAttributes({ 'cache.validated': false, 'cache.stale': false });\n span?.end();\n telemetry?.incrementCounter('omote.cache.hits', 1, {});\n return { data: cached.data, stale: false };\n }\n } catch {\n span?.endWithError(new Error('Cache 
getWithValidation failed'));\n return { data: null, stale: false };\n }\n }\n\n /**\n * Store a model in cache\n *\n * After storing, triggers LRU eviction if cache exceeds size limit.\n *\n * @param url - The cache key (use getCacheKey() for versioned keys)\n * @param data - The model data\n * @param etag - Optional ETag for staleness validation\n * @param version - Optional version string for metadata\n */\n async set(url: string, data: ArrayBuffer, etag?: string, version?: string): Promise<void> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.set', {\n 'cache.url': url,\n 'cache.size_bytes': data.byteLength,\n ...(version && { 'cache.version': version }),\n });\n try {\n // Check quota before caching (best effort, don't block write)\n this.checkQuota().catch((err) => {\n console.warn('[ModelCache] Quota check failed:', err);\n });\n\n const db = await this.getDB();\n await new Promise<void>((resolve, reject) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n const now = Date.now();\n const cached: CachedModel = {\n url,\n data,\n size: data.byteLength,\n cachedAt: now,\n lastAccessedAt: now,\n etag,\n version,\n };\n const request = store.put(cached);\n request.onsuccess = () => {\n span?.end();\n resolve();\n };\n request.onerror = () => {\n span?.endWithError(request.error || new Error('Cache set failed'));\n reject(request.error);\n };\n });\n\n // Trigger LRU cleanup after write (don't block)\n this.enforceLimit().catch((err) => {\n console.warn('[ModelCache] Failed to enforce limit after set:', err);\n });\n } catch (err) {\n console.warn('[ModelCache] Failed to cache model:', err);\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n }\n }\n\n /**\n * Check storage quota and trigger warnings/cleanup as needed\n *\n * - Logs warning if quota > 90% used\n * - Triggers LRU cleanup if quota > 95% used\n * - Calls onQuotaWarning callback if configured\n */\n private async checkQuota(): Promise<void> {\n const quota = await this.getQuotaInfo();\n if (!quota) {\n return; // API unavailable\n }\n\n const config = globalCacheConfig;\n const telemetry = getTelemetry();\n\n if (quota.percentUsed > 90) {\n console.warn(`[ModelCache] Storage quota ${quota.percentUsed.toFixed(1)}% used (${formatBytes(quota.usedBytes)} / ${formatBytes(quota.quotaBytes)})`);\n\n // Emit telemetry counter\n telemetry?.incrementCounter('omote.cache.quota_warning', 1, {\n percent_used: String(Math.round(quota.percentUsed)),\n });\n\n // Call user callback if configured\n if (config.onQuotaWarning) {\n try {\n config.onQuotaWarning(quota);\n } catch (err) {\n console.warn('[ModelCache] onQuotaWarning callback error:', err);\n }\n }\n }\n\n if (quota.percentUsed > 95) {\n console.warn('[ModelCache] Storage quota critical (>95%), triggering LRU cleanup');\n // Free at least 10% of cache to make room\n const bytesToFree = Math.max(quota.cacheBytes * 0.1, 10 * 1024 * 1024);\n await this.evictOldest(bytesToFree);\n }\n }\n\n /**\n * Delete a cached model\n */\n async delete(url: string): Promise<void> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n store.delete(url);\n tx.oncomplete = () => resolve();\n });\n } catch {\n // Ignore errors\n }\n }\n\n /**\n * Clear all cached models\n */\n async clear(): Promise<void> {\n try {\n const db = await this.getDB();\n return new 
Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const store = tx.objectStore(STORE_NAME);\n store.clear();\n tx.oncomplete = () => resolve();\n });\n } catch {\n // Ignore errors\n }\n }\n\n /**\n * Get cache statistics\n */\n async getStats(): Promise<CacheStats> {\n try {\n const db = await this.getDB();\n return new Promise((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.getAll();\n request.onsuccess = () => {\n const models = (request.result as CachedModel[]) || [];\n resolve({\n totalSize: models.reduce((sum, m) => sum + m.size, 0),\n modelCount: models.length,\n models: models.map((m) => ({\n url: m.url,\n size: m.size,\n cachedAt: new Date(m.cachedAt),\n })),\n });\n };\n request.onerror = () => resolve({ totalSize: 0, modelCount: 0, models: [] });\n });\n } catch {\n return { totalSize: 0, modelCount: 0, models: [] };\n }\n }\n\n /**\n * Enforce cache size limit by evicting oldest entries (LRU)\n *\n * Called automatically after each set() operation.\n * Can also be called manually to trigger cleanup.\n */\n async enforceLimit(): Promise<void> {\n const config = globalCacheConfig;\n const maxSize = config.maxSizeBytes ?? DEFAULT_MAX_SIZE_BYTES;\n\n const stats = await this.getStats();\n if (stats.totalSize <= maxSize) {\n return; // Under limit, nothing to do\n }\n\n const bytesToFree = stats.totalSize - maxSize;\n const evictedUrls = await this.evictOldest(bytesToFree);\n\n if (evictedUrls.length > 0) {\n console.log(`[ModelCache] LRU eviction: removed ${evictedUrls.length} models to free ${formatBytes(bytesToFree)}`);\n }\n }\n\n /**\n * Evict oldest entries (by lastAccessedAt) to free space\n *\n * @param bytesToFree - Minimum bytes to free\n * @returns List of evicted URLs\n *\n * @example\n * ```typescript\n * const cache = getModelCache();\n * const evicted = await cache.evictOldest(100 * 1024 * 1024); // Free 100MB\n * console.log('Evicted:', evicted);\n * ```\n */\n async evictOldest(bytesToFree: number): Promise<string[]> {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('ModelCache.evictOldest', {\n 'eviction.bytes_requested': bytesToFree,\n });\n\n try {\n const db = await this.getDB();\n\n // Get all models sorted by lastAccessedAt (oldest first)\n const models = await new Promise<CachedModel[]>((resolve) => {\n const tx = db.transaction(STORE_NAME, 'readonly');\n const store = tx.objectStore(STORE_NAME);\n const request = store.getAll();\n request.onsuccess = () => {\n const all = (request.result as CachedModel[]) || [];\n // Sort by lastAccessedAt ascending (oldest first)\n all.sort((a, b) => (a.lastAccessedAt || a.cachedAt || 0) - (b.lastAccessedAt || b.cachedAt || 0));\n resolve(all);\n };\n request.onerror = () => resolve([]);\n });\n\n const evictedUrls: string[] = [];\n let freedBytes = 0;\n\n // Evict models until we've freed enough space\n for (const model of models) {\n if (freedBytes >= bytesToFree) {\n break;\n }\n\n await this.delete(model.url);\n evictedUrls.push(model.url);\n freedBytes += model.size;\n\n console.log(`[ModelCache] Evicted: ${model.url} (${formatBytes(model.size)})`);\n }\n\n span?.setAttributes({\n 'eviction.bytes_freed': freedBytes,\n 'eviction.models_evicted': evictedUrls.length,\n });\n span?.end();\n\n // Emit telemetry counter\n if (freedBytes > 0) {\n telemetry?.incrementCounter('omote.cache.eviction', evictedUrls.length, {\n bytes_freed: String(freedBytes),\n });\n }\n\n return 
evictedUrls;\n } catch (err) {\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n console.warn('[ModelCache] Eviction failed:', err);\n return [];\n }\n }\n\n /**\n * Get storage quota information\n *\n * Uses navigator.storage.estimate() to get quota details.\n * Returns null if the API is unavailable.\n *\n * @returns Quota info or null if unavailable\n *\n * @example\n * ```typescript\n * const cache = getModelCache();\n * const quota = await cache.getQuotaInfo();\n * if (quota) {\n * console.log(`Using ${quota.percentUsed.toFixed(1)}% of quota`);\n * }\n * ```\n */\n async getQuotaInfo(): Promise<QuotaInfo | null> {\n if (!navigator?.storage?.estimate) {\n return null;\n }\n\n try {\n const estimate = await navigator.storage.estimate();\n const usedBytes = estimate.usage || 0;\n const quotaBytes = estimate.quota || 0;\n const percentUsed = quotaBytes > 0 ? (usedBytes / quotaBytes) * 100 : 0;\n\n const stats = await this.getStats();\n\n return {\n usedBytes,\n quotaBytes,\n percentUsed,\n cacheBytes: stats.totalSize,\n };\n } catch {\n return null;\n }\n }\n}\n\n// Singleton instance\nlet cacheInstance: ModelCache | null = null;\n\n/**\n * Get the global ModelCache instance\n */\nexport function getModelCache(): ModelCache {\n if (!cacheInstance) {\n cacheInstance = new ModelCache();\n }\n return cacheInstance;\n}\n\n// Max size for IndexedDB caching\n// When storing ArrayBuffer in IndexedDB, browser does structured clone which\n// temporarily doubles memory usage. To avoid STATUS_BREAKPOINT crashes:\n// - Files < 500MB: Cache as ArrayBuffer (safe, fast retrieval)\n// - Files >= 500MB: Skip IndexedDB, rely on HTTP cache\n// See: https://bugs.chromium.org/p/chromium/issues/detail?id=170845\nconst MAX_CACHE_SIZE_BYTES = 500 * 1024 * 1024;\n\n/**\n * Options for fetchWithCache\n */\nexport interface FetchWithCacheOptions {\n /** Optional version string for versioned caching */\n version?: string;\n /** If true, validates cached data against server ETag and refetches if stale */\n validateStale?: boolean;\n /** Progress callback during download */\n onProgress?: (loaded: number, total: number) => void;\n}\n\n/**\n * Fetch a model with caching\n * Uses IndexedDB cache with network fallback\n * Files larger than 500MB are not cached to IndexedDB to avoid memory pressure\n * (structured clone during IndexedDB write temporarily doubles memory usage)\n *\n * @param url - The URL to fetch\n * @param onProgress - Optional progress callback (legacy signature)\n * @returns The fetched ArrayBuffer\n *\n * @example\n * ```typescript\n * // Simple usage (backwards compatible)\n * const data = await fetchWithCache('http://example.com/model.onnx');\n *\n * // With progress callback (backwards compatible)\n * const data = await fetchWithCache('http://example.com/model.onnx', (loaded, total) => {\n * console.log(`${loaded}/${total} bytes`);\n * });\n *\n * // With options (new API)\n * const data = await fetchWithCache('http://example.com/model.onnx', {\n * version: '1.0.0',\n * validateStale: true,\n * onProgress: (loaded, total) => console.log(`${loaded}/${total}`)\n * });\n * ```\n */\nexport async function fetchWithCache(\n url: string,\n optionsOrProgress?: FetchWithCacheOptions | ((loaded: number, total: number) => void)\n): Promise<ArrayBuffer> {\n // Normalize arguments - support both old and new signatures\n let options: FetchWithCacheOptions = {};\n if (typeof optionsOrProgress === 'function') {\n // Legacy signature: fetchWithCache(url, onProgress)\n options = { 
onProgress: optionsOrProgress };\n } else if (optionsOrProgress) {\n // New signature: fetchWithCache(url, options)\n options = optionsOrProgress;\n }\n\n const { version, validateStale = false, onProgress } = options;\n\n const cache = getModelCache();\n const cacheKey = version ? getCacheKey(url, version) : url;\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('fetchWithCache', {\n 'fetch.url': url,\n ...(version && { 'fetch.version': version }),\n 'fetch.validate_stale': validateStale,\n });\n\n // Check cache with optional staleness validation\n if (validateStale) {\n const validation = await cache.getWithValidation(cacheKey, url);\n\n if (validation.data && !validation.stale) {\n console.log(`[ModelCache] Cache hit (validated): ${url} (${(validation.data.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n onProgress?.(validation.data.byteLength, validation.data.byteLength);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.cache_validated': true,\n 'fetch.cache_stale': false,\n 'fetch.size_bytes': validation.data.byteLength,\n });\n span?.end();\n return validation.data;\n }\n\n if (validation.stale) {\n console.log(`[ModelCache] Cache stale, refetching: ${url}`);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.cache_validated': true,\n 'fetch.cache_stale': true,\n });\n // Continue to fetch fresh data\n }\n // If data is null, continue to fetch\n } else {\n // Simple cache check without validation (backwards compatible behavior)\n const cached = await cache.get(cacheKey);\n if (cached) {\n console.log(`[ModelCache] Cache hit: ${url} (${(cached.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n onProgress?.(cached.byteLength, cached.byteLength);\n span?.setAttributes({\n 'fetch.cache_hit': true,\n 'fetch.size_bytes': cached.byteLength,\n });\n span?.end();\n return cached;\n }\n }\n\n span?.setAttributes({ 'fetch.cache_hit': false });\n console.log(`[ModelCache] Cache miss, fetching: ${url}`);\n\n try {\n // Fetch with progress\n const response = await fetch(url);\n if (!response.ok) {\n throw new Error(`Failed to fetch ${url}: ${response.status}`);\n }\n\n const contentLength = response.headers.get('content-length');\n const total = contentLength ? parseInt(contentLength, 10) : 0;\n const etag = response.headers.get('etag') ?? 
undefined;\n\n // Check if file is too large for IndexedDB (avoid memory pressure during structured clone)\n const tooLargeForCache = total > MAX_CACHE_SIZE_BYTES;\n if (tooLargeForCache) {\n console.log(`[ModelCache] File too large for IndexedDB (${(total / 1024 / 1024).toFixed(0)}MB > 500MB), using HTTP cache only`);\n }\n\n if (!response.body) {\n const data = await response.arrayBuffer();\n if (!tooLargeForCache) {\n await cache.set(cacheKey, data, etag, version);\n }\n span?.setAttributes({\n 'fetch.size_bytes': data.byteLength,\n 'fetch.cached_to_indexeddb': !tooLargeForCache,\n });\n span?.end();\n return data;\n }\n\n // Stream with progress\n const reader = response.body.getReader();\n const chunks: Uint8Array[] = [];\n let loaded = 0;\n\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n loaded += value.length;\n onProgress?.(loaded, total || loaded);\n }\n\n // Combine chunks\n const data = new Uint8Array(loaded);\n let offset = 0;\n for (const chunk of chunks) {\n data.set(chunk, offset);\n offset += chunk.length;\n }\n\n const buffer = data.buffer;\n\n // Cache for next time (if not too large)\n if (!tooLargeForCache) {\n await cache.set(cacheKey, buffer, etag, version);\n console.log(`[ModelCache] Cached: ${url} (${(buffer.byteLength / 1024 / 1024).toFixed(1)}MB)`);\n }\n\n span?.setAttributes({\n 'fetch.size_bytes': buffer.byteLength,\n 'fetch.cached_to_indexeddb': !tooLargeForCache,\n });\n span?.end();\n\n return buffer;\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n throw error;\n }\n}\n\n/**\n * Preload models into cache without creating sessions\n */\nexport async function preloadModels(\n urls: string[],\n onProgress?: (current: number, total: number, url: string) => void\n): Promise<void> {\n const cache = getModelCache();\n\n for (let i = 0; i < urls.length; i++) {\n const url = urls[i];\n onProgress?.(i, urls.length, url);\n\n if (await cache.has(url)) {\n console.log(`[ModelCache] Already cached: ${url}`);\n continue;\n }\n\n await fetchWithCache(url);\n }\n\n onProgress?.(urls.length, urls.length, 'done');\n}\n\n/**\n * Format bytes as human readable string\n */\nexport function formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)} MB`;\n return `${(bytes / 1024 / 1024 / 1024).toFixed(1)} GB`;\n}\n","/**\r\n * Runtime detection utilities for platform-specific inference configuration\r\n *\r\n * These utilities help determine the optimal backend (WebGPU vs WASM) based on\r\n * the current platform's capabilities and known limitations.\r\n *\r\n * Key considerations:\r\n * - iOS Safari: WebGPU crashes due to JSEP bugs (GitHub #22776, #26827)\r\n * - Android Chrome: WebGPU works well (Chrome 121+)\r\n * - Desktop: WebGPU preferred for performance\r\n *\r\n * @module utils/runtime\r\n */\r\n\r\n/**\r\n * Supported inference backends\r\n */\r\nexport type RuntimeBackend = 'webgpu' | 'wasm';\r\n\r\n/**\r\n * User-configurable backend preference\r\n */\r\nexport type BackendPreference =\r\n | 'auto' // iOS→WASM, else→WebGPU with fallback\r\n | 'webgpu' // Prefer WebGPU, fallback to WASM on error\r\n | 'wasm' // Prefer WASM, no WebGPU attempt\r\n | 'webgpu-only' // Force WebGPU, throw on failure (for debugging)\r\n | 'wasm-only'; // Force WASM, never load WebGPU bundle (smallest 
bundle)\r\n\r\n/**\r\n * Detect iOS Safari browser\r\n *\r\n * iOS Safari has severe WebGPU issues:\r\n * - JSEP compilation bugs cause OOM during session creation\r\n * - Threading bugs require numThreads=1\r\n * - Proxy mode triggers memory leaks\r\n *\r\n * @returns true if running in iOS Safari\r\n */\r\nexport function isIOSSafari(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n return (\r\n /iphone|ipad|ipod/.test(ua) ||\r\n // Safari on macOS could also have issues, but less severe\r\n // Only force WASM on actual iOS devices\r\n (/safari/.test(ua) && /mobile/.test(ua) && !/chrome|crios|fxios/.test(ua))\r\n );\r\n}\r\n\r\n/**\r\n * Detect any iOS device (regardless of browser)\r\n *\r\n * On iOS, all browsers use WebKit, so Chrome/Firefox on iOS\r\n * have the same limitations as Safari.\r\n *\r\n * @returns true if running on any iOS device\r\n */\r\nexport function isIOS(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n return /iphone|ipad|ipod/.test(ua);\r\n}\r\n\r\n/**\r\n * Detect Android device\r\n *\r\n * Android Chrome 121+ has good WebGPU support with Qualcomm/ARM GPUs.\r\n *\r\n * @returns true if running on Android\r\n */\r\nexport function isAndroid(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n return /android/i.test(navigator.userAgent);\r\n}\r\n\r\n/**\r\n * Detect any mobile device (iOS or Android)\r\n *\r\n * Mobile devices have different performance characteristics:\r\n * - Lower memory limits\r\n * - Thermal throttling\r\n * - Different GPU architectures\r\n *\r\n * @returns true if running on mobile\r\n */\r\nexport function isMobile(): boolean {\r\n return isIOS() || isAndroid();\r\n}\r\n\r\n/**\r\n * Check if WebGPU API is available in the browser\r\n *\r\n * Note: This only checks if the API exists, not if it works reliably.\r\n * iOS has navigator.gpu but ONNX Runtime's WebGPU backend crashes.\r\n *\r\n * @returns true if navigator.gpu exists\r\n */\r\nexport function hasWebGPUApi(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n return 'gpu' in navigator && navigator.gpu !== undefined;\r\n}\r\n\r\n/**\r\n * Get the recommended backend for the current platform\r\n *\r\n * Decision tree:\r\n * 1. iOS (any browser): Force WASM (WebGPU crashes)\r\n * 2. Android: WebGPU preferred (works in Chrome 121+)\r\n * 3. Desktop: WebGPU preferred (best performance)\r\n *\r\n * @returns 'wasm' for iOS, 'webgpu' for everything else\r\n */\r\nexport function getRecommendedBackend(): RuntimeBackend {\r\n // Safari (all platforms): Always WASM - WebGPU crashes due to JSEP bugs\r\n // iOS: All browsers use WebKit, so all have the same issue\r\n // macOS Safari: Same multithreaded JSEP build bug\r\n if (isSafari() || isIOS()) {\r\n return 'wasm';\r\n }\r\n\r\n // Android/Desktop (non-Safari): WebGPU preferred\r\n return 'webgpu';\r\n}\r\n\r\n/**\r\n * Resolve user preference to actual backend\r\n *\r\n * @param preference User's backend preference\r\n * @param webgpuAvailable Whether WebGPU is available and working\r\n * @returns The backend to use\r\n */\r\nexport function resolveBackend(\r\n preference: BackendPreference,\r\n webgpuAvailable: boolean\r\n): RuntimeBackend {\r\n switch (preference) {\r\n case 'wasm-only':\r\n return 'wasm';\r\n\r\n case 'webgpu-only':\r\n if (!webgpuAvailable) {\r\n throw new Error(\r\n 'WebGPU requested but not available. 
Use \"webgpu\" or \"auto\" for fallback.'\r\n );\r\n }\r\n return 'webgpu';\r\n\r\n case 'wasm':\r\n return 'wasm';\r\n\r\n case 'webgpu':\r\n return webgpuAvailable ? 'webgpu' : 'wasm';\r\n\r\n case 'auto':\r\n default:\r\n // Auto: Use platform recommendation, with WebGPU availability check\r\n const recommended = getRecommendedBackend();\r\n if (recommended === 'webgpu' && !webgpuAvailable) {\r\n return 'wasm';\r\n }\r\n return recommended;\r\n }\r\n}\r\n\r\n/**\r\n * Get optimal WASM thread count for current platform\r\n *\r\n * @returns Recommended number of WASM threads\r\n */\r\nexport function getOptimalWasmThreads(): number {\r\n if (isIOS()) {\r\n // iOS: Must be 1 to avoid shared memory bugs (GitHub #22086)\r\n return 1;\r\n }\r\n\r\n if (isAndroid()) {\r\n // Android: Conservative threading (2 threads)\r\n return 2;\r\n }\r\n\r\n // Desktop: Full threading (4 threads)\r\n return 4;\r\n}\r\n\r\n/**\r\n * Check if WASM proxy mode should be enabled\r\n *\r\n * Proxy mode offloads inference to a Web Worker, but has issues:\r\n * - iOS: Triggers Safari 26 JSEP memory leak\r\n * - Mobile: Generally unstable\r\n *\r\n * @returns true if proxy mode is safe to enable\r\n */\r\nexport function shouldEnableWasmProxy(): boolean {\r\n // Mobile: Disable proxy (triggers memory issues)\r\n if (isMobile()) {\r\n return false;\r\n }\r\n\r\n // Desktop: Enable proxy for better threading\r\n return true;\r\n}\r\n\r\n/**\r\n * Detect Safari browser on any platform (macOS + iOS)\r\n *\r\n * Safari WebKit has bugs with ONNX Runtime's WebGPU multithreaded JSEP build\r\n * that crash session creation. Both iOS and macOS Safari are affected.\r\n *\r\n * @returns true if running in Safari on any platform\r\n */\r\nexport function isSafari(): boolean {\r\n if (typeof navigator === 'undefined') return false;\r\n const ua = navigator.userAgent.toLowerCase();\r\n // Safari: has \"safari\" but not Chrome, Chromium, CriOS, FxiOS, or Edge\r\n return /safari/.test(ua) && !/chrome|crios|fxios|chromium|edg/.test(ua);\r\n}\r\n\r\n/**\r\n * Recommend using CPU-optimized lip sync model (wav2arkit_cpu)\r\n *\r\n * All WebKit browsers (Safari macOS, Safari iOS, Chrome iOS, Firefox iOS)\r\n * have ONNX Runtime WebGPU JSEP bugs that crash session creation, and the\r\n * 384MB LAM model stack-overflows in WASM mode.\r\n * The wav2arkit_cpu model (1.8MB) provides identical 52 ARKit blendshape\r\n * output at 22x real-time on CPU/WASM.\r\n *\r\n * @returns true if on Safari or any iOS browser (should use CPU lip sync model)\r\n */\r\nexport function shouldUseCpuLipSync(): boolean {\r\n return isSafari() || isIOS();\r\n}\r\n\r\n/**\r\n * Check if Web Speech API is available in the browser\r\n *\r\n * The Web Speech API provides native speech recognition in Safari and Chrome.\r\n * On iOS Safari, this is significantly faster than Whisper WASM.\r\n *\r\n * @returns true if SpeechRecognition API is available\r\n */\r\nexport function isSpeechRecognitionAvailable(): boolean {\r\n if (typeof window === 'undefined') return false;\r\n return 'SpeechRecognition' in window || 'webkitSpeechRecognition' in window;\r\n}\r\n\r\n/**\r\n * Recommend using native Safari Speech API over Whisper on iOS\r\n *\r\n * On iOS, Whisper ASR via WASM takes ~1.3s per inference (30% over target).\r\n * Safari's native Web Speech API is:\r\n * - Much faster (native implementation)\r\n * - Battery-efficient (no WASM overhead)\r\n * - No model download needed (saves 30-150MB)\r\n *\r\n * @returns true if on iOS with Speech API available\r\n */\r\nexport 
function shouldUseNativeASR(): boolean {\r\n return isIOS() && isSpeechRecognitionAvailable();\r\n}\r\n\r\n/**\r\n * Recommend using server-side LAM over client-side on iOS\r\n *\r\n * On iOS, LAM lip sync via WASM takes ~332ms per second of audio (3.3x over target).\r\n * Server-side inference with GPU can achieve ~50ms, providing:\r\n * - Real-time lip sync (under 100ms target)\r\n * - Reduced iOS device thermal/battery impact\r\n * - Better user experience\r\n *\r\n * @returns true if on iOS (should use server-side lip sync)\r\n */\r\nexport function shouldUseServerLipSync(): boolean {\r\n return isIOS();\r\n}\r\n","/**\n * Lazy ONNX Runtime loader with conditional WebGPU/WASM bundle loading\n *\n * This module provides a way to dynamically load the appropriate ONNX Runtime bundle\n * based on the platform's capabilities. This is critical for iOS support because:\n *\n * 1. iOS Safari has WebGPU API but ONNX Runtime's WebGPU backend crashes\n * 2. Loading the WebGPU bundle wastes bandwidth and can cause issues\n * 3. WASM-only bundle is smaller and more reliable on iOS\n *\n * Usage:\n * ```typescript\n * const ort = await getOnnxRuntime('wasm'); // Load WASM-only bundle\n * const ort = await getOnnxRuntime('webgpu'); // Load WebGPU bundle (includes WASM)\n * ```\n *\n * @module inference/onnxLoader\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\n// Re-export session options type\nexport type SessionOptions = InferenceSession.SessionOptions;\nimport {\n RuntimeBackend,\n BackendPreference,\n isIOS,\n isMobile,\n getOptimalWasmThreads,\n shouldEnableWasmProxy,\n resolveBackend,\n hasWebGPUApi,\n} from '../utils/runtime';\n\n// Re-export RuntimeBackend for consumers\nexport type { RuntimeBackend } from '../utils/runtime';\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('OnnxLoader');\n\n// Cached ONNX Runtime instance\nlet ortInstance: OrtModule | null = null;\nlet loadedBackend: RuntimeBackend | null = null;\n\n// CDN path for WASM files\nconst WASM_CDN_PATH = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.23.2/dist/';\n\n/**\n * Check if WebGPU is available and likely to work\n *\n * This is more thorough than just checking navigator.gpu exists.\n * It actually requests an adapter to verify the GPU is accessible.\n *\n * @returns true if WebGPU is available and working\n */\nexport async function isWebGPUAvailable(): Promise<boolean> {\n // iOS: WebGPU is broken regardless of API presence\n if (isIOS()) {\n logger.debug('WebGPU check: iOS detected, returning false');\n return false;\n }\n\n if (!hasWebGPUApi()) {\n logger.debug('WebGPU check: navigator.gpu not available');\n return false;\n }\n\n try {\n const adapter = await navigator.gpu.requestAdapter();\n if (!adapter) {\n logger.debug('WebGPU check: No adapter available');\n return false;\n }\n\n // Check for minimum required features\n const device = await adapter.requestDevice();\n if (!device) {\n logger.debug('WebGPU check: Could not create device');\n return false;\n }\n\n // Clean up\n device.destroy();\n\n logger.debug('WebGPU check: Available and working');\n return true;\n } catch (err) {\n logger.debug('WebGPU check: Error during 
availability check', { error: err });\n return false;\n }\n}\n\n/**\n * Configure WASM environment settings based on platform\n *\n * This must be called before creating any inference sessions.\n */\nfunction configureWasm(ort: OrtModule): void {\n // Set CDN path for WASM files\n ort.env.wasm.wasmPaths = WASM_CDN_PATH;\n\n // Platform-specific threading configuration\n const numThreads = getOptimalWasmThreads();\n const enableProxy = shouldEnableWasmProxy();\n\n ort.env.wasm.numThreads = numThreads;\n ort.env.wasm.simd = true; // SIMD always helps\n ort.env.wasm.proxy = enableProxy;\n\n logger.info('WASM configured', {\n numThreads,\n simd: true,\n proxy: enableProxy,\n platform: isIOS() ? 'iOS' : isMobile() ? 'Android' : 'Desktop',\n });\n}\n\n/**\n * Load ONNX Runtime with the specified backend\n *\n * This lazily loads the appropriate bundle:\n * - 'wasm': Loads onnxruntime-web (WASM-only, smaller)\n * - 'webgpu': Loads onnxruntime-web/webgpu (includes WebGPU + WASM fallback)\n *\n * Once loaded, the same instance is reused for all subsequent calls.\n * If you need to switch backends, you must reload the page.\n *\n * @param backend The backend to load ('webgpu' or 'wasm')\n * @returns The ONNX Runtime module\n */\nexport async function getOnnxRuntime(\n backend: RuntimeBackend\n): Promise<OrtModule> {\n // Return cached instance if same backend\n if (ortInstance && loadedBackend === backend) {\n return ortInstance;\n }\n\n // Warn if trying to switch backends (not supported without page reload)\n if (ortInstance && loadedBackend !== backend) {\n logger.warn(\n `ONNX Runtime already loaded with ${loadedBackend} backend. ` +\n `Cannot switch to ${backend}. Returning existing instance.`\n );\n return ortInstance;\n }\n\n logger.info(`Loading ONNX Runtime with ${backend} backend...`);\n\n try {\n if (backend === 'wasm') {\n // Load WASM-only bundle (smaller, no WebGPU code)\n const module = await import('onnxruntime-web');\n ortInstance = module.default || module;\n } else {\n // Load WebGPU bundle (includes WASM fallback)\n const module = await import('onnxruntime-web/webgpu');\n ortInstance = module.default || module;\n }\n\n loadedBackend = backend;\n\n // Configure WASM settings (applies to both bundles)\n configureWasm(ortInstance);\n\n logger.info(`ONNX Runtime loaded successfully`, { backend });\n\n return ortInstance;\n } catch (err) {\n logger.error(`Failed to load ONNX Runtime with ${backend} backend`, {\n error: err,\n });\n throw new Error(\n `Failed to load ONNX Runtime: ${err instanceof Error ? 
err.message : String(err)}`\n );\n }\n}\n\n/**\n * Get the appropriate ONNX Runtime based on user preference\n *\n * This resolves the user's preference against platform capabilities\n * and loads the appropriate bundle.\n *\n * @param preference User's backend preference\n * @returns The ONNX Runtime module and the resolved backend\n */\nexport async function getOnnxRuntimeForPreference(\n preference: BackendPreference = 'auto'\n): Promise<{ ort: OrtModule; backend: RuntimeBackend }> {\n // Check WebGPU availability (skip for iOS)\n const webgpuAvailable = await isWebGPUAvailable();\n\n // Resolve preference to actual backend\n const backend = resolveBackend(preference, webgpuAvailable);\n\n logger.info('Resolved backend preference', {\n preference,\n webgpuAvailable,\n resolvedBackend: backend,\n });\n\n // Load the appropriate bundle\n const ort = await getOnnxRuntime(backend);\n\n return { ort, backend };\n}\n\n/**\n * Get session options for creating an inference session\n *\n * This returns optimized session options based on the backend and platform.\n *\n * @param backend The backend being used\n * @returns Session options for InferenceSession.create()\n */\nexport function getSessionOptions(\n backend: RuntimeBackend\n): SessionOptions {\n if (backend === 'webgpu') {\n return {\n executionProviders: [\n {\n name: 'webgpu',\n preferredLayout: 'NHWC', // Reduces memory overhead for layout conversions\n } as const,\n ],\n graphOptimizationLevel: 'all',\n };\n }\n\n // WASM backend\n return {\n executionProviders: ['wasm'],\n graphOptimizationLevel: 'all',\n };\n}\n\n/**\n * Create an inference session with automatic fallback\n *\n * If WebGPU session creation fails, automatically falls back to WASM.\n *\n * @param modelBuffer The model data as ArrayBuffer\n * @param preferredBackend The preferred backend\n * @returns The created session and the backend used\n */\nexport async function createSessionWithFallback(\n modelBuffer: ArrayBuffer,\n preferredBackend: RuntimeBackend\n): Promise<{\n session: InferenceSession;\n backend: RuntimeBackend;\n}> {\n const ort = await getOnnxRuntime(preferredBackend);\n\n // Convert ArrayBuffer to Uint8Array for onnxruntime-common types\n const modelData = new Uint8Array(modelBuffer);\n\n if (preferredBackend === 'webgpu') {\n try {\n const options = getSessionOptions('webgpu');\n const session = await ort.InferenceSession.create(modelData, options);\n\n logger.info('Session created with WebGPU backend');\n return { session, backend: 'webgpu' };\n } catch (err) {\n logger.warn('WebGPU session creation failed, falling back to WASM', {\n error: err instanceof Error ? 
err.message : String(err),\n });\n // Fall through to WASM\n }\n }\n\n // WASM (primary or fallback)\n const options = getSessionOptions('wasm');\n const session = await ort.InferenceSession.create(modelData, options);\n\n logger.info('Session created with WASM backend');\n return { session, backend: 'wasm' };\n}\n\n/**\n * Get the currently loaded backend (if any)\n */\nexport function getLoadedBackend(): RuntimeBackend | null {\n return loadedBackend;\n}\n\n/**\n * Check if ONNX Runtime has been loaded\n */\nexport function isOnnxRuntimeLoaded(): boolean {\n return ortInstance !== null;\n}\n","/**\n * Shared blendshape constants and utilities for lip sync inference\n *\n * Contains LAM_BLENDSHAPES (canonical ordering), symmetrization, and\n * index remapping used by both Wav2Vec2Inference and Wav2ArkitCpuInference.\n *\n * This module is the single source of truth for blendshape ordering to\n * avoid circular dependencies between inference classes.\n *\n * @category Inference\n */\n\n/**\n * LAM model blendshape names in order (52 total)\n * NOTE: This is alphabetical ordering used by LAM, different from standard ARKit order\n */\nexport const LAM_BLENDSHAPES = [\n 'browDownLeft', 'browDownRight', 'browInnerUp', 'browOuterUpLeft', 'browOuterUpRight',\n 'cheekPuff', 'cheekSquintLeft', 'cheekSquintRight',\n 'eyeBlinkLeft', 'eyeBlinkRight', 'eyeLookDownLeft', 'eyeLookDownRight',\n 'eyeLookInLeft', 'eyeLookInRight', 'eyeLookOutLeft', 'eyeLookOutRight',\n 'eyeLookUpLeft', 'eyeLookUpRight', 'eyeSquintLeft', 'eyeSquintRight',\n 'eyeWideLeft', 'eyeWideRight',\n 'jawForward', 'jawLeft', 'jawOpen', 'jawRight',\n 'mouthClose', 'mouthDimpleLeft', 'mouthDimpleRight', 'mouthFrownLeft', 'mouthFrownRight',\n 'mouthFunnel', 'mouthLeft', 'mouthLowerDownLeft', 'mouthLowerDownRight',\n 'mouthPressLeft', 'mouthPressRight', 'mouthPucker', 'mouthRight',\n 'mouthRollLower', 'mouthRollUpper', 'mouthShrugLower', 'mouthShrugUpper',\n 'mouthSmileLeft', 'mouthSmileRight', 'mouthStretchLeft', 'mouthStretchRight',\n 'mouthUpperUpLeft', 'mouthUpperUpRight',\n 'noseSneerLeft', 'noseSneerRight', 'tongueOut'\n] as const;\n\n/** Alias for backwards compatibility */\nexport const ARKIT_BLENDSHAPES = LAM_BLENDSHAPES;\n\n/**\n * ARKit Left/Right symmetric pairs for blendshape symmetrization\n * From LAM official postprocessing (models/utils.py)\n */\nconst ARKIT_SYMMETRIC_PAIRS: [string, string][] = [\n ['jawLeft', 'jawRight'],\n ['mouthLeft', 'mouthRight'],\n ['mouthSmileLeft', 'mouthSmileRight'],\n ['mouthFrownLeft', 'mouthFrownRight'],\n ['mouthDimpleLeft', 'mouthDimpleRight'],\n ['mouthStretchLeft', 'mouthStretchRight'],\n ['mouthPressLeft', 'mouthPressRight'],\n ['mouthUpperUpLeft', 'mouthUpperUpRight'],\n ['mouthLowerDownLeft', 'mouthLowerDownRight'],\n ['noseSneerLeft', 'noseSneerRight'],\n ['cheekSquintLeft', 'cheekSquintRight'],\n ['browDownLeft', 'browDownRight'],\n ['browOuterUpLeft', 'browOuterUpRight'],\n ['eyeBlinkLeft', 'eyeBlinkRight'],\n ['eyeLookUpLeft', 'eyeLookUpRight'],\n ['eyeLookDownLeft', 'eyeLookDownRight'],\n ['eyeLookInLeft', 'eyeLookInRight'],\n ['eyeLookOutLeft', 'eyeLookOutRight'],\n ['eyeSquintLeft', 'eyeSquintRight'],\n ['eyeWideLeft', 'eyeWideRight'],\n];\n\n// Precompute index pairs for fast symmetrization\nconst SYMMETRIC_INDEX_PAIRS: [number, number][] = ARKIT_SYMMETRIC_PAIRS.map(([l, r]) => [\n LAM_BLENDSHAPES.indexOf(l as typeof LAM_BLENDSHAPES[number]),\n LAM_BLENDSHAPES.indexOf(r as typeof LAM_BLENDSHAPES[number]),\n]).filter(([l, r]) => l !== -1 && r !== -1) as [number, 
number][];\n\n/**\n * Symmetrize blendshapes by averaging left/right pairs\n * From LAM official postprocessing (models/utils.py)\n * This fixes asymmetric output from the raw model\n */\nexport function symmetrizeBlendshapes(frame: Float32Array): Float32Array {\n const result = new Float32Array(frame);\n for (const [lIdx, rIdx] of SYMMETRIC_INDEX_PAIRS) {\n const avg = (frame[lIdx] + frame[rIdx]) / 2;\n result[lIdx] = avg;\n result[rIdx] = avg;\n }\n return result;\n}\n\n/**\n * wav2arkit_cpu model blendshape ordering\n *\n * Indices 0-24 match LAM_BLENDSHAPES, but 25+ diverge:\n * - LAM puts jawRight, mouthClose, mouthDimpleLeft, mouthDimpleRight at 25-28\n * - wav2arkit_cpu puts mouthFrownLeft at 25 and moves those four to 48-51\n */\nexport const WAV2ARKIT_BLENDSHAPES = [\n 'browDownLeft', 'browDownRight', 'browInnerUp', 'browOuterUpLeft', 'browOuterUpRight',\n 'cheekPuff', 'cheekSquintLeft', 'cheekSquintRight',\n 'eyeBlinkLeft', 'eyeBlinkRight', 'eyeLookDownLeft', 'eyeLookDownRight',\n 'eyeLookInLeft', 'eyeLookInRight', 'eyeLookOutLeft', 'eyeLookOutRight',\n 'eyeLookUpLeft', 'eyeLookUpRight', 'eyeSquintLeft', 'eyeSquintRight',\n 'eyeWideLeft', 'eyeWideRight',\n 'jawForward', 'jawLeft', 'jawOpen',\n 'mouthFrownLeft', 'mouthFrownRight', 'mouthFunnel', 'mouthLeft',\n 'mouthLowerDownLeft', 'mouthLowerDownRight',\n 'mouthPressLeft', 'mouthPressRight', 'mouthPucker', 'mouthRight',\n 'mouthRollLower', 'mouthRollUpper', 'mouthShrugLower', 'mouthShrugUpper',\n 'mouthSmileLeft', 'mouthSmileRight', 'mouthStretchLeft', 'mouthStretchRight',\n 'mouthUpperUpLeft', 'mouthUpperUpRight',\n 'noseSneerLeft', 'noseSneerRight', 'tongueOut',\n 'mouthClose', 'mouthDimpleLeft', 'mouthDimpleRight', 'jawRight',\n] as const;\n\n/**\n * Precomputed remap table: wav2arkit_cpu output index → LAM_BLENDSHAPES index\n *\n * For each wav2arkit output index i, REMAP_TO_LAM[i] gives the LAM_BLENDSHAPES\n * index where that value should be placed.\n */\nexport const REMAP_WAV2ARKIT_TO_LAM: number[] = WAV2ARKIT_BLENDSHAPES.map(\n (name) => LAM_BLENDSHAPES.indexOf(name as typeof LAM_BLENDSHAPES[number])\n);\n\n/**\n * Remap a blendshape frame from wav2arkit_cpu ordering to LAM_BLENDSHAPES ordering\n *\n * @param frame - Float32Array of 52 blendshape values in wav2arkit_cpu order\n * @returns Float32Array of 52 blendshape values in LAM_BLENDSHAPES order\n */\nexport function remapWav2ArkitToLam(frame: Float32Array): Float32Array {\n const result = new Float32Array(52);\n for (let i = 0; i < 52; i++) {\n result[REMAP_WAV2ARKIT_TO_LAM[i]] = frame[i];\n }\n return result;\n}\n","/**\r\n * Unified Wav2Vec2 inference engine for Audio-to-Expression + ASR\r\n *\r\n * Runs entirely in the browser using WebGPU or WASM.\r\n * Takes raw 16kHz audio and outputs:\r\n * - 52 ARKit blendshapes (lip sync)\r\n * - 32-token CTC logits (speech recognition)\r\n *\r\n * @category Inference\r\n *\r\n * @example Basic usage\r\n * ```typescript\r\n * import { Wav2Vec2Inference } from '@omote/core';\r\n *\r\n * const wav2vec = new Wav2Vec2Inference({ modelUrl: '/models/unified_wav2vec2_asr_a2e.onnx' });\r\n * await wav2vec.load();\r\n *\r\n * // Process 1 second of audio (16kHz = 16000 samples)\r\n * const result = await wav2vec.infer(audioSamples);\r\n *\r\n * console.log('Blendshapes:', result.blendshapes); // [30, 52] for 30fps\r\n * console.log('ASR text:', result.text); // Decoded transcription\r\n * ```\r\n */\r\n\r\n// Type-only import for TypeScript (no runtime code loaded at import time)\r\n// At runtime, we dynamically import either 
'onnxruntime-web' or 'onnxruntime-web/webgpu'\r\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\r\n\r\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\nimport {\r\n getOnnxRuntimeForPreference,\r\n getSessionOptions,\r\n isWebGPUAvailable,\r\n type RuntimeBackend,\r\n} from './onnxLoader';\r\nimport { BackendPreference } from '../utils/runtime';\r\nimport { symmetrizeBlendshapes, LAM_BLENDSHAPES, ARKIT_BLENDSHAPES } from './blendshapeUtils';\r\n\r\n// Type alias for the ORT module (loaded dynamically)\r\ntype OrtModule = {\r\n InferenceSession: typeof InferenceSession;\r\n Tensor: typeof Tensor;\r\n env: Env;\r\n};\r\n\r\nconst logger = createLogger('Wav2Vec2');\r\n\r\n// Re-export for backward compatibility\r\nexport type InferenceBackend = BackendPreference;\r\n\r\nexport interface Wav2Vec2InferenceConfig {\r\n /** Path or URL to the ONNX model */\r\n modelUrl: string;\r\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\r\n backend?: InferenceBackend;\r\n /** Number of identity classes (default: 12 for streaming model) */\r\n numIdentityClasses?: number;\r\n}\r\n\r\nexport interface ModelInfo {\r\n backend: 'webgpu' | 'wasm';\r\n loadTimeMs: number;\r\n inputNames: string[];\r\n outputNames: string[];\r\n}\r\n\r\n// Re-export blendshape constants from shared utils (canonical source)\r\nexport { LAM_BLENDSHAPES, ARKIT_BLENDSHAPES } from './blendshapeUtils';\r\n\r\n/** CTC vocabulary (32 tokens from wav2vec2-base-960h) */\r\nexport const CTC_VOCAB = [\r\n '<pad>', '<s>', '</s>', '<unk>', '|', 'E', 'T', 'A', 'O', 'N',\r\n 'I', 'H', 'S', 'R', 'D', 'L', 'U', 'M', 'W', 'C',\r\n 'F', 'G', 'Y', 'P', 'B', 'V', 'K', \"'\", 'X', 'J', 'Q', 'Z'\r\n];\r\n\r\nexport interface Wav2Vec2Result {\r\n /** Blendshape weights [frames, 52] - 30fps */\r\n blendshapes: Float32Array[];\r\n /** Raw CTC logits [frames, 32] - 50fps */\r\n asrLogits: Float32Array[];\r\n /** Decoded text from CTC */\r\n text: string;\r\n /** Number of blendshape frames (30fps) — alias for numA2EFrames */\r\n numFrames: number;\r\n /** Number of A2E frames (30fps) */\r\n numA2EFrames: number;\r\n /** Number of ASR frames (50fps) */\r\n numASRFrames: number;\r\n /** Inference time in ms */\r\n inferenceTimeMs: number;\r\n}\r\n\r\nexport class Wav2Vec2Inference {\r\n private session: InferenceSession | null = null;\r\n private ort: OrtModule | null = null; // Lazy-loaded ONNX Runtime module\r\n private config: Wav2Vec2InferenceConfig;\r\n private _backend: RuntimeBackend = 'wasm';\r\n private isLoading = false;\r\n private numIdentityClasses: number;\r\n\r\n // Inference queue for handling concurrent calls\r\n private inferenceQueue: Promise<void> = Promise.resolve();\r\n\r\n constructor(config: Wav2Vec2InferenceConfig) {\r\n this.config = config;\r\n this.numIdentityClasses = config.numIdentityClasses ?? 12;\r\n }\r\n\r\n /**\r\n * Check if WebGPU is available and working\r\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\r\n */\r\n static isWebGPUAvailable = isWebGPUAvailable;\r\n\r\n get backend(): 'webgpu' | 'wasm' | null {\r\n return this.session ? 
this._backend : null;\r\n }\r\n\r\n get isLoaded(): boolean {\r\n return this.session !== null;\r\n }\r\n\r\n /**\r\n * Load the ONNX model\r\n */\r\n async load(): Promise<ModelInfo> {\r\n if (this.isLoading) {\r\n throw new Error('Model is already loading');\r\n }\r\n\r\n if (this.session) {\r\n throw new Error('Model already loaded. Call dispose() first.');\r\n }\r\n\r\n this.isLoading = true;\r\n const startTime = performance.now();\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('Wav2Vec2.load', {\r\n 'model.url': this.config.modelUrl,\r\n 'model.backend_requested': this.config.backend || 'auto',\r\n });\r\n\r\n try {\r\n // Lazy load ONNX Runtime with appropriate backend\r\n // iOS: Loads WASM-only bundle (smaller, no WebGPU code)\r\n // Android/Desktop: Loads WebGPU bundle (with WASM fallback)\r\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend || 'auto' });\r\n\r\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend || 'auto');\r\n this.ort = ort;\r\n this._backend = backend;\r\n\r\n logger.info('ONNX Runtime loaded', { backend: this._backend });\r\n\r\n // Load model with caching\r\n const cache = getModelCache();\r\n const modelUrl = this.config.modelUrl;\r\n const isCached = await cache.has(modelUrl);\r\n\r\n let modelBuffer: ArrayBuffer;\r\n if (isCached) {\r\n logger.debug('Loading model from cache', { modelUrl });\r\n modelBuffer = (await cache.get(modelUrl))!;\r\n\r\n // Check for cache corruption (entry exists but data is null)\r\n if (!modelBuffer) {\r\n logger.warn('Cache corruption detected, clearing and retrying', { modelUrl });\r\n await cache.delete(modelUrl);\r\n logger.info('Corrupted cache entry deleted, fetching fresh model', { modelUrl });\r\n modelBuffer = await fetchWithCache(modelUrl);\r\n }\r\n } else {\r\n logger.debug('Fetching and caching model', { modelUrl });\r\n modelBuffer = await fetchWithCache(modelUrl);\r\n }\r\n\r\n // Final validation\r\n if (!modelBuffer) {\r\n const errorMsg = `Failed to load model: ${modelUrl}. 
Model buffer is null or undefined even after retry.`;\r\n logger.error(errorMsg, { modelUrl, isCached });\r\n throw new Error(errorMsg);\r\n }\r\n\r\n logger.debug('Creating ONNX session', {\r\n size: formatBytes(modelBuffer.byteLength),\r\n backend: this._backend,\r\n });\r\n\r\n // Create session with optimized settings for the backend\r\n const sessionOptions = getSessionOptions(this._backend);\r\n logger.info('Creating session with execution provider', {\r\n executionProvider: this._backend,\r\n });\r\n\r\n // Create session from buffer (convert ArrayBuffer to Uint8Array for onnxruntime-common types)\r\n const modelData = new Uint8Array(modelBuffer);\r\n this.session = await this.ort!.InferenceSession.create(modelData, sessionOptions);\r\n\r\n logger.info('ONNX session created successfully', {\r\n executionProvider: this._backend,\r\n backend: this._backend,\r\n });\r\n\r\n const loadTimeMs = performance.now() - startTime;\r\n\r\n logger.info('Model loaded successfully', {\r\n backend: this._backend,\r\n loadTimeMs: Math.round(loadTimeMs),\r\n inputs: this.session.inputNames,\r\n outputs: this.session.outputNames,\r\n });\r\n\r\n span?.setAttributes({\r\n 'model.backend': this._backend,\r\n 'model.load_time_ms': loadTimeMs,\r\n 'model.cached': isCached,\r\n });\r\n span?.end();\r\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n\r\n // Warmup inference to initialize GPU kernels and contexts\r\n // This prevents hitching on the first real inference during playback\r\n logger.debug('Running warmup inference to initialize GPU context');\r\n const warmupStart = performance.now();\r\n const silentAudio = new Float32Array(16000); // 1 second of silence\r\n await this.infer(silentAudio, 0);\r\n const warmupTimeMs = performance.now() - warmupStart;\r\n logger.info('Warmup inference complete', {\r\n warmupTimeMs: Math.round(warmupTimeMs),\r\n backend: this._backend,\r\n });\r\n telemetry?.recordHistogram('omote.model.warmup_time', warmupTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n\r\n return {\r\n backend: this._backend,\r\n loadTimeMs,\r\n inputNames: [...this.session.inputNames],\r\n outputNames: [...this.session.outputNames],\r\n };\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n telemetry?.incrementCounter('omote.errors.total', 1, {\r\n model: 'wav2vec2',\r\n error_type: 'load_failed',\r\n });\r\n throw error;\r\n } finally {\r\n this.isLoading = false;\r\n }\r\n }\r\n\r\n /**\r\n * Run inference on raw audio\r\n * @param audioSamples - Float32Array of raw audio at 16kHz (16000 samples = 1 second)\r\n * @param identityIndex - Optional identity index (0-11, default 0 = neutral)\r\n *\r\n * Note: Model expects 1-second chunks (16000 samples) for optimal performance.\r\n * Audio will be zero-padded or truncated to 16000 samples.\r\n */\r\n async infer(\r\n audioSamples: Float32Array,\r\n identityIndex: number = 0\r\n ): Promise<Wav2Vec2Result> {\r\n if (!this.session) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\r\n // During interruptions, audioSamples buffer may get detached by ONNX Runtime\r\n // before we process it. 
Copy synchronously to preserve data.\r\n const audioSamplesCopy = new Float32Array(audioSamples);\r\n\r\n // Ensure audio is exactly 16000 samples (1 second)\r\n let audio: Float32Array;\r\n if (audioSamplesCopy.length === 16000) {\r\n audio = audioSamplesCopy;\r\n } else if (audioSamplesCopy.length < 16000) {\r\n // Zero-pad\r\n audio = new Float32Array(16000);\r\n audio.set(audioSamplesCopy, 0);\r\n } else {\r\n // Truncate\r\n audio = audioSamplesCopy.slice(0, 16000);\r\n }\r\n\r\n // Create identity one-hot vector\r\n const identity = new Float32Array(this.numIdentityClasses);\r\n identity[Math.min(identityIndex, this.numIdentityClasses - 1)] = 1.0;\r\n\r\n // CRITICAL: Force copy to prevent ArrayBuffer detachment by ONNX Runtime Web workers\r\n // Without copy, WASM backend transfers buffers to workers, causing \"memory access out of bounds\" errors\r\n const audioCopy = new Float32Array(audio);\r\n const identityCopy = new Float32Array(identity);\r\n\r\n const feeds = {\r\n 'audio': new this.ort!.Tensor('float32', audioCopy, [1, 16000]),\r\n 'identity': new this.ort!.Tensor('float32', identityCopy, [1, this.numIdentityClasses]),\r\n };\r\n\r\n // Queue the inference\r\n return this.queueInference(feeds);\r\n }\r\n\r\n /**\r\n * Decode CTC logits to text using greedy decoding\r\n */\r\n private decodeCTC(logits: Float32Array[]): string {\r\n const tokens: number[] = [];\r\n let prevToken = -1;\r\n\r\n for (const frame of logits) {\r\n // Find argmax\r\n let maxIdx = 0;\r\n let maxVal = frame[0];\r\n for (let i = 1; i < frame.length; i++) {\r\n if (frame[i] > maxVal) {\r\n maxVal = frame[i];\r\n maxIdx = i;\r\n }\r\n }\r\n\r\n // CTC collapse: skip duplicates and blanks (token 0)\r\n if (maxIdx !== prevToken && maxIdx !== 0) {\r\n tokens.push(maxIdx);\r\n }\r\n prevToken = maxIdx;\r\n }\r\n\r\n // Convert to text (token 4 = '|' = word separator = space)\r\n return tokens.map(t => CTC_VOCAB[t] === '|' ? 
' ' : CTC_VOCAB[t]).join('');\r\n }\r\n\r\n /**\r\n * Queue inference to serialize ONNX session calls\r\n */\r\n private queueInference(\r\n feeds: Record<string, Tensor>\r\n ): Promise<Wav2Vec2Result> {\r\n return new Promise((resolve, reject) => {\r\n this.inferenceQueue = this.inferenceQueue.then(async () => {\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('Wav2Vec2.infer', {\r\n 'inference.backend': this._backend,\r\n 'inference.input_samples': 16000,\r\n });\r\n try {\r\n const startTime = performance.now();\r\n const results = await this.session!.run(feeds);\r\n const inferenceTimeMs = performance.now() - startTime;\r\n\r\n const asrOutput = results['asr_logits'];\r\n const blendshapeOutput = results['blendshapes'];\r\n\r\n if (!asrOutput || !blendshapeOutput) {\r\n throw new Error('Missing outputs from model');\r\n }\r\n\r\n const asrData = asrOutput.data as Float32Array;\r\n const blendshapeData = blendshapeOutput.data as Float32Array;\r\n\r\n // Parse shapes: ASR is [1, time_50fps, 32], A2E is [1, time_30fps, 52]\r\n const numASRFrames = asrOutput.dims[1] as number;\r\n const numA2EFrames = blendshapeOutput.dims[1] as number;\r\n const asrVocabSize = asrOutput.dims[2] as number;\r\n const numBlendshapes = blendshapeOutput.dims[2] as number;\r\n\r\n // Split into per-frame arrays\r\n const asrLogits: Float32Array[] = [];\r\n const blendshapes: Float32Array[] = [];\r\n\r\n for (let f = 0; f < numASRFrames; f++) {\r\n asrLogits.push(asrData.slice(f * asrVocabSize, (f + 1) * asrVocabSize));\r\n }\r\n\r\n for (let f = 0; f < numA2EFrames; f++) {\r\n const rawFrame = blendshapeData.slice(f * numBlendshapes, (f + 1) * numBlendshapes);\r\n // Apply symmetrization postprocessing (from LAM official pipeline)\r\n blendshapes.push(symmetrizeBlendshapes(rawFrame));\r\n }\r\n\r\n // Decode CTC\r\n const text = this.decodeCTC(asrLogits);\r\n\r\n logger.trace('Inference completed', {\r\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\r\n numA2EFrames,\r\n numASRFrames,\r\n textLength: text.length,\r\n });\r\n\r\n span?.setAttributes({\r\n 'inference.duration_ms': inferenceTimeMs,\r\n 'inference.a2e_frames': numA2EFrames,\r\n 'inference.asr_frames': numASRFrames,\r\n });\r\n span?.end();\r\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n });\r\n telemetry?.incrementCounter('omote.inference.total', 1, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n status: 'success',\r\n });\r\n\r\n resolve({\r\n blendshapes,\r\n asrLogits,\r\n text,\r\n numFrames: numA2EFrames,\r\n numA2EFrames,\r\n numASRFrames,\r\n inferenceTimeMs,\r\n });\r\n } catch (err) {\r\n span?.endWithError(err instanceof Error ? 
err : new Error(String(err)));\r\n telemetry?.incrementCounter('omote.inference.total', 1, {\r\n model: 'wav2vec2',\r\n backend: this._backend,\r\n status: 'error',\r\n });\r\n reject(err);\r\n }\r\n });\r\n });\r\n }\r\n\r\n /**\r\n * Get blendshape value by name for a specific frame\r\n */\r\n getBlendshape(blendshapes: Float32Array, name: typeof LAM_BLENDSHAPES[number]): number {\r\n const index = LAM_BLENDSHAPES.indexOf(name);\r\n if (index === -1) {\r\n throw new Error(`Unknown blendshape: ${name}`);\r\n }\r\n return blendshapes[index];\r\n }\r\n\r\n /**\r\n * Dispose of the model and free resources\r\n */\r\n async dispose(): Promise<void> {\r\n if (this.session) {\r\n await this.session.release();\r\n this.session = null;\r\n }\r\n }\r\n}\r\n","/**\n * FullFacePipeline - Combined LAM lip sync + Emotion upper face pipeline\n *\n * Orchestrates full-face animation by combining:\n * 1. LAM lip sync (52 ARKit blendshapes) via SyncedAudioPipeline\n * 2. Emotion2VecInference for emotion detection\n * 3. EmotionToBlendshapeMapper for upper face expressions\n *\n * Merge Strategy:\n * - Lower face (41 blendshapes): 100% from LAM (mouth, jaw, tongue, etc.)\n * - Upper face (11 blendshapes): Emotion overlay with LAM as subtle fallback\n * Formula: emotion * 0.8 + lam * 0.2\n *\n * This ensures accurate lip sync from LAM while adding expressive upper face\n * animations driven by speech emotion detection.\n *\n * @category Audio\n *\n * @example Basic usage\n * ```typescript\n * import { FullFacePipeline, Wav2Vec2Inference, Emotion2VecInference } from '@omote/core';\n *\n * const lam = new Wav2Vec2Inference({ modelUrl: '/models/lam.onnx' });\n * const emotion = new Emotion2VecInference({ modelUrl: '/models/emotion.onnx' });\n *\n * await lam.load();\n * await emotion.load();\n *\n * const pipeline = new FullFacePipeline({ lam, emotion });\n * await pipeline.initialize();\n *\n * pipeline.on('full_frame_ready', (frame) => {\n * // frame.blendshapes: merged 52 ARKit blendshapes\n * // frame.emotion: raw emotion data\n * applyToAvatar(frame.blendshapes);\n * });\n *\n * pipeline.start();\n * // Feed audio chunks...\n * await pipeline.onAudioChunk(audioData);\n * ```\n */\n\nimport { AudioScheduler } from './AudioScheduler';\nimport { AudioChunkCoalescer } from './AudioChunkCoalescer';\nimport { LAMPipeline } from './LAMPipeline';\nimport { EventEmitter } from '../events/EventEmitter';\nimport { EmotionToBlendshapeMapper, UPPER_FACE_BLENDSHAPES } from '../animation/EmotionToBlendshapeMapper';\nimport type { UpperFaceBlendshapes } from '../animation/EmotionToBlendshapeMapper';\nimport type { LipSyncBackend } from '../inference/LipSyncBackend';\nimport { LAM_BLENDSHAPES } from '../inference/Wav2Vec2Inference';\nimport type { Emotion2VecInference, EmotionFrame } from '../inference/Emotion2VecInference';\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('FullFacePipeline');\n\n/**\n * Index map for O(1) blendshape name lookup\n */\nconst BLENDSHAPE_INDEX_MAP = new Map<string, number>();\nLAM_BLENDSHAPES.forEach((name, index) => {\n BLENDSHAPE_INDEX_MAP.set(name, index);\n});\n\n/**\n * Set of upper face blendshape names for fast lookup\n */\nconst UPPER_FACE_SET = new Set<string>(UPPER_FACE_BLENDSHAPES);\n\n/**\n * Configuration for FullFacePipeline\n */\nexport interface FullFacePipelineOptions {\n /** Sample rate in Hz (default: 16000) */\n sampleRate?: number;\n /** Target chunk duration in ms for coalescing (default: 200) */\n chunkTargetMs?: number;\n /** LAM inference 
engine */\n lam: LipSyncBackend;\n /** Emotion inference engine (optional - pipeline works without emotion) */\n emotion?: Emotion2VecInference;\n /**\n * Emotion blend factor for upper face blendshapes (0-1)\n * Higher values give more weight to emotion detection\n * @default 0.8\n */\n emotionBlendFactor?: number;\n /**\n * LAM blend factor for upper face blendshapes (0-1)\n * Provides subtle fallback from LAM when emotion is weak\n * @default 0.2\n */\n lamBlendFactor?: number;\n /**\n * Enable lazy loading of emotion model\n * If true, pipeline starts with LAM-only and adds emotion when ready\n * @default true\n */\n lazyLoadEmotion?: boolean;\n}\n\n/**\n * Full face frame with merged blendshapes and emotion data\n */\nexport interface FullFaceFrame {\n /** Merged 52 ARKit blendshapes (lower face from LAM + upper face from emotion) */\n blendshapes: Float32Array;\n /** Original LAM blendshapes (52) */\n lamBlendshapes: Float32Array;\n /** Emotion-driven upper face blendshapes (11) */\n emotionBlendshapes: UpperFaceBlendshapes;\n /** Raw emotion frame data */\n emotion: EmotionFrame | null;\n /** AudioContext timestamp for this frame */\n timestamp: number;\n}\n\n/**\n * Events emitted by FullFacePipeline\n */\nexport interface FullFacePipelineEvents {\n /** New merged frame ready for display */\n full_frame_ready: FullFaceFrame;\n /** Raw LAM frame ready (for debugging/monitoring) */\n lam_frame_ready: Float32Array;\n /** Emotion frame ready (for debugging/monitoring) */\n emotion_frame_ready: EmotionFrame;\n /** Playback has completed */\n playback_complete: void;\n /** First frame ready, playback starting */\n playback_start: number;\n /** Error occurred */\n error: Error;\n /** Emotion model loaded (for lazy loading) */\n emotion_ready: void;\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown;\n}\n\n/**\n * Internal emotion frame with timestamp for synchronization\n */\ninterface TimestampedEmotionFrame {\n frame: EmotionFrame;\n timestamp: number;\n}\n\n/**\n * FullFacePipeline - Unified LAM + Emotion animation pipeline\n */\nexport class FullFacePipeline extends EventEmitter<FullFacePipelineEvents> {\n private scheduler: AudioScheduler;\n private coalescer: AudioChunkCoalescer;\n private lamPipeline: LAMPipeline;\n private emotionMapper: EmotionToBlendshapeMapper;\n\n private waitingForFirstLAM = false;\n private bufferedChunks: ArrayBuffer[] = [];\n private monitorInterval: ReturnType<typeof setInterval> | null = null;\n private frameAnimationId: number | null = null;\n\n // Emotion processing\n private emotion: Emotion2VecInference | null = null;\n private emotionReady = false;\n private emotionDisabledDueToError = false; // Graceful degradation after memory error\n private emotionBuffer: Float32Array = new Float32Array(0);\n private emotionBufferStartTime = 0;\n private emotionFrameQueue: TimestampedEmotionFrame[] = [];\n private lastEmotionFrame: EmotionFrame | null = null;\n\n // Blend factors\n private emotionBlendFactor: number;\n private lamBlendFactor: number;\n\n constructor(private readonly options: FullFacePipelineOptions) {\n super();\n\n const sampleRate = options.sampleRate ?? 16000;\n this.emotionBlendFactor = options.emotionBlendFactor ?? 0.8;\n this.lamBlendFactor = options.lamBlendFactor ?? 0.2;\n\n this.scheduler = new AudioScheduler({ sampleRate });\n this.coalescer = new AudioChunkCoalescer({\n sampleRate,\n targetDurationMs: options.chunkTargetMs ?? 
200,\n });\n this.lamPipeline = new LAMPipeline({\n sampleRate,\n onInference: (_frameCount) => {\n if (this.waitingForFirstLAM) {\n this.onFirstLAMComplete();\n }\n },\n onError: (error) => {\n this.emit('error', error);\n },\n });\n this.emotionMapper = new EmotionToBlendshapeMapper({\n smoothingFactor: 0.15,\n confidenceThreshold: 0.3,\n intensity: 1.0,\n });\n\n // Set emotion engine if provided\n if (options.emotion) {\n this.emotion = options.emotion;\n this.emotionReady = options.emotion.isLoaded;\n }\n }\n\n /**\n * Initialize the pipeline\n */\n async initialize(): Promise<void> {\n await this.scheduler.initialize();\n\n // Check if emotion is already loaded\n if (this.emotion?.isLoaded) {\n this.emotionReady = true;\n logger.info('Emotion engine ready');\n }\n }\n\n /**\n * Set or update the emotion inference engine\n * Call this for lazy loading after pipeline is already running\n */\n setEmotionEngine(emotion: Emotion2VecInference): void {\n this.emotion = emotion;\n this.emotionReady = emotion.isLoaded;\n\n if (this.emotionReady) {\n logger.info('Emotion engine set and ready');\n this.emit('emotion_ready', undefined as any);\n }\n }\n\n /**\n * Mark emotion engine as ready (for lazy loading)\n */\n markEmotionReady(): void {\n if (this.emotion) {\n this.emotionReady = true;\n logger.info('Emotion engine marked ready');\n this.emit('emotion_ready', undefined as any);\n }\n }\n\n /**\n * Start a new playback session\n */\n start(): void {\n this.scheduler.reset();\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.bufferedChunks = [];\n this.waitingForFirstLAM = true;\n\n // Reset emotion state (but keep disabled flag - memory issue persists across sessions)\n this.emotionBuffer = new Float32Array(0);\n this.emotionBufferStartTime = 0;\n this.emotionFrameQueue = [];\n this.lastEmotionFrame = null;\n this.emotionMapper.reset();\n // Note: emotionDisabledDueToError is NOT reset - once memory fails, it won't recover\n\n this.startFrameLoop();\n this.startMonitoring();\n }\n\n /**\n * Receive audio chunk from network\n */\n async onAudioChunk(chunk: Uint8Array): Promise<void> {\n const combined = this.coalescer.add(chunk);\n if (!combined) {\n return;\n }\n\n // Convert to Float32\n const int16 = new Int16Array(combined);\n const float32 = new Float32Array(int16.length);\n for (let i = 0; i < int16.length; i++) {\n float32[i] = int16[i] / 32768;\n }\n\n if (this.waitingForFirstLAM) {\n this.bufferedChunks.push(combined);\n const estimatedTime = this.scheduler.getCurrentTime();\n\n // Process LAM\n await this.lamPipeline.push(float32, estimatedTime, this.options.lam);\n\n // Process emotion in parallel (non-blocking)\n if (this.emotionReady && this.emotion) {\n this.queueEmotionProcessing(float32, estimatedTime);\n }\n } else {\n const scheduleTime = await this.scheduler.schedule(float32);\n\n // Process LAM\n await this.lamPipeline.push(float32, scheduleTime, this.options.lam);\n\n // Process emotion in parallel\n if (this.emotionReady && this.emotion) {\n this.queueEmotionProcessing(float32, scheduleTime);\n }\n }\n }\n\n /**\n * Queue emotion processing (accumulates to 1 second chunks)\n */\n private queueEmotionProcessing(samples: Float32Array, timestamp: number): void {\n if (!this.emotion || this.emotionDisabledDueToError) return;\n\n // Track buffer start time\n if (this.emotionBuffer.length === 0) {\n this.emotionBufferStartTime = timestamp;\n }\n\n // Accumulate samples\n const newBuffer = new Float32Array(this.emotionBuffer.length + samples.length);\n 
newBuffer.set(this.emotionBuffer, 0);\n newBuffer.set(samples, this.emotionBuffer.length);\n this.emotionBuffer = newBuffer;\n\n // Process when we have 1 second (16000 samples)\n if (this.emotionBuffer.length >= 16000) {\n this.processEmotionBuffer();\n }\n }\n\n /**\n * Process accumulated emotion buffer\n */\n private async processEmotionBuffer(): Promise<void> {\n if (!this.emotion || this.emotionBuffer.length < 16000) return;\n\n try {\n const toProcess = this.emotionBuffer.slice(0, 16000);\n const processedStartTime = this.emotionBufferStartTime;\n\n // Keep remaining samples\n this.emotionBuffer = this.emotionBuffer.slice(16000);\n this.emotionBufferStartTime = processedStartTime + 1.0; // 1 second processed\n\n // Run emotion inference\n const result = await this.emotion.infer(toProcess);\n\n // Queue emotion frames with timestamps (50Hz = 20ms per frame)\n const frameDuration = 1 / 50; // 20ms\n for (let i = 0; i < result.frames.length; i++) {\n const frame = result.frames[i];\n const frameTimestamp = processedStartTime + i * frameDuration;\n this.emotionFrameQueue.push({ frame, timestamp: frameTimestamp });\n }\n\n // Emit event for debugging\n this.emit('emotion_frame_ready', result.dominant);\n\n logger.debug('Emotion processed', {\n frames: result.frames.length,\n dominant: result.dominant.emotion,\n confidence: Math.round(result.dominant.confidence * 100),\n });\n } catch (error) {\n // Memory errors crash the WASM runtime - disable emotion for graceful degradation\n logger.warn('Emotion inference failed - disabling emotion for this session', {\n error: error instanceof Error ? error.message : String(error)\n });\n this.emotionDisabledDueToError = true;\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n // Don't emit error - emotion is supplementary, LAM continues\n }\n }\n\n /**\n * Get emotion frame for a given timestamp\n * Interpolates between 50Hz emotion and 30fps animation\n */\n private getEmotionFrameForTime(currentTime: number): EmotionFrame | null {\n // Remove old frames (keep 1 second buffer for smoothing)\n while (\n this.emotionFrameQueue.length > 0 &&\n this.emotionFrameQueue[0].timestamp < currentTime - 1.0\n ) {\n const removed = this.emotionFrameQueue.shift()!;\n this.lastEmotionFrame = removed.frame; // Cache for fallback\n }\n\n // Find the frame closest to current time\n for (let i = 0; i < this.emotionFrameQueue.length; i++) {\n if (this.emotionFrameQueue[i].timestamp >= currentTime) {\n // Return previous frame if available (we want frame at or before current time)\n if (i > 0) {\n return this.emotionFrameQueue[i - 1].frame;\n }\n return this.emotionFrameQueue[0].frame;\n }\n }\n\n // Return last frame in queue or cached frame as fallback\n if (this.emotionFrameQueue.length > 0) {\n return this.emotionFrameQueue[this.emotionFrameQueue.length - 1].frame;\n }\n\n return this.lastEmotionFrame;\n }\n\n /**\n * Merge LAM blendshapes with emotion upper face blendshapes\n */\n private mergeBlendshapes(\n lamFrame: Float32Array,\n emotionFrame: EmotionFrame | null\n ): { merged: Float32Array; emotionBlendshapes: UpperFaceBlendshapes } {\n const merged = new Float32Array(52);\n let emotionBlendshapes: UpperFaceBlendshapes;\n\n if (emotionFrame) {\n // Get emotion-driven blendshapes\n this.emotionMapper.mapFrame(emotionFrame);\n this.emotionMapper.update(33); // ~30fps\n emotionBlendshapes = this.emotionMapper.getCurrentBlendshapes();\n } else {\n // No emotion - use zeros\n emotionBlendshapes = {} as UpperFaceBlendshapes;\n for (const 
name of UPPER_FACE_BLENDSHAPES) {\n emotionBlendshapes[name] = 0;\n }\n }\n\n // Merge: lower face 100% LAM, upper face emotion + LAM fallback\n for (let i = 0; i < 52; i++) {\n const name = LAM_BLENDSHAPES[i];\n\n if (UPPER_FACE_SET.has(name)) {\n // Upper face: emotion * 0.8 + LAM * 0.2\n const emotionValue = emotionBlendshapes[name as keyof UpperFaceBlendshapes] ?? 0;\n const lamValue = lamFrame[i];\n merged[i] = emotionValue * this.emotionBlendFactor + lamValue * this.lamBlendFactor;\n } else {\n // Lower face: 100% LAM\n merged[i] = lamFrame[i];\n }\n }\n\n return { merged, emotionBlendshapes };\n }\n\n /**\n * Handle first LAM inference completion\n */\n private async onFirstLAMComplete(): Promise<void> {\n this.waitingForFirstLAM = false;\n\n const beforeSchedule = this.scheduler.getCurrentTime();\n let actualStartTime = beforeSchedule;\n\n // Schedule all buffered audio chunks\n for (let i = 0; i < this.bufferedChunks.length; i++) {\n const buffer = this.bufferedChunks[i];\n const int16 = new Int16Array(buffer);\n const float32 = new Float32Array(int16.length);\n for (let j = 0; j < int16.length; j++) {\n float32[j] = int16[j] / 32768;\n }\n const scheduleTime = await this.scheduler.schedule(float32);\n\n if (i === 0) {\n actualStartTime = scheduleTime;\n }\n }\n\n // Adjust LAM frame timestamps\n const timeOffset = actualStartTime - beforeSchedule;\n if (timeOffset !== 0) {\n this.lamPipeline.adjustTimestamps(timeOffset);\n\n // Also adjust emotion frame timestamps\n for (const frame of this.emotionFrameQueue) {\n frame.timestamp += timeOffset;\n }\n }\n\n this.bufferedChunks = [];\n this.emit('playback_start', actualStartTime);\n }\n\n /**\n * Start frame animation loop\n */\n private startFrameLoop(): void {\n const updateFrame = () => {\n const currentTime = this.scheduler.getCurrentTime();\n const lamFrame = this.lamPipeline.getFrameForTime(currentTime, this.options.lam);\n\n if (lamFrame) {\n // Get corresponding emotion frame\n const emotionFrame = this.getEmotionFrameForTime(currentTime);\n\n // Merge LAM + emotion\n const { merged, emotionBlendshapes } = this.mergeBlendshapes(lamFrame, emotionFrame);\n\n // Emit merged frame\n const fullFrame: FullFaceFrame = {\n blendshapes: merged,\n lamBlendshapes: lamFrame,\n emotionBlendshapes,\n emotion: emotionFrame,\n timestamp: currentTime,\n };\n\n this.emit('full_frame_ready', fullFrame);\n this.emit('lam_frame_ready', lamFrame);\n }\n\n this.frameAnimationId = requestAnimationFrame(updateFrame);\n };\n\n this.frameAnimationId = requestAnimationFrame(updateFrame);\n }\n\n /**\n * End of audio stream\n */\n async end(): Promise<void> {\n // Flush remaining coalesced data\n const remaining = this.coalescer.flush();\n if (remaining) {\n const chunk = new Uint8Array(remaining);\n await this.onAudioChunk(chunk);\n }\n\n // Flush remaining LAM buffer\n await this.lamPipeline.flush(this.options.lam);\n\n // Process any remaining emotion buffer (even if less than 1 second)\n if (this.emotion && this.emotionBuffer.length > 0 && !this.emotionDisabledDueToError) {\n // Pad to 1 second for inference\n const padded = new Float32Array(16000);\n padded.set(this.emotionBuffer, 0);\n try {\n const result = await this.emotion.infer(padded);\n // Only queue frames for actual audio duration\n const actualFrames = Math.ceil(\n (this.emotionBuffer.length / 16000) * 50\n );\n for (let i = 0; i < Math.min(actualFrames, result.frames.length); i++) {\n const frameTimestamp =\n this.emotionBufferStartTime + i * (1 / 50);\n this.emotionFrameQueue.push({\n 
frame: result.frames[i],\n timestamp: frameTimestamp,\n });\n }\n } catch (error) {\n logger.warn('Final emotion flush failed', { error });\n }\n this.emotionBuffer = new Float32Array(0);\n }\n }\n\n /**\n * Stop playback immediately with smooth fade-out\n */\n async stop(fadeOutMs: number = 50): Promise<void> {\n this.stopMonitoring();\n await this.scheduler.cancelAll(fadeOutMs);\n\n this.bufferedChunks = [];\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.waitingForFirstLAM = false;\n\n // Clear emotion state\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n this.emotionMapper.reset();\n\n this.emit('playback_complete', undefined as any);\n }\n\n /**\n * Start monitoring for playback completion\n */\n private startMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval);\n }\n\n this.monitorInterval = setInterval(() => {\n if (this.scheduler.isComplete() && this.lamPipeline.queuedFrameCount === 0) {\n this.emit('playback_complete', undefined as any);\n this.stopMonitoring();\n }\n }, 100);\n }\n\n /**\n * Stop monitoring\n */\n private stopMonitoring(): void {\n if (this.monitorInterval) {\n clearInterval(this.monitorInterval);\n this.monitorInterval = null;\n }\n\n if (this.frameAnimationId) {\n cancelAnimationFrame(this.frameAnimationId);\n this.frameAnimationId = null;\n }\n }\n\n /**\n * Get current pipeline state (for debugging/monitoring)\n */\n getState() {\n return {\n waitingForFirstLAM: this.waitingForFirstLAM,\n bufferedChunks: this.bufferedChunks.length,\n coalescerFill: this.coalescer.fillLevel,\n lamFill: this.lamPipeline.fillLevel,\n queuedLAMFrames: this.lamPipeline.queuedFrameCount,\n queuedEmotionFrames: this.emotionFrameQueue.length,\n emotionReady: this.emotionReady,\n currentTime: this.scheduler.getCurrentTime(),\n playbackEndTime: this.scheduler.getPlaybackEndTime(),\n };\n }\n\n /**\n * Check if emotion is enabled and ready\n */\n get isEmotionReady(): boolean {\n return this.emotionReady && !this.emotionDisabledDueToError;\n }\n\n /**\n * Check if emotion was disabled due to runtime error (memory exhaustion)\n */\n get isEmotionDisabled(): boolean {\n return this.emotionDisabledDueToError;\n }\n\n /**\n * Cleanup resources\n */\n dispose(): void {\n this.stopMonitoring();\n this.scheduler.dispose();\n this.coalescer.reset();\n this.lamPipeline.reset();\n this.bufferedChunks = [];\n this.emotionBuffer = new Float32Array(0);\n this.emotionFrameQueue = [];\n }\n}\n","/**\r\n * Whisper Automatic Speech Recognition using transformers.js\r\n * Uses Xenova's proven pipeline API for reliable transcription\r\n */\r\n\r\nimport { pipeline, env, type AutomaticSpeechRecognitionPipeline } from '@huggingface/transformers';\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\n\r\nconst logger = createLogger('Whisper');\r\n\r\nexport type WhisperModel = 'tiny' | 'base' | 'small' | 'medium';\r\nexport type WhisperDtype = 'fp32' | 'fp16' | 'q8' | 'int8' | 'uint8' | 'q4' | 'q4f16' | 'bnb4';\r\n\r\nexport interface WhisperConfig {\r\n /** Model size: tiny (~75MB), base (~150MB), small (~500MB), medium (~1.5GB) */\r\n model?: WhisperModel;\r\n /** Use multilingual model (default: false, uses .en models) */\r\n multilingual?: boolean;\r\n /** Language code (e.g., 'en', 'es', 'fr') - for multilingual models */\r\n language?: string;\r\n /** Task: transcribe or translate (default: transcribe) */\r\n task?: 'transcribe' | 'translate';\r\n /** Model quantization format 
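For callers, the interruption path above reduces to a few calls. A minimal teardown/diagnostics sketch; the structural type only names the members used here so the snippet stands alone, and is not the pipeline's full public surface:

```typescript
// Teardown/diagnostics sketch for the stop/getState/dispose methods above.
interface PipelineLike {
  stop(fadeOutMs?: number): Promise<void>;
  getState(): Record<string, unknown>;
  dispose(): void;
}

async function interruptAndInspect(pipeline: PipelineLike): Promise<void> {
  await pipeline.stop(50);          // cancel scheduled audio with a 50 ms fade-out
  console.log(pipeline.getState()); // fill levels, queued frames, playback clock
  pipeline.dispose();               // stop monitoring, release the scheduler, clear buffers
}
```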
(default: 'q8' for balance of speed/quality) */\r\n dtype?: WhisperDtype;\r\n /** Use WebGPU acceleration if available (default: auto-detect) */\r\n device?: 'auto' | 'webgpu' | 'wasm';\r\n /** Local model path (e.g., '/models/whisper-tiny.en') - overrides HuggingFace CDN */\r\n localModelPath?: string;\r\n /** HuggingFace API token to bypass rate limits (get from https://huggingface.co/settings/tokens) */\r\n token?: string;\r\n /** Suppress non-speech tokens like [LAUGHTER], [CLICKING], etc. (default: true) */\r\n suppressNonSpeech?: boolean;\r\n}\r\n\r\nexport interface TranscriptionResult {\r\n /** Transcribed text */\r\n text: string;\r\n /** Detected/used language */\r\n language: string;\r\n /** Inference time in ms */\r\n inferenceTimeMs: number;\r\n /** Full chunks with timestamps (if requested) */\r\n chunks?: Array<{ text: string; timestamp: [number, number | null] }>;\r\n}\r\n\r\n/**\r\n * Whisper ASR inference using transformers.js pipeline API\r\n *\r\n * Features:\r\n * - Automatic WebGPU/WASM backend selection\r\n * - Streaming support with chunk callbacks\r\n * - Proven implementation from Xenova's demo\r\n * - Handles all audio preprocessing automatically\r\n */\r\nexport class WhisperInference {\r\n private config: Required<Omit<WhisperConfig, 'localModelPath' | 'token'>> & Pick<WhisperConfig, 'localModelPath' | 'token'>;\r\n private pipeline: AutomaticSpeechRecognitionPipeline | null = null;\r\n private currentModel: string | null = null;\r\n private isLoading = false;\r\n private actualBackend: string = 'unknown';\r\n\r\n constructor(config: WhisperConfig = {}) {\r\n this.config = {\r\n model: config.model || 'tiny',\r\n multilingual: config.multilingual || false,\r\n language: config.language || 'en',\r\n task: config.task || 'transcribe',\r\n dtype: config.dtype || 'q8',\r\n device: config.device || 'auto',\r\n localModelPath: config.localModelPath,\r\n token: config.token,\r\n suppressNonSpeech: config.suppressNonSpeech !== false, // Default true\r\n };\r\n }\r\n\r\n /**\r\n * Check if WebGPU is available in this browser\r\n */\r\n static async isWebGPUAvailable(): Promise<boolean> {\r\n return 'gpu' in navigator;\r\n }\r\n\r\n /**\r\n * Load the Whisper model pipeline\r\n */\r\n async load(onProgress?: (progress: { status: string; progress?: number; file?: string }) => void): Promise<void> {\r\n if (this.isLoading) {\r\n logger.debug('Already loading model, waiting...');\r\n while (this.isLoading) {\r\n await new Promise(resolve => setTimeout(resolve, 100));\r\n }\r\n return;\r\n }\r\n\r\n const modelName = this.getModelName();\r\n\r\n // Check if we already have this model loaded\r\n if (this.pipeline !== null && this.currentModel === modelName) {\r\n logger.debug('Model already loaded', { model: modelName });\r\n return;\r\n }\r\n\r\n this.isLoading = true;\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.load', {\r\n 'whisper.model': modelName,\r\n 'whisper.dtype': this.config.dtype,\r\n 'whisper.device': this.config.device,\r\n });\r\n\r\n try {\r\n const loadStart = performance.now();\r\n\r\n logger.info('Loading model', {\r\n model: modelName,\r\n dtype: this.config.dtype,\r\n device: this.config.device,\r\n multilingual: this.config.multilingual,\r\n });\r\n\r\n // Dispose old pipeline if switching models\r\n if (this.pipeline !== null && this.currentModel !== modelName) {\r\n logger.debug('Disposing old model', { oldModel: this.currentModel });\r\n await this.pipeline.dispose();\r\n this.pipeline = null;\r\n }\r\n\r\n // 
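A hedged configuration sketch for the options listed above. It assumes `WhisperInference` is exported from the package entry point like the other inference classes, and the specific model/dtype values are illustrative choices rather than requirements:

```typescript
import { WhisperInference } from '@omote/core'; // assumed export, mirroring the other classes

const whisper = new WhisperInference({
  model: 'tiny',           // ~75 MB, fastest; base/small/medium trade size for accuracy
  multilingual: false,     // use the English-only '.en' checkpoints
  dtype: 'q8',             // default quantization: speed/quality balance
  device: 'auto',          // WebGPU when available, otherwise WASM
  suppressNonSpeech: true, // strip tokens like [LAUGHTER] from transcripts
});

await whisper.load((p) => console.log(p.status, p.file ?? '', p.progress ?? ''));
```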
Create pipeline with progress callback\r\n // Force WebGPU if available to avoid WASM CORS issues in v4\r\n const hasWebGPU = await WhisperInference.isWebGPUAvailable();\r\n const device = this.config.device === 'auto'\r\n ? (hasWebGPU ? 'webgpu' : 'wasm')\r\n : this.config.device;\r\n\r\n logger.info('Creating pipeline', { device, hasWebGPU });\r\n\r\n // CRITICAL: Configure transformers.js environment BEFORE calling pipeline()\r\n // This must happen here (not at module level) to avoid lazy-loading race conditions\r\n env.allowLocalModels = false; // Don't check local paths\r\n env.allowRemoteModels = true; // Fetch from HuggingFace\r\n env.useBrowserCache = false; // Disable browser cache\r\n env.useCustomCache = false; // No custom cache\r\n env.useWasmCache = false; // Disable WASM cache (prevents HTML error pages from being cached)\r\n if (env.backends.onnx.wasm) {\r\n env.backends.onnx.wasm.proxy = false; // Disable worker proxy for WASM\r\n env.backends.onnx.wasm.numThreads = 1; // Single thread to avoid CORS issues\r\n }\r\n\r\n logger.info('Configured transformers.js env', {\r\n allowLocalModels: env.allowLocalModels,\r\n useBrowserCache: env.useBrowserCache,\r\n useWasmCache: env.useWasmCache,\r\n });\r\n\r\n // Force WebGPU execution providers if available\r\n const pipelineOptions: any = {\r\n dtype: this.config.dtype,\r\n device,\r\n progress_callback: onProgress,\r\n // For medium models, use no_attentions revision to save memory\r\n revision: modelName.includes('whisper-medium') ? 'no_attentions' : 'main',\r\n // Pass HuggingFace token to bypass rate limits\r\n ...(this.config.token && { token: this.config.token }),\r\n };\r\n\r\n // CRITICAL: Force WebGPU execution providers to avoid WASM memory issues\r\n if (device === 'webgpu') {\r\n pipelineOptions.session_options = {\r\n executionProviders: ['webgpu'],\r\n };\r\n logger.info('Forcing WebGPU execution providers');\r\n }\r\n\r\n this.pipeline = await pipeline(\r\n 'automatic-speech-recognition',\r\n modelName,\r\n pipelineOptions\r\n );\r\n\r\n // Store the actual backend used\r\n this.actualBackend = device;\r\n\r\n this.currentModel = modelName;\r\n const loadTimeMs = performance.now() - loadStart;\r\n\r\n logger.info('Model loaded successfully', {\r\n model: modelName,\r\n loadTimeMs: Math.round(loadTimeMs),\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.load_time_ms': loadTimeMs,\r\n });\r\n span?.end();\r\n } catch (error) {\r\n const errorDetails = {\r\n message: error instanceof Error ? error.message : String(error),\r\n stack: error instanceof Error ? error.stack : undefined,\r\n name: error instanceof Error ? 
error.name : undefined,\r\n error: error,\r\n };\r\n logger.error('Failed to load model', errorDetails);\r\n span?.endWithError(error as Error);\r\n throw error;\r\n } finally {\r\n this.isLoading = false;\r\n }\r\n }\r\n\r\n /**\r\n * Transcribe audio to text\r\n *\r\n * @param audio Audio samples (Float32Array, 16kHz mono)\r\n * @param options Transcription options\r\n */\r\n async transcribe(\r\n audio: Float32Array,\r\n options?: {\r\n /** Return timestamps for each chunk */\r\n returnTimestamps?: boolean;\r\n /** Chunk length in seconds (default: 30) */\r\n chunkLengthS?: number;\r\n /** Stride length in seconds for overlapping chunks (default: 5) */\r\n strideLengthS?: number;\r\n /** Language override */\r\n language?: string;\r\n /** Task override */\r\n task?: 'transcribe' | 'translate';\r\n }\r\n ): Promise<TranscriptionResult> {\r\n if (!this.pipeline) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\r\n // During interruptions or concurrent inferences, audio buffer may get detached\r\n // by transformers.js ONNX Runtime before processing. Copy synchronously to preserve data.\r\n const audioCopy = new Float32Array(audio);\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.transcribe', {\r\n 'audio.samples': audioCopy.length,\r\n 'audio.duration_s': audioCopy.length / 16000,\r\n 'whisper.model': this.currentModel!,\r\n });\r\n\r\n try {\r\n const inferStart = performance.now();\r\n\r\n const audioDurationSec = audioCopy.length / 16000;\r\n const isShortAudio = audioDurationSec < 10;\r\n\r\n logger.debug('Starting transcription', {\r\n audioSamples: audioCopy.length,\r\n durationSeconds: audioDurationSec.toFixed(2),\r\n isShortAudio,\r\n });\r\n\r\n // Build transcription options\r\n // PERFORMANCE: For short audio (<10s), disable chunking to avoid overhead\r\n // Chunking is designed for long audio (podcasts, meetings) and adds 0.5-1s\r\n // latency for conversational clips. For 2-4s audio, process in one pass.\r\n const transcribeOptions: any = {\r\n // Decoding strategy\r\n top_k: 0,\r\n do_sample: false,\r\n\r\n // Adaptive chunking: Disable for short audio, enable for long audio\r\n chunk_length_s: options?.chunkLengthS || (isShortAudio ? audioDurationSec : 30),\r\n stride_length_s: options?.strideLengthS || (isShortAudio ? 0 : 5),\r\n\r\n // Timestamps\r\n return_timestamps: options?.returnTimestamps || false,\r\n force_full_sequences: false,\r\n };\r\n\r\n // Only pass language/task for multilingual models\r\n if (this.config.multilingual) {\r\n transcribeOptions.language = options?.language || this.config.language;\r\n transcribeOptions.task = options?.task || this.config.task;\r\n }\r\n\r\n // Run transcription\r\n const rawResult = await this.pipeline(audioCopy, transcribeOptions);\r\n\r\n // v3 can return array or single object, normalize to single\r\n const result = Array.isArray(rawResult) ? 
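The adaptive chunking above means short conversational clips need no chunking options at all. A usage sketch, reusing the `whisper` instance from the configuration sketch earlier and a placeholder 16 kHz buffer:

```typescript
// Clips under 10 s are transcribed in a single pass; chunking kicks in for long audio.
const audio = new Float32Array(16000 * 3); // 3 s placeholder buffer (16 kHz mono)
const result = await whisper.transcribe(audio, { returnTimestamps: true });

console.log(result.text, `(${Math.round(result.inferenceTimeMs)} ms)`);
for (const chunk of result.chunks ?? []) {
  console.log(chunk.timestamp, chunk.text);
}
```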
rawResult[0] : rawResult;\r\n\r\n const inferenceTimeMs = performance.now() - inferStart;\r\n\r\n // Clean non-speech tokens if enabled\r\n let cleanedText = result.text;\r\n if (this.config.suppressNonSpeech) {\r\n cleanedText = this.removeNonSpeechTokens(cleanedText);\r\n }\r\n\r\n const transcription: TranscriptionResult = {\r\n text: cleanedText,\r\n language: this.config.language,\r\n inferenceTimeMs,\r\n chunks: result.chunks,\r\n };\r\n\r\n logger.debug('Transcription complete', {\r\n text: transcription.text,\r\n inferenceTimeMs: Math.round(inferenceTimeMs),\r\n chunksCount: result.chunks?.length || 0,\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.inference_time_ms': inferenceTimeMs,\r\n 'whisper.text_length': transcription.text.length,\r\n });\r\n span?.end();\r\n\r\n return transcription;\r\n } catch (error) {\r\n logger.error('Transcribe error', { error });\r\n span?.endWithError(error as Error);\r\n throw new Error(`Whisper transcription failed: ${error}`);\r\n }\r\n }\r\n\r\n /**\r\n * Transcribe with streaming chunks (progressive results)\r\n *\r\n * @param audio Audio samples\r\n * @param onChunk Called when each chunk is finalized\r\n * @param onUpdate Called after each generation step (optional)\r\n */\r\n async transcribeStreaming(\r\n audio: Float32Array,\r\n onChunk: (chunk: { text: string; timestamp: [number, number | null] }) => void,\r\n onUpdate?: (text: string) => void,\r\n options?: {\r\n chunkLengthS?: number;\r\n strideLengthS?: number;\r\n language?: string;\r\n task?: 'transcribe' | 'translate';\r\n }\r\n ): Promise<TranscriptionResult> {\r\n if (!this.pipeline) {\r\n throw new Error('Model not loaded. Call load() first.');\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('whisper.transcribe_streaming', {\r\n 'audio.samples': audio.length,\r\n 'audio.duration_s': audio.length / 16000,\r\n });\r\n\r\n try {\r\n const inferStart = performance.now();\r\n\r\n logger.debug('Starting streaming transcription', {\r\n audioSamples: audio.length,\r\n durationSeconds: (audio.length / 16000).toFixed(2),\r\n });\r\n\r\n // NOTE: transformers.js v3 removed callback_function support for streaming\r\n // For now, we'll process chunks manually after transcription\r\n // TODO: Re-implement streaming when v3 supports it\r\n\r\n // Build transcription options\r\n const transcribeOptions: any = {\r\n top_k: 0,\r\n do_sample: false,\r\n chunk_length_s: options?.chunkLengthS || 30,\r\n stride_length_s: options?.strideLengthS || 5,\r\n return_timestamps: true,\r\n force_full_sequences: false,\r\n };\r\n\r\n // Only pass language/task for multilingual models\r\n if (this.config.multilingual) {\r\n transcribeOptions.language = options?.language || this.config.language;\r\n transcribeOptions.task = options?.task || this.config.task;\r\n }\r\n\r\n // Run transcription with timestamps to get chunks\r\n const rawResult = await this.pipeline(audio, transcribeOptions);\r\n\r\n // v3 can return array or single object, normalize to single\r\n const result = Array.isArray(rawResult) ? 
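Because transformers.js v3 removed the per-token callback, the streaming variant above delivers chunks only after the full pass completes. A usage sketch, again assuming a loaded `whisper` instance and a 16 kHz `audio` buffer:

```typescript
const streamed = await whisper.transcribeStreaming(
  audio,
  (chunk) => console.log('chunk', chunk.timestamp, chunk.text), // fired per timestamped chunk
  (text) => console.log('final', text)                          // fired once with the full text
);
console.log(`${streamed.chunks?.length ?? 0} chunks in ${Math.round(streamed.inferenceTimeMs)} ms`);
```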
rawResult[0] : rawResult;\r\n\r\n const inferenceTimeMs = performance.now() - inferStart;\r\n\r\n // Process chunks after completion\r\n if (result.chunks && onChunk) {\r\n for (const chunk of result.chunks) {\r\n onChunk({\r\n text: chunk.text,\r\n timestamp: chunk.timestamp,\r\n });\r\n }\r\n }\r\n\r\n // Final update with complete text\r\n if (onUpdate) {\r\n onUpdate(result.text);\r\n }\r\n\r\n logger.debug('Streaming transcription complete', {\r\n text: result.text,\r\n inferenceTimeMs: Math.round(inferenceTimeMs),\r\n chunksCount: result.chunks?.length || 0,\r\n });\r\n\r\n span?.setAttributes({\r\n 'whisper.inference_time_ms': inferenceTimeMs,\r\n 'whisper.chunks_count': result.chunks?.length || 0,\r\n });\r\n span?.end();\r\n\r\n return {\r\n text: result.text,\r\n language: this.config.language,\r\n inferenceTimeMs,\r\n chunks: result.chunks,\r\n };\r\n } catch (error) {\r\n logger.error('Streaming transcribe error', { error });\r\n span?.endWithError(error as Error);\r\n throw new Error(`Whisper streaming transcription failed: ${error}`);\r\n }\r\n }\r\n\r\n /**\r\n * Dispose of the model and free resources\r\n */\r\n async dispose(): Promise<void> {\r\n if (this.pipeline) {\r\n logger.debug('Disposing model', { model: this.currentModel });\r\n await this.pipeline.dispose();\r\n this.pipeline = null;\r\n this.currentModel = null;\r\n }\r\n }\r\n\r\n /**\r\n * Check if model is loaded\r\n */\r\n get isLoaded(): boolean {\r\n return this.pipeline !== null;\r\n }\r\n\r\n /**\r\n * Get the backend being used (webgpu or wasm)\r\n */\r\n get backend(): string {\r\n return this.actualBackend;\r\n }\r\n\r\n /**\r\n * Get the full model name used by transformers.js\r\n */\r\n private getModelName(): string {\r\n // Use local model path if provided\r\n if (this.config.localModelPath) {\r\n return this.config.localModelPath;\r\n }\r\n\r\n // Otherwise construct HuggingFace model path\r\n let modelName = `onnx-community/whisper-${this.config.model}`;\r\n\r\n // Add .en suffix for English-only models (non-multilingual)\r\n if (!this.config.multilingual) {\r\n modelName += '.en';\r\n }\r\n\r\n return modelName;\r\n }\r\n\r\n /**\r\n * Remove non-speech event tokens from transcription\r\n *\r\n * Whisper outputs special tokens for non-speech events like:\r\n * [LAUGHTER], [APPLAUSE], [MUSIC], [BLANK_AUDIO], [CLICKING], etc.\r\n *\r\n * This method strips these tokens and cleans up extra whitespace.\r\n */\r\n private removeNonSpeechTokens(text: string): string {\r\n // Pattern matches anything in square brackets (non-speech tokens)\r\n // Examples: [LAUGHTER], [BLANK_AUDIO], [MUSIC], [APPLAUSE], [CLICKING]\r\n const cleaned = text.replace(/\\[[\\w\\s_]+\\]/g, '');\r\n\r\n // Clean up multiple spaces and trim\r\n return cleaned.replace(/\\s+/g, ' ').trim();\r\n }\r\n}\r\n","/**\n * CPU-optimized lip sync inference using wav2arkit_cpu model\n *\n * A lightweight (1.8MB) alternative to Wav2Vec2Inference (384MB) designed\n * for Safari/iOS where WebGPU crashes due to ONNX Runtime JSEP bugs.\n *\n * Key differences from Wav2Vec2Inference:\n * - WASM-only backend (CPU-optimized, no WebGPU)\n * - 1.8MB model vs 384MB\n * - No identity input (baked to identity 11)\n * - No ASR output (lip sync only)\n * - Dynamic input length (not fixed to 16000 samples)\n * - Different native blendshape ordering (remapped to LAM_BLENDSHAPES)\n *\n * @category Inference\n *\n * @example\n * ```typescript\n * import { Wav2ArkitCpuInference } from '@omote/core';\n *\n * const lam = new Wav2ArkitCpuInference({\n * 
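The token cleanup above is a plain regex pass, and the model name is built from the configured size. A small sketch of both; the example strings are illustrative:

```typescript
// Non-speech cleanup: anything in square brackets is stripped, then whitespace collapsed.
const raw = ' [BLANK_AUDIO] Hello there. [LAUGHTER]  How are you? ';
const cleaned = raw.replace(/\[[\w\s_]+\]/g, '').replace(/\s+/g, ' ').trim();
// -> "Hello there. How are you?"

// Model-name construction: HuggingFace path unless localModelPath is set;
// English-only models get the '.en' suffix.
const model = 'tiny';
const multilingual = false;
const modelName = `onnx-community/whisper-${model}${multilingual ? '' : '.en'}`;
// -> "onnx-community/whisper-tiny.en"
```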
modelUrl: '/models/wav2arkit_cpu.onnx',\n * });\n * await lam.load();\n *\n * const { blendshapes } = await lam.infer(audioSamples);\n * // blendshapes: Float32Array[] in LAM_BLENDSHAPES order, 30fps\n * ```\n */\n\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\nimport { symmetrizeBlendshapes, remapWav2ArkitToLam } from './blendshapeUtils';\nimport type { LipSyncBackend, LipSyncModelInfo, LipSyncResult } from './LipSyncBackend';\n\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\nconst logger = createLogger('Wav2ArkitCpu');\n\nexport interface Wav2ArkitCpuConfig {\n /** Path or URL to the wav2arkit_cpu ONNX model */\n modelUrl: string;\n /** Preferred backend (default: 'wasm' — this model is CPU-optimized) */\n backend?: BackendPreference;\n}\n\nexport class Wav2ArkitCpuInference implements LipSyncBackend {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null;\n private config: Wav2ArkitCpuConfig;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n\n // Inference queue for handling concurrent calls\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n constructor(config: Wav2ArkitCpuConfig) {\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<LipSyncModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. 
Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Wav2ArkitCpu.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend || 'wasm',\n });\n\n try {\n // Default to WASM — this model is CPU-optimized\n const preference = this.config.backend || 'wasm';\n logger.info('Loading ONNX Runtime...', { preference });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(preference);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.debug('Loading model from cache', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n\n if (!modelBuffer) {\n logger.warn('Cache corruption detected, clearing and retrying', { modelUrl });\n await cache.delete(modelUrl);\n modelBuffer = await fetchWithCache(modelUrl);\n }\n } else {\n logger.debug('Fetching and caching model', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n }\n\n if (!modelBuffer) {\n throw new Error(`Failed to load model: ${modelUrl}`);\n }\n\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await this.ort!.InferenceSession.create(modelData, sessionOptions);\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n inputs: this.session.inputNames,\n outputs: this.session.outputNames,\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n\n // Warmup inference\n logger.debug('Running warmup inference');\n const warmupStart = performance.now();\n const silentAudio = new Float32Array(16000);\n await this.infer(silentAudio);\n const warmupTimeMs = performance.now() - warmupStart;\n logger.info('Warmup inference complete', {\n warmupTimeMs: Math.round(warmupTimeMs),\n backend: this._backend,\n });\n telemetry?.recordHistogram('omote.model.warmup_time', warmupTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? 
error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'wav2arkit_cpu',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Run inference on raw audio\n *\n * Accepts variable-length audio (not fixed to 16000 samples).\n * Output frames = ceil(30 * numSamples / 16000).\n *\n * @param audioSamples - Float32Array of raw audio at 16kHz\n * @param _identityIndex - Ignored (identity 11 is baked into the model)\n */\n async infer(\n audioSamples: Float32Array,\n _identityIndex?: number\n ): Promise<LipSyncResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n // Force copy to prevent ArrayBuffer detachment\n const audioCopy = new Float32Array(audioSamples);\n\n const feeds = {\n 'audio_waveform': new this.ort!.Tensor('float32', audioCopy, [1, audioCopy.length]),\n };\n\n return this.queueInference(feeds, audioCopy.length);\n }\n\n /**\n * Queue inference to serialize ONNX session calls\n */\n private queueInference(\n feeds: Record<string, Tensor>,\n inputSamples: number\n ): Promise<LipSyncResult> {\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Wav2ArkitCpu.infer', {\n 'inference.backend': this._backend,\n 'inference.input_samples': inputSamples,\n });\n try {\n const startTime = performance.now();\n const results = await this.session!.run(feeds);\n const inferenceTimeMs = performance.now() - startTime;\n\n const blendshapeOutput = results['blendshapes'];\n\n if (!blendshapeOutput) {\n throw new Error('Missing blendshapes output from model');\n }\n\n const blendshapeData = blendshapeOutput.data as Float32Array;\n const numFrames = blendshapeOutput.dims[1] as number;\n const numBlendshapes = blendshapeOutput.dims[2] as number;\n\n // Split into per-frame arrays, remap to LAM order, symmetrize\n const blendshapes: Float32Array[] = [];\n for (let f = 0; f < numFrames; f++) {\n const rawFrame = blendshapeData.slice(f * numBlendshapes, (f + 1) * numBlendshapes);\n const remapped = remapWav2ArkitToLam(rawFrame);\n blendshapes.push(symmetrizeBlendshapes(remapped));\n }\n\n logger.trace('Inference completed', {\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n numFrames,\n inputSamples,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.frames': numFrames,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n blendshapes,\n numFrames,\n inferenceTimeMs,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? 
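The CPU model above emits 30 fps blendshape frames regardless of input length; the expected frame count for a 16 kHz buffer follows directly from the formula in the doc comment:

```typescript
// frames = ceil(30 * samples / 16000) for 16 kHz input (30 fps output).
function expectedFrames(samples: number): number {
  return Math.ceil((30 * samples) / 16000);
}

expectedFrames(16000); // 1 s    -> 30 frames
expectedFrames(8000);  // 0.5 s  -> 15 frames
expectedFrames(4000);  // 0.25 s -> 8 frames (7.5 rounded up)
```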
err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'wav2arkit_cpu',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n }\n}\n","/**\n * Factory function for lip sync with automatic GPU/CPU model selection\n *\n * Provides a unified API that automatically selects the optimal model:\n * - Safari (macOS + iOS): Uses Wav2ArkitCpuInference (1.8MB, WASM)\n * - Chrome/Firefox/Edge: Uses Wav2Vec2Inference (384MB, WebGPU)\n * - Fallback: Gracefully falls back to CPU model if GPU model fails to load\n *\n * @category Inference\n *\n * @example Auto-detect (recommended)\n * ```typescript\n * import { createLipSync } from '@omote/core';\n *\n * const lam = createLipSync({\n * gpuModelUrl: '/models/unified_wav2vec2_asr_a2e.onnx',\n * cpuModelUrl: '/models/wav2arkit_cpu.onnx',\n * });\n *\n * await lam.load();\n * const { blendshapes } = await lam.infer(audioSamples);\n * ```\n *\n * @example Force CPU model\n * ```typescript\n * const lam = createLipSync({\n * gpuModelUrl: '/models/unified_wav2vec2_asr_a2e.onnx',\n * cpuModelUrl: '/models/wav2arkit_cpu.onnx',\n * mode: 'cpu',\n * });\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { shouldUseCpuLipSync, isSafari } from '../utils/runtime';\nimport { Wav2Vec2Inference } from './Wav2Vec2Inference';\nimport { Wav2ArkitCpuInference } from './Wav2ArkitCpuInference';\nimport type { LipSyncBackend, LipSyncModelInfo, LipSyncResult } from './LipSyncBackend';\nimport type { RuntimeBackend, BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('createLipSync');\n\n/**\n * Configuration for the lip sync factory\n */\nexport interface CreateLipSyncConfig {\n /** URL for the GPU model (Wav2Vec2, used on Chrome/Firefox/Edge) */\n gpuModelUrl: string;\n /** URL for the CPU model (wav2arkit_cpu, used on Safari/iOS) */\n cpuModelUrl: string;\n /**\n * Model selection mode:\n * - 'auto': Safari/iOS → CPU, everything else → GPU (default)\n * - 'gpu': Force GPU model (Wav2Vec2Inference)\n * - 'cpu': Force CPU model (Wav2ArkitCpuInference)\n */\n mode?: 'auto' | 'gpu' | 'cpu';\n /** Backend preference for GPU model (default: 'auto') */\n gpuBackend?: BackendPreference;\n /** Number of identity classes for GPU model (default: 12) */\n numIdentityClasses?: number;\n /**\n * Fall back to CPU model if GPU model fails to load (default: true)\n * Only applies when mode is 'auto' or 'gpu'\n */\n fallbackOnError?: boolean;\n}\n\n/**\n * Create a lip sync instance with automatic GPU/CPU model selection\n *\n * @param config - Factory configuration\n * @returns A LipSyncBackend instance (either GPU or CPU model)\n */\nexport function createLipSync(config: CreateLipSyncConfig): LipSyncBackend {\n const mode = config.mode ?? 'auto';\n const fallbackOnError = config.fallbackOnError ?? 
true;\n\n // Determine which model to use\n let useCpu: boolean;\n\n if (mode === 'cpu') {\n useCpu = true;\n logger.info('Forcing CPU lip sync model (wav2arkit_cpu)');\n } else if (mode === 'gpu') {\n useCpu = false;\n logger.info('Forcing GPU lip sync model (Wav2Vec2)');\n } else {\n // Auto-detect: Safari/iOS → CPU, everything else → GPU\n useCpu = shouldUseCpuLipSync();\n logger.info('Auto-detected lip sync model', {\n useCpu,\n isSafari: isSafari(),\n });\n }\n\n if (useCpu) {\n logger.info('Creating Wav2ArkitCpuInference (1.8MB, WASM)');\n return new Wav2ArkitCpuInference({\n modelUrl: config.cpuModelUrl,\n });\n }\n\n // GPU model, optionally with fallback\n const gpuInstance = new Wav2Vec2Inference({\n modelUrl: config.gpuModelUrl,\n backend: config.gpuBackend ?? 'auto',\n numIdentityClasses: config.numIdentityClasses,\n });\n\n if (fallbackOnError) {\n logger.info('Creating Wav2Vec2Inference with CPU fallback');\n return new LipSyncWithFallback(gpuInstance, config);\n }\n\n logger.info('Creating Wav2Vec2Inference (no fallback)');\n return gpuInstance;\n}\n\n/**\n * Wrapper that provides automatic fallback from GPU to CPU model\n *\n * If the GPU model fails during load(), this wrapper automatically\n * creates a Wav2ArkitCpuInference instance instead.\n */\nclass LipSyncWithFallback implements LipSyncBackend {\n private implementation: LipSyncBackend;\n private readonly config: CreateLipSyncConfig;\n private hasFallenBack = false;\n\n constructor(gpuInstance: Wav2Vec2Inference, config: CreateLipSyncConfig) {\n this.implementation = gpuInstance;\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n return this.implementation.backend;\n }\n\n get isLoaded(): boolean {\n return this.implementation.isLoaded;\n }\n\n async load(): Promise<LipSyncModelInfo> {\n try {\n return await this.implementation.load();\n } catch (error) {\n logger.warn('GPU model load failed, falling back to CPU model', {\n error: error instanceof Error ? 
error.message : String(error),\n });\n\n // Clean up failed GPU instance\n try {\n await this.implementation.dispose();\n } catch {\n // Ignore dispose errors\n }\n\n // Create CPU fallback\n this.implementation = new Wav2ArkitCpuInference({\n modelUrl: this.config.cpuModelUrl,\n });\n this.hasFallenBack = true;\n\n logger.info('Fallback to Wav2ArkitCpuInference successful');\n return await this.implementation.load();\n }\n }\n\n async infer(audioSamples: Float32Array, identityIndex?: number): Promise<LipSyncResult> {\n return this.implementation.infer(audioSamples, identityIndex);\n }\n\n async dispose(): Promise<void> {\n return this.implementation.dispose();\n }\n}\n","/**\n * Silero VAD (Voice Activity Detection) inference\n *\n * Neural network-based VAD running in browser via ONNX Runtime Web.\n * Much more accurate than RMS-based energy detection.\n *\n * Uses lazy loading to conditionally load WebGPU or WASM-only bundle:\n * - iOS: Loads WASM-only bundle (WebGPU crashes due to Safari bugs)\n * - Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { SileroVADInference } from '@omote/core';\n *\n * const vad = new SileroVADInference({\n * modelUrl: '/models/silero-vad.onnx'\n * });\n * await vad.load();\n *\n * // Process 32ms chunks (512 samples at 16kHz)\n * const probability = await vad.process(audioChunk);\n * if (probability > 0.5) {\n * console.log('Speech detected!');\n * }\n * ```\n *\n * @example Streaming with state management\n * ```typescript\n * // State is automatically maintained between process() calls\n * // Call reset() when starting a new audio stream\n * vad.reset();\n *\n * for (const chunk of audioChunks) {\n * const prob = await vad.process(chunk);\n * // prob is speech probability [0, 1]\n * }\n * ```\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n isWebGPUAvailable,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('SileroVAD');\n\nexport type VADBackend = BackendPreference;\n\n/**\n * Configuration for Silero VAD\n */\nexport interface SileroVADConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\n backend?: VADBackend;\n /** Sample rate (8000 or 16000, default: 16000) */\n sampleRate?: 8000 | 16000;\n /** Speech probability threshold (default: 0.5) */\n threshold?: number;\n /**\n * Number of audio chunks to keep in pre-speech buffer.\n * When VAD triggers, these chunks are prepended to the speech buffer\n * to capture the beginning of speech that occurred before detection.\n *\n * At 512 samples/chunk and 16kHz:\n * - 10 chunks = 320ms of pre-speech audio\n * - 15 chunks = 480ms of pre-speech audio\n *\n * Default: 10 chunks (320ms)\n */\n preSpeechBufferChunks?: number;\n}\n\n/**\n * VAD model loading 
information\n */\nexport interface VADModelInfo {\n backend: 'webgpu' | 'wasm';\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n chunkSize: number;\n}\n\n/**\n * Result from a single VAD inference\n */\nexport interface VADResult {\n /** Speech probability (0-1) */\n probability: number;\n /** Whether speech is detected (probability > threshold) */\n isSpeech: boolean;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n /**\n * Pre-speech audio chunks (only present on first speech detection).\n * These are the N chunks immediately before VAD triggered, useful for\n * capturing the beginning of speech that occurred before detection.\n *\n * Only populated when transitioning from silence to speech.\n */\n preSpeechChunks?: Float32Array[];\n}\n\n/**\n * Speech segment detected by VAD\n */\nexport interface SpeechSegment {\n /** Start time in seconds */\n start: number;\n /** End time in seconds */\n end: number;\n /** Average probability during segment */\n avgProbability: number;\n}\n\n/**\n * Silero VAD - Neural network voice activity detection\n *\n * Based on snakers4/silero-vad ONNX model.\n * Processes 32ms chunks (512 samples at 16kHz) with LSTM state.\n *\n * @see https://github.com/snakers4/silero-vad\n */\nexport class SileroVADInference {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null; // Lazy-loaded ONNX Runtime module\n private config: Required<SileroVADConfig>;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n\n // LSTM state tensors [2, batch_size, 128]\n private state: Tensor | null = null;\n\n // Context buffer (prepended to each chunk)\n private context: Float32Array;\n\n // Chunk sizes based on sample rate\n private readonly chunkSize: number;\n private readonly contextSize: number;\n\n // Inference queue for serialization\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n // Pre-speech buffer for capturing beginning of speech\n private preSpeechBuffer: Float32Array[] = [];\n private wasSpeaking = false;\n\n // Cached sample rate tensor (int64 scalar, never changes per instance)\n private srTensor: Tensor | null = null;\n\n constructor(config: SileroVADConfig) {\n const sampleRate = config.sampleRate ?? 16000;\n\n if (sampleRate !== 8000 && sampleRate !== 16000) {\n throw new Error('Silero VAD only supports 8000 or 16000 Hz sample rates');\n }\n\n this.config = {\n modelUrl: config.modelUrl,\n backend: config.backend ?? 'auto',\n sampleRate,\n threshold: config.threshold ?? 0.5,\n preSpeechBufferChunks: config.preSpeechBufferChunks ?? 10,\n };\n\n // Set chunk sizes based on sample rate\n this.chunkSize = sampleRate === 16000 ? 512 : 256;\n this.contextSize = sampleRate === 16000 ? 64 : 32;\n this.context = new Float32Array(this.contextSize);\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? 
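The chunk sizes above translate directly into timing, which is where the 320 ms figure in the pre-speech buffer documentation comes from. A short sketch of the arithmetic:

```typescript
// Chunk timing at the two supported sample rates.
const sampleRate: 8000 | 16000 = 16000;
const chunkSize = sampleRate === 16000 ? 512 : 256;  // samples per VAD chunk
const chunkMs = (chunkSize / sampleRate) * 1000;     // 32 ms at either rate
const preSpeechBufferChunks = 10;                    // default
const preSpeechMs = preSpeechBufferChunks * chunkMs; // 320 ms of lead-in audio
console.log({ chunkSize, chunkMs, preSpeechMs });
```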
this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n get threshold(): number {\n return this.config.threshold;\n }\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number {\n return this.chunkSize;\n }\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number {\n return (this.chunkSize / this.config.sampleRate) * 1000;\n }\n\n /**\n * Check if WebGPU is available and working\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\n */\n static isWebGPUAvailable = isWebGPUAvailable;\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<VADModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVAD.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend,\n 'model.sample_rate': this.config.sampleRate,\n });\n\n try {\n // Lazy load ONNX Runtime with appropriate backend\n // iOS: Loads WASM-only bundle (smaller, no WebGPU code)\n // Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.debug('Loading model from cache', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n } else {\n logger.debug('Fetching and caching model', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n }\n\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n // Create session with optimized settings for the backend\n // Convert ArrayBuffer to Uint8Array for onnxruntime-common types\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await ort.InferenceSession.create(modelData, sessionOptions);\n\n // Initialize state\n this.reset();\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n threshold: this.config.threshold,\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'silero-vad',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? 
error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'silero-vad',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Reset state for new audio stream\n */\n reset(): void {\n if (!this.ort) {\n throw new Error('ONNX Runtime not loaded. Call load() first.');\n }\n // LSTM state: [2, batch_size=1, 128]\n this.state = new this.ort.Tensor('float32', new Float32Array(2 * 1 * 128), [2, 1, 128]);\n // Reset context buffer\n this.context = new Float32Array(this.contextSize);\n // Reset pre-speech buffer\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n\n // Create cached sr tensor once (int64 scalar, never changes)\n if (!this.srTensor) {\n try {\n this.srTensor = new this.ort.Tensor(\n 'int64',\n new BigInt64Array([BigInt(this.config.sampleRate)]),\n []\n );\n } catch (e) {\n // Fallback: some iOS Safari versions may not support BigInt64Array\n // ORT also accepts readonly bigint[] for int64 tensors\n logger.warn('BigInt64Array not available, using bigint array fallback', {\n error: e instanceof Error ? e.message : String(e),\n });\n this.srTensor = new this.ort.Tensor(\n 'int64',\n [BigInt(this.config.sampleRate)] as unknown as BigInt64Array,\n []\n );\n }\n }\n }\n\n /**\n * Process a single audio chunk\n *\n * @param audioChunk - Float32Array of exactly chunkSize samples (512 for 16kHz, 256 for 8kHz)\n * @returns VAD result with speech probability\n */\n async process(audioChunk: Float32Array): Promise<VADResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n if (audioChunk.length !== this.chunkSize) {\n throw new Error(\n `Audio chunk must be exactly ${this.chunkSize} samples (got ${audioChunk.length}). 
` +\n `Use getChunkSize() to get required size.`\n );\n }\n\n return this.queueInference(audioChunk);\n }\n\n /**\n * Process audio and detect speech segments\n *\n * @param audio - Complete audio buffer\n * @param options - Detection options\n * @returns Array of speech segments\n */\n async detectSpeech(\n audio: Float32Array,\n options: {\n /** Minimum speech duration in ms (default: 250) */\n minSpeechDurationMs?: number;\n /** Minimum silence duration to end segment in ms (default: 300) */\n minSilenceDurationMs?: number;\n /** Padding to add before/after speech in ms (default: 30) */\n speechPadMs?: number;\n } = {}\n ): Promise<SpeechSegment[]> {\n const {\n minSpeechDurationMs = 250,\n minSilenceDurationMs = 300,\n speechPadMs = 30,\n } = options;\n\n this.reset();\n\n const segments: SpeechSegment[] = [];\n const chunkDurationMs = this.getChunkDurationMs();\n const minSpeechChunks = Math.ceil(minSpeechDurationMs / chunkDurationMs);\n const minSilenceChunks = Math.ceil(minSilenceDurationMs / chunkDurationMs);\n const padChunks = Math.ceil(speechPadMs / chunkDurationMs);\n\n let inSpeech = false;\n let speechStart = 0;\n let silenceCount = 0;\n let speechChunks = 0;\n let totalProb = 0;\n\n // Process in chunks\n for (let i = 0; i + this.chunkSize <= audio.length; i += this.chunkSize) {\n const chunk = audio.slice(i, i + this.chunkSize);\n const result = await this.process(chunk);\n const chunkIndex = i / this.chunkSize;\n const timeMs = chunkIndex * chunkDurationMs;\n\n if (result.isSpeech) {\n if (!inSpeech) {\n // Start of speech\n inSpeech = true;\n speechStart = Math.max(0, timeMs - speechPadMs);\n silenceCount = 0;\n speechChunks = 0;\n totalProb = 0;\n }\n silenceCount = 0;\n speechChunks++;\n totalProb += result.probability;\n } else if (inSpeech) {\n silenceCount++;\n if (silenceCount >= minSilenceChunks) {\n // End of speech\n if (speechChunks >= minSpeechChunks) {\n segments.push({\n start: speechStart / 1000,\n end: (timeMs + speechPadMs) / 1000,\n avgProbability: totalProb / speechChunks,\n });\n }\n inSpeech = false;\n }\n }\n }\n\n // Handle trailing speech\n if (inSpeech && speechChunks >= minSpeechChunks) {\n const endMs = (audio.length / this.config.sampleRate) * 1000;\n segments.push({\n start: speechStart / 1000,\n end: endMs / 1000,\n avgProbability: totalProb / speechChunks,\n });\n }\n\n return segments;\n }\n\n /**\n * Calculate RMS energy of audio chunk\n */\n private calculateRMS(samples: Float32Array): number {\n let sum = 0;\n for (let i = 0; i < samples.length; i++) {\n sum += samples[i] * samples[i];\n }\n return Math.sqrt(sum / samples.length);\n }\n\n /**\n * Queue inference to serialize ONNX session calls\n */\n private queueInference(audioChunk: Float32Array): Promise<VADResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n // During interruptions, audioChunk's buffer may get detached by ONNX Runtime\n // before we access it in the async queue. 
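`detectSpeech()` converts the millisecond thresholds above into chunk counts (32 ms per chunk at 16 kHz). A usage sketch assuming a loaded `SileroVADInference` instance `vad` and a 16 kHz `audio` buffer:

```typescript
const segments = await vad.detectSpeech(audio, {
  minSpeechDurationMs: 250,  // ceil(250 / 32) =  8 speech chunks required to keep a segment
  minSilenceDurationMs: 300, // ceil(300 / 32) = 10 silent chunks end a segment
  speechPadMs: 30,           // ceil(30 / 32)  =  1 chunk of padding on each side
});

for (const seg of segments) {
  console.log(`${seg.start.toFixed(2)}s - ${seg.end.toFixed(2)}s`, `p=${seg.avgProbability.toFixed(2)}`);
}
```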
Copy synchronously to preserve data.\n const audioChunkCopy = new Float32Array(audioChunk);\n\n // Energy pre-filter: skip inference on very quiet audio\n // This prevents false positives from blank/silent chunks and saves compute\n const MIN_ENERGY_THRESHOLD = 0.001; // Very low threshold - only filters near-silence\n const rms = this.calculateRMS(audioChunkCopy);\n if (rms < MIN_ENERGY_THRESHOLD) {\n // Update pre-speech buffer even for silent chunks (ring buffer)\n if (!this.wasSpeaking) {\n this.preSpeechBuffer.push(new Float32Array(audioChunkCopy));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n }\n\n logger.trace('Skipping VAD inference - audio too quiet', {\n rms: Math.round(rms * 10000) / 10000,\n threshold: MIN_ENERGY_THRESHOLD,\n });\n\n return Promise.resolve({\n probability: 0,\n isSpeech: false,\n inferenceTimeMs: 0,\n });\n }\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVAD.process', {\n 'inference.backend': this._backend,\n 'inference.chunk_size': this.chunkSize,\n });\n try {\n const startTime = performance.now();\n\n // Prepend context to input\n const inputSize = this.contextSize + this.chunkSize;\n const inputBuffer = new Float32Array(inputSize);\n inputBuffer.set(this.context, 0);\n inputBuffer.set(audioChunkCopy, this.contextSize);\n\n // Create tensors\n // CRITICAL: Force copy to prevent ArrayBuffer detachment by ONNX Runtime Web workers\n // Without copy, WASM backend transfers buffers to workers, causing \"memory access out of bounds\" errors\n const inputBufferCopy = new Float32Array(inputBuffer);\n const inputTensor = new this.ort!.Tensor('float32', inputBufferCopy, [1, inputSize]);\n // Use cached sr tensor (created once in reset(), handles BigInt64Array compatibility)\n const srTensor = this.srTensor!;\n\n // CRITICAL: Also copy state tensor to prevent detachment\n // State tensor is reused across inferences and gets detached during interruptions\n const stateCopy = new Float32Array(this.state!.data as Float32Array);\n const stateTensor = new this.ort!.Tensor('float32', stateCopy, this.state!.dims as number[]);\n\n const feeds = {\n 'input': inputTensor,\n 'state': stateTensor,\n 'sr': srTensor,\n };\n\n // Run inference\n const results = await this.session!.run(feeds);\n\n // Extract outputs\n const outputTensor = results['output'];\n const newStateTensor = results['stateN'] || results['state'];\n\n if (!outputTensor) {\n throw new Error('Missing output tensor from VAD model');\n }\n\n const probability = (outputTensor.data as Float32Array)[0];\n\n // Update state for next call\n if (newStateTensor) {\n this.state = new this.ort!.Tensor(\n 'float32',\n new Float32Array(newStateTensor.data as Float32Array),\n [2, 1, 128]\n );\n }\n\n // Update context (last contextSize samples of input chunk)\n this.context = audioChunk.slice(-this.contextSize);\n\n const inferenceTimeMs = performance.now() - startTime;\n const isSpeech = probability > this.config.threshold;\n\n // Pre-speech buffer logic\n let preSpeechChunks: Float32Array[] | undefined;\n\n if (isSpeech && !this.wasSpeaking) {\n // Silence→Speech transition: populate preSpeechChunks\n preSpeechChunks = [...this.preSpeechBuffer];\n this.preSpeechBuffer = [];\n logger.debug('Speech started with pre-speech buffer', {\n preSpeechChunks: preSpeechChunks.length,\n durationMs: Math.round(preSpeechChunks.length * 
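On the silence-to-speech transition above, `process()` returns `preSpeechChunks` so the start of the utterance is not lost. A streaming sketch; a loaded `vad` and an iterable `chunks` of 512-sample buffers are assumed:

```typescript
const captured: Float32Array[] = [];

for (const chunk of chunks) {                   // chunks: Float32Array[] of 512 samples each
  const result = await vad.process(chunk);
  if (result.isSpeech) {
    if (result.preSpeechChunks) {
      captured.push(...result.preSpeechChunks); // up to ~320 ms buffered before the trigger
    }
    captured.push(chunk);
  }
}
```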
this.getChunkDurationMs()),\n });\n } else if (!isSpeech && !this.wasSpeaking) {\n // Still in silence: maintain ring buffer\n this.preSpeechBuffer.push(new Float32Array(audioChunk));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n } else if (!isSpeech && this.wasSpeaking) {\n // Speech→Silence transition: clear buffer\n this.preSpeechBuffer = [];\n }\n\n this.wasSpeaking = isSpeech;\n\n logger.trace('VAD inference completed', {\n probability: Math.round(probability * 1000) / 1000,\n isSpeech,\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.probability': probability,\n 'inference.is_speech': isSpeech,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'silero-vad',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n probability,\n isSpeech,\n inferenceTimeMs,\n preSpeechChunks,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n this.state = null;\n this.srTensor = null;\n }\n}\n","/**\n * Silero VAD Web Worker implementation\n *\n * Runs Silero VAD inference in a dedicated Web Worker to prevent main thread blocking.\n * Uses inline worker script (Blob URL pattern) to avoid separate file deployment.\n *\n * Key design decisions:\n * - WASM backend only (WebGPU doesn't work in Workers)\n * - LSTM state serialized as Float32Array (Tensors can't cross worker boundary)\n * - Audio copied (not transferred) to retain main thread access for pre-speech buffer\n * - ONNX Runtime loaded from CDN in worker (no bundler complications)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { SileroVADWorker } from '@omote/core';\n *\n * const vad = new SileroVADWorker({\n * modelUrl: '/models/silero-vad.onnx'\n * });\n * await vad.load();\n *\n * // Process 32ms chunks (512 samples at 16kHz)\n * const result = await vad.process(audioChunk);\n * if (result.isSpeech) {\n * console.log('Speech detected!', result.probability);\n * }\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\n\nconst logger = createLogger('SileroVADWorker');\n\n// ONNX Runtime CDN path (matches onnxLoader.ts)\nconst WASM_CDN_PATH = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.23.2/dist/';\n\n// Worker script timeouts\nconst LOAD_TIMEOUT_MS = 10000; // 10 seconds for model load\nconst INFERENCE_TIMEOUT_MS = 1000; // 1 second for inference\n\n/**\n * Messages sent from main thread to worker\n */\nexport type VADWorkerMessage =\n | { type: 'load'; modelUrl: string; sampleRate: 8000 | 16000; wasmPaths: string }\n | { type: 'process'; audio: Float32Array; state: Float32Array; context: Float32Array }\n | { type: 'reset' }\n | { type: 'dispose' };\n\n/**\n * Messages sent from worker to main thread\n */\nexport type VADWorkerResult =\n | { type: 'loaded'; inputNames: string[]; outputNames: string[]; loadTimeMs: number }\n | { 
type: 'result'; probability: number; state: Float32Array; inferenceTimeMs: number }\n | { type: 'reset'; state: Float32Array }\n | { type: 'error'; error: string }\n | { type: 'disposed' };\n\n/**\n * Configuration for Silero VAD Worker\n */\nexport interface VADWorkerConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Sample rate (8000 or 16000, default: 16000) */\n sampleRate?: 8000 | 16000;\n /** Speech probability threshold (default: 0.5) */\n threshold?: number;\n /**\n * Number of audio chunks to keep in pre-speech buffer.\n * When VAD triggers, these chunks are prepended to the speech buffer\n * to capture the beginning of speech that occurred before detection.\n *\n * At 512 samples/chunk and 16kHz:\n * - 10 chunks = 320ms of pre-speech audio\n * - 15 chunks = 480ms of pre-speech audio\n *\n * Default: 10 chunks (320ms)\n */\n preSpeechBufferChunks?: number;\n}\n\n/**\n * VAD model loading information from worker\n */\nexport interface VADWorkerModelInfo {\n backend: 'wasm'; // Worker always uses WASM (no WebGPU in workers)\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n chunkSize: number;\n}\n\n/**\n * Result from a single VAD inference\n */\nexport interface VADResult {\n /** Speech probability (0-1) */\n probability: number;\n /** Whether speech is detected (probability > threshold) */\n isSpeech: boolean;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n /**\n * Pre-speech audio chunks (only present on first speech detection).\n * These are the N chunks immediately before VAD triggered, useful for\n * capturing the beginning of speech that occurred before detection.\n *\n * Only populated when transitioning from silence to speech.\n */\n preSpeechChunks?: Float32Array[];\n}\n\n/**\n * Inline worker script for VAD inference\n *\n * This script is embedded as a string and loaded via Blob URL.\n * It loads ONNX Runtime from CDN and runs VAD inference.\n */\nconst WORKER_SCRIPT = `\n// Silero VAD Worker Script\n// Loaded via Blob URL - no separate file needed\n\nlet ort = null;\nlet session = null;\nlet sampleRate = 16000;\nlet chunkSize = 512;\nlet contextSize = 64;\n\n/**\n * Load ONNX Runtime from CDN\n */\nasync function loadOrt(wasmPaths) {\n if (ort) return;\n\n // Import ONNX Runtime from CDN\n // Using dynamic import with full CDN URL\n const ortUrl = wasmPaths + 'ort.wasm.min.js';\n\n // Load the script by fetching and executing it\n const response = await fetch(ortUrl);\n const scriptText = await response.text();\n\n // Create a blob URL for the script\n const blob = new Blob([scriptText], { type: 'application/javascript' });\n const blobUrl = URL.createObjectURL(blob);\n\n // Import the module\n importScripts(blobUrl);\n URL.revokeObjectURL(blobUrl);\n\n // ort is now available as global\n ort = self.ort;\n\n // Configure WASM settings\n ort.env.wasm.wasmPaths = wasmPaths;\n ort.env.wasm.numThreads = 1; // Single thread in worker\n ort.env.wasm.simd = true;\n ort.env.wasm.proxy = false; // No proxy in worker\n}\n\n/**\n * Load the VAD model\n */\nasync function loadModel(modelUrl, sr) {\n sampleRate = sr;\n chunkSize = sr === 16000 ? 512 : 256;\n contextSize = sr === 16000 ? 
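The worker is created from an embedded script via a Blob URL, and inside it ONNX Runtime is pulled from the CDN with the same Blob-plus-importScripts trick. A minimal sketch of the general pattern under an assumed echo script, purely to illustrate why no separate worker file has to ship with the package:

```typescript
// Inline-worker pattern: the worker source lives in a string, not a deployed file.
const workerSource = `self.onmessage = (e) => self.postMessage({ echo: e.data });`;
const blobUrl = URL.createObjectURL(
  new Blob([workerSource], { type: 'application/javascript' })
);

const worker = new Worker(blobUrl);
worker.onmessage = (e) => console.log('from worker:', e.data);
worker.postMessage('ping');
// URL.revokeObjectURL(blobUrl) can be called once the worker has started.
```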
64 : 32;\n\n // Fetch model data\n const response = await fetch(modelUrl);\n if (!response.ok) {\n throw new Error('Failed to fetch model: ' + response.status + ' ' + response.statusText);\n }\n const modelBuffer = await response.arrayBuffer();\n const modelData = new Uint8Array(modelBuffer);\n\n // Create session with WASM backend\n session = await ort.InferenceSession.create(modelData, {\n executionProviders: ['wasm'],\n graphOptimizationLevel: 'all',\n });\n\n return {\n inputNames: session.inputNames,\n outputNames: session.outputNames,\n };\n}\n\n/**\n * Create initial LSTM state\n */\nfunction createInitialState() {\n return new Float32Array(2 * 1 * 128); // [2, 1, 128]\n}\n\n/**\n * Run VAD inference\n */\nasync function runInference(audio, state, context) {\n const inputSize = contextSize + chunkSize;\n\n // Prepend context to input\n const inputBuffer = new Float32Array(inputSize);\n inputBuffer.set(context, 0);\n inputBuffer.set(audio, contextSize);\n\n // Create tensors\n const inputTensor = new ort.Tensor('float32', new Float32Array(inputBuffer), [1, inputSize]);\n const stateTensor = new ort.Tensor('float32', new Float32Array(state), [2, 1, 128]);\n // Use BigInt64Array constructor (not .from()) for broader compatibility\n let srTensor;\n try {\n srTensor = new ort.Tensor('int64', new BigInt64Array([BigInt(sampleRate)]), []);\n } catch (e) {\n // Fallback for environments without BigInt64Array support\n srTensor = new ort.Tensor('int64', [BigInt(sampleRate)], []);\n }\n\n const feeds = {\n 'input': inputTensor,\n 'state': stateTensor,\n 'sr': srTensor,\n };\n\n // Run inference\n const results = await session.run(feeds);\n\n // Extract outputs\n const outputTensor = results['output'];\n const newStateTensor = results['stateN'] || results['state'];\n\n if (!outputTensor) {\n throw new Error('Missing output tensor from VAD model');\n }\n\n const probability = outputTensor.data[0];\n const newState = new Float32Array(newStateTensor.data);\n\n return { probability, newState };\n}\n\n// Message handler\nself.onmessage = async function(e) {\n const msg = e.data;\n\n try {\n switch (msg.type) {\n case 'load': {\n const startTime = performance.now();\n await loadOrt(msg.wasmPaths);\n const { inputNames, outputNames } = await loadModel(msg.modelUrl, msg.sampleRate);\n const loadTimeMs = performance.now() - startTime;\n\n self.postMessage({\n type: 'loaded',\n inputNames,\n outputNames,\n loadTimeMs,\n });\n break;\n }\n\n case 'process': {\n const startTime = performance.now();\n const { probability, newState } = await runInference(msg.audio, msg.state, msg.context);\n const inferenceTimeMs = performance.now() - startTime;\n\n self.postMessage({\n type: 'result',\n probability,\n state: newState,\n inferenceTimeMs,\n });\n break;\n }\n\n case 'reset': {\n const state = createInitialState();\n self.postMessage({\n type: 'reset',\n state,\n });\n break;\n }\n\n case 'dispose': {\n if (session) {\n await session.release();\n session = null;\n }\n ort = null;\n self.postMessage({ type: 'disposed' });\n break;\n }\n\n default:\n self.postMessage({\n type: 'error',\n error: 'Unknown message type: ' + msg.type,\n });\n }\n } catch (err) {\n self.postMessage({\n type: 'error',\n error: err.message || String(err),\n });\n }\n};\n\n// Error handler\nself.onerror = function(err) {\n self.postMessage({\n type: 'error',\n error: 'Worker error: ' + (err.message || String(err)),\n });\n};\n`;\n\n/**\n * Silero VAD Worker - Voice Activity Detection in a Web Worker\n *\n * Runs Silero VAD inference off 
the main thread to prevent UI blocking.\n * Feature parity with SileroVADInference but runs in dedicated worker.\n *\n * @see SileroVADInference for main-thread version\n */\nexport class SileroVADWorker {\n private worker: Worker | null = null;\n private config: Required<VADWorkerConfig>;\n private isLoading = false;\n private _isLoaded = false;\n\n // LSTM state (kept in main thread, sent to worker for each inference)\n private state: Float32Array;\n\n // Context buffer (last 64 samples for 16kHz, 32 for 8kHz)\n private context: Float32Array;\n\n // Chunk sizes based on sample rate\n private readonly chunkSize: number;\n private readonly contextSize: number;\n\n // Inference queue for serialization\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n // Pre-speech buffer for capturing beginning of speech\n private preSpeechBuffer: Float32Array[] = [];\n private wasSpeaking = false;\n\n // Pending message handlers\n private pendingResolvers: Map<string, { resolve: (value: unknown) => void; reject: (error: Error) => void }> = new Map();\n private messageId = 0;\n\n constructor(config: VADWorkerConfig) {\n const sampleRate = config.sampleRate ?? 16000;\n\n if (sampleRate !== 8000 && sampleRate !== 16000) {\n throw new Error('Silero VAD only supports 8000 or 16000 Hz sample rates');\n }\n\n this.config = {\n modelUrl: config.modelUrl,\n sampleRate,\n threshold: config.threshold ?? 0.5,\n preSpeechBufferChunks: config.preSpeechBufferChunks ?? 10,\n };\n\n // Set chunk sizes based on sample rate\n this.chunkSize = sampleRate === 16000 ? 512 : 256;\n this.contextSize = sampleRate === 16000 ? 64 : 32;\n\n // Initialize state and context\n this.state = new Float32Array(2 * 1 * 128); // [2, 1, 128]\n this.context = new Float32Array(this.contextSize);\n }\n\n get isLoaded(): boolean {\n return this._isLoaded;\n }\n\n /**\n * Backend type (always 'wasm' for Worker, WebGPU not supported in Workers)\n */\n get backend(): 'wasm' | null {\n return this._isLoaded ? 
'wasm' : null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n get threshold(): number {\n return this.config.threshold;\n }\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number {\n return this.chunkSize;\n }\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number {\n return (this.chunkSize / this.config.sampleRate) * 1000;\n }\n\n /**\n * Create the worker from inline script\n */\n private createWorker(): Worker {\n const blob = new Blob([WORKER_SCRIPT], { type: 'application/javascript' });\n const blobUrl = URL.createObjectURL(blob);\n const worker = new Worker(blobUrl);\n\n // Revoke blob URL after worker is created (worker has its own copy)\n URL.revokeObjectURL(blobUrl);\n\n // Set up message handler\n worker.onmessage = (event: MessageEvent<VADWorkerResult>) => {\n this.handleWorkerMessage(event.data);\n };\n\n // Set up error handler\n worker.onerror = (error) => {\n logger.error('Worker error', { error: error.message });\n // Reject any pending operations\n for (const [, resolver] of this.pendingResolvers) {\n resolver.reject(new Error(`Worker error: ${error.message}`));\n }\n this.pendingResolvers.clear();\n };\n\n return worker;\n }\n\n /**\n * Handle messages from worker\n */\n private handleWorkerMessage(result: VADWorkerResult): void {\n // Route to pending resolver based on result type\n const resolver = this.pendingResolvers.get(result.type);\n if (resolver) {\n this.pendingResolvers.delete(result.type);\n if (result.type === 'error') {\n resolver.reject(new Error(result.error));\n } else {\n resolver.resolve(result);\n }\n }\n }\n\n /**\n * Send message to worker and wait for response\n */\n private sendMessage<T>(message: VADWorkerMessage, expectedType: string, timeoutMs: number): Promise<T> {\n return new Promise((resolve, reject) => {\n if (!this.worker) {\n reject(new Error('Worker not initialized'));\n return;\n }\n\n // Set up timeout\n const timeoutId = setTimeout(() => {\n this.pendingResolvers.delete(expectedType);\n reject(new Error(`Worker operation timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n\n // Register resolver\n this.pendingResolvers.set(expectedType, {\n resolve: (value) => {\n clearTimeout(timeoutId);\n resolve(value as T);\n },\n reject: (error) => {\n clearTimeout(timeoutId);\n reject(error);\n },\n });\n\n // Also listen for errors\n this.pendingResolvers.set('error', {\n resolve: () => {}, // Never called for errors\n reject: (error) => {\n clearTimeout(timeoutId);\n this.pendingResolvers.delete(expectedType);\n reject(error);\n },\n });\n\n // Send message\n this.worker.postMessage(message);\n });\n }\n\n /**\n * Load the ONNX model in the worker\n */\n async load(): Promise<VADWorkerModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this._isLoaded) {\n throw new Error('Model already loaded. 
Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVADWorker.load', {\n 'model.url': this.config.modelUrl,\n 'model.sample_rate': this.config.sampleRate,\n });\n\n try {\n logger.info('Creating VAD worker...');\n\n // Create worker\n this.worker = this.createWorker();\n\n logger.info('Loading model in worker...', {\n modelUrl: this.config.modelUrl,\n sampleRate: this.config.sampleRate,\n });\n\n // Send load message to worker\n const result = await this.sendMessage<{\n type: 'loaded';\n inputNames: string[];\n outputNames: string[];\n loadTimeMs: number;\n }>(\n {\n type: 'load',\n modelUrl: this.config.modelUrl,\n sampleRate: this.config.sampleRate,\n wasmPaths: WASM_CDN_PATH,\n },\n 'loaded',\n LOAD_TIMEOUT_MS\n );\n\n this._isLoaded = true;\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('VAD worker loaded successfully', {\n backend: 'wasm',\n loadTimeMs: Math.round(loadTimeMs),\n workerLoadTimeMs: Math.round(result.loadTimeMs),\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n threshold: this.config.threshold,\n });\n\n span?.setAttributes({\n 'model.backend': 'wasm',\n 'model.load_time_ms': loadTimeMs,\n 'model.worker_load_time_ms': result.loadTimeMs,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n });\n\n return {\n backend: 'wasm',\n loadTimeMs,\n inputNames: result.inputNames,\n outputNames: result.outputNames,\n sampleRate: this.config.sampleRate,\n chunkSize: this.chunkSize,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'silero-vad-worker',\n error_type: 'load_failed',\n });\n\n // Clean up on failure\n if (this.worker) {\n this.worker.terminate();\n this.worker = null;\n }\n\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Reset state for new audio stream\n */\n async reset(): Promise<void> {\n if (!this._isLoaded || !this.worker) {\n throw new Error('Worker not loaded. Call load() first.');\n }\n\n // Request reset from worker to get fresh state\n const result = await this.sendMessage<{ type: 'reset'; state: Float32Array }>(\n { type: 'reset' },\n 'reset',\n INFERENCE_TIMEOUT_MS\n );\n\n // Update local state\n this.state = result.state;\n this.context = new Float32Array(this.contextSize);\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n }\n\n /**\n * Process a single audio chunk\n *\n * @param audioChunk - Float32Array of exactly chunkSize samples (512 for 16kHz, 256 for 8kHz)\n * @returns VAD result with speech probability\n */\n async process(audioChunk: Float32Array): Promise<VADResult> {\n if (!this._isLoaded || !this.worker) {\n throw new Error('Worker not loaded. Call load() first.');\n }\n\n if (audioChunk.length !== this.chunkSize) {\n throw new Error(\n `Audio chunk must be exactly ${this.chunkSize} samples (got ${audioChunk.length}). 
` +\n `Use getChunkSize() to get required size.`\n );\n }\n\n return this.queueInference(audioChunk);\n }\n\n /**\n * Queue inference to serialize worker calls\n */\n private queueInference(audioChunk: Float32Array): Promise<VADResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n const audioChunkCopy = new Float32Array(audioChunk);\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('SileroVADWorker.process', {\n 'inference.backend': 'wasm',\n 'inference.chunk_size': this.chunkSize,\n });\n\n try {\n const startTime = performance.now();\n\n // Send process message to worker\n const result = await this.sendMessage<{\n type: 'result';\n probability: number;\n state: Float32Array;\n inferenceTimeMs: number;\n }>(\n {\n type: 'process',\n audio: audioChunkCopy,\n state: this.state,\n context: this.context,\n },\n 'result',\n INFERENCE_TIMEOUT_MS\n );\n\n // Update local state from worker result\n this.state = result.state;\n\n // Update context (last contextSize samples of input chunk)\n this.context = audioChunkCopy.slice(-this.contextSize);\n\n const inferenceTimeMs = performance.now() - startTime;\n const isSpeech = result.probability > this.config.threshold;\n\n // Pre-speech buffer logic (same as SileroVADInference)\n let preSpeechChunks: Float32Array[] | undefined;\n\n if (isSpeech && !this.wasSpeaking) {\n // Silence→Speech transition: populate preSpeechChunks\n preSpeechChunks = [...this.preSpeechBuffer];\n this.preSpeechBuffer = [];\n logger.debug('Speech started with pre-speech buffer', {\n preSpeechChunks: preSpeechChunks.length,\n durationMs: Math.round(preSpeechChunks.length * this.getChunkDurationMs()),\n });\n } else if (!isSpeech && !this.wasSpeaking) {\n // Still in silence: maintain ring buffer\n this.preSpeechBuffer.push(new Float32Array(audioChunkCopy));\n if (this.preSpeechBuffer.length > this.config.preSpeechBufferChunks) {\n this.preSpeechBuffer.shift();\n }\n } else if (!isSpeech && this.wasSpeaking) {\n // Speech→Silence transition: clear buffer\n this.preSpeechBuffer = [];\n }\n\n this.wasSpeaking = isSpeech;\n\n logger.trace('VAD worker inference completed', {\n probability: Math.round(result.probability * 1000) / 1000,\n isSpeech,\n inferenceTimeMs: Math.round(inferenceTimeMs * 100) / 100,\n workerTimeMs: Math.round(result.inferenceTimeMs * 100) / 100,\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.worker_duration_ms': result.inferenceTimeMs,\n 'inference.probability': result.probability,\n 'inference.is_speech': isSpeech,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n status: 'success',\n });\n\n resolve({\n probability: result.probability,\n isSpeech,\n inferenceTimeMs,\n preSpeechChunks,\n });\n } catch (err) {\n span?.endWithError(err instanceof Error ? 
err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'silero-vad-worker',\n backend: 'wasm',\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Dispose of the worker and free resources\n */\n async dispose(): Promise<void> {\n if (this.worker) {\n try {\n // Ask worker to clean up\n await this.sendMessage({ type: 'dispose' }, 'disposed', INFERENCE_TIMEOUT_MS);\n } catch {\n // Ignore errors during dispose\n }\n\n // Terminate worker\n this.worker.terminate();\n this.worker = null;\n }\n\n this._isLoaded = false;\n this.state = new Float32Array(2 * 1 * 128);\n this.context = new Float32Array(this.contextSize);\n this.preSpeechBuffer = [];\n this.wasSpeaking = false;\n this.pendingResolvers.clear();\n }\n\n /**\n * Check if Web Workers are supported\n */\n static isSupported(): boolean {\n return typeof Worker !== 'undefined';\n }\n}\n","/**\n * Factory function for Silero VAD with automatic Worker vs main thread selection\n *\n * Provides a unified API that automatically selects the optimal implementation:\n * - Desktop browsers: Uses SileroVADWorker (off-main-thread inference)\n * - Mobile devices: Uses SileroVADInference (main thread, avoids memory overhead)\n * - Fallback: Gracefully falls back to main thread if Worker fails\n *\n * @category Inference\n *\n * @example Basic usage (auto-detect)\n * ```typescript\n * import { createSileroVAD } from '@omote/core';\n *\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * threshold: 0.5,\n * });\n *\n * await vad.load();\n * const result = await vad.process(audioChunk);\n * if (result.isSpeech) {\n * console.log('Speech detected!', result.probability);\n * }\n * ```\n *\n * @example Force worker usage\n * ```typescript\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * useWorker: true, // Force Worker even on mobile\n * });\n * ```\n *\n * @example Force main thread\n * ```typescript\n * const vad = createSileroVAD({\n * modelUrl: '/models/silero-vad.onnx',\n * useWorker: false, // Force main thread\n * });\n * ```\n */\n\nimport { createLogger } from '../logging';\nimport { isMobile } from '../utils/runtime';\nimport { SileroVADInference } from './SileroVADInference';\nimport type { SileroVADConfig, VADModelInfo, VADResult } from './SileroVADInference';\nimport { SileroVADWorker } from './SileroVADWorker';\nimport type { VADWorkerModelInfo } from './SileroVADWorker';\nimport type { RuntimeBackend } from '../utils/runtime';\n\nconst logger = createLogger('createSileroVAD');\n\n/**\n * Common interface for both SileroVADInference and SileroVADWorker\n *\n * This interface defines the shared API that both implementations provide,\n * allowing consumers to use either interchangeably.\n */\nexport interface SileroVADBackend {\n /** Current backend type (webgpu, wasm, or null if not loaded) */\n readonly backend: RuntimeBackend | null;\n\n /** Whether the model is loaded and ready for inference */\n readonly isLoaded: boolean;\n\n /** Audio sample rate (8000 or 16000 Hz) */\n readonly sampleRate: number;\n\n /** Speech detection threshold (0-1) */\n readonly threshold: number;\n\n /**\n * Load the ONNX model\n * @returns Model loading information\n */\n load(): Promise<VADModelInfo | VADWorkerModelInfo>;\n\n /**\n * Process a single audio chunk\n * @param audioChunk - Float32Array of exactly chunkSize samples\n * @returns VAD result with speech probability\n */\n process(audioChunk: Float32Array): Promise<VADResult>;\n\n /**\n 
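Where a silence→speech transition occurs, `process()` returns the buffered pre-speech chunks so the start of the utterance is not lost. Below is a minimal sketch of stitching those chunks together into one utterance buffer; the `onUtterance` callback and the chunk stream are illustrative assumptions, not part of the package API.

```typescript
import { SileroVADWorker } from '@omote/core';

// Sketch only: accumulate one utterance per speech segment, prepending the
// pre-speech chunks returned on the silence→speech transition.
// Chunks must be exactly getChunkSize() samples (512 at 16 kHz).
async function collectUtterances(
  vad: SileroVADWorker,
  chunks: AsyncIterable<Float32Array>,
  onUtterance: (utterance: Float32Array) => void // hypothetical callback
): Promise<void> {
  let speechChunks: Float32Array[] = [];
  let inSpeech = false;

  for await (const chunk of chunks) {
    const result = await vad.process(chunk);

    if (result.isSpeech) {
      if (!inSpeech) {
        // Silence→speech: preSpeechChunks holds the audio just before detection.
        speechChunks = [...(result.preSpeechChunks ?? [])];
        inSpeech = true;
      }
      speechChunks.push(chunk);
    } else if (inSpeech) {
      // Speech→silence: flatten the collected chunks into one Float32Array.
      const total = speechChunks.reduce((n, c) => n + c.length, 0);
      const utterance = new Float32Array(total);
      let offset = 0;
      for (const c of speechChunks) {
        utterance.set(c, offset);
        offset += c.length;
      }
      onUtterance(utterance);
      speechChunks = [];
      inSpeech = false;
    }
  }
}
```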
* Reset state for new audio stream\n */\n reset(): void | Promise<void>;\n\n /**\n * Dispose of the model and free resources\n */\n dispose(): Promise<void>;\n\n /**\n * Get required chunk size in samples\n */\n getChunkSize(): number;\n\n /**\n * Get chunk duration in milliseconds\n */\n getChunkDurationMs(): number;\n}\n\n/**\n * Configuration for the Silero VAD factory\n *\n * Extends SileroVADConfig with worker-specific options.\n */\nexport interface SileroVADFactoryConfig extends SileroVADConfig {\n /**\n * Force worker usage (true), main thread (false), or auto-detect (undefined).\n *\n * Auto-detection behavior:\n * - Desktop: Uses Worker (better responsiveness, off-main-thread)\n * - Mobile: Uses main thread (avoids 5MB memory overhead)\n *\n * You can override this to:\n * - `true`: Force Worker even on mobile (if you have memory headroom)\n * - `false`: Force main thread even on desktop (for debugging)\n *\n * Default: undefined (auto-detect)\n */\n useWorker?: boolean;\n\n /**\n * Fallback to main thread on worker errors.\n *\n * When true (default), if the Worker fails to load or encounters an error,\n * the factory will automatically create a main thread instance instead.\n *\n * When false, worker errors will propagate as exceptions.\n *\n * Default: true\n */\n fallbackOnError?: boolean;\n}\n\n/**\n * Check if the current environment supports VAD Web Workers\n *\n * Requirements:\n * - Worker constructor must exist\n * - Blob URL support (for inline worker script)\n *\n * @returns true if VAD Worker is supported\n */\nexport function supportsVADWorker(): boolean {\n // Check Worker constructor exists\n if (typeof Worker === 'undefined') {\n logger.debug('Worker not supported: Worker constructor undefined');\n return false;\n }\n\n // Check Blob URL support (needed for inline worker script)\n if (typeof URL === 'undefined' || typeof URL.createObjectURL === 'undefined') {\n logger.debug('Worker not supported: URL.createObjectURL unavailable');\n return false;\n }\n\n // Check Blob support\n if (typeof Blob === 'undefined') {\n logger.debug('Worker not supported: Blob constructor unavailable');\n return false;\n }\n\n return true;\n}\n\n/**\n * Create a Silero VAD instance with automatic implementation selection\n *\n * This factory function automatically selects between:\n * - **SileroVADWorker**: Off-main-thread inference (better for desktop)\n * - **SileroVADInference**: Main thread inference (better for mobile)\n *\n * The selection is based on:\n * 1. Explicit `useWorker` config (if provided)\n * 2. Platform detection (mobile vs desktop)\n * 3. Worker API availability\n *\n * Both implementations share the same interface (SileroVADBackend),\n * so consumers can use either interchangeably.\n *\n * @param config - Factory configuration\n * @returns A SileroVAD instance (either Worker or main thread)\n *\n * @example\n * ```typescript\n * // Auto-detect (recommended)\n * const vad = createSileroVAD({ modelUrl: '/models/silero-vad.onnx' });\n *\n * // Force Worker\n * const vadWorker = createSileroVAD({ modelUrl: '/models/silero-vad.onnx', useWorker: true });\n *\n * // Force main thread\n * const vadMain = createSileroVAD({ modelUrl: '/models/silero-vad.onnx', useWorker: false });\n * ```\n */\nexport function createSileroVAD(config: SileroVADFactoryConfig): SileroVADBackend {\n const fallbackOnError = config.fallbackOnError ?? 
true;\n\n // Determine whether to use Worker\n let useWorker: boolean;\n\n if (config.useWorker !== undefined) {\n // Explicit preference\n useWorker = config.useWorker;\n logger.debug('Worker preference explicitly set', { useWorker });\n } else {\n // Auto-detect based on platform and support\n const workerSupported = supportsVADWorker();\n const onMobile = isMobile();\n\n // Desktop with Worker support: use Worker\n // Mobile: use main thread (memory overhead concern)\n useWorker = workerSupported && !onMobile;\n\n logger.debug('Auto-detected Worker preference', {\n useWorker,\n workerSupported,\n onMobile,\n });\n }\n\n // Create the appropriate implementation\n if (useWorker) {\n logger.info('Creating SileroVADWorker (off-main-thread)');\n const worker = new SileroVADWorker({\n modelUrl: config.modelUrl,\n sampleRate: config.sampleRate,\n threshold: config.threshold,\n preSpeechBufferChunks: config.preSpeechBufferChunks,\n });\n\n if (fallbackOnError) {\n // Wrap with fallback behavior\n return new VADWorkerWithFallback(worker, config);\n }\n\n return worker as SileroVADBackend;\n }\n\n logger.info('Creating SileroVADInference (main thread)');\n return new SileroVADInference(config) as SileroVADBackend;\n}\n\n/**\n * Wrapper that provides automatic fallback from Worker to main thread\n *\n * If the Worker fails during load(), this wrapper will automatically\n * create a main thread SileroVADInference instance instead.\n */\nclass VADWorkerWithFallback implements SileroVADBackend {\n private implementation: SileroVADBackend;\n private readonly config: SileroVADFactoryConfig;\n private hasFallenBack = false;\n\n constructor(worker: SileroVADWorker, config: SileroVADFactoryConfig) {\n this.implementation = worker as SileroVADBackend;\n this.config = config;\n }\n\n get backend(): RuntimeBackend | null {\n // Worker always uses WASM, but hasn't loaded yet\n if (!this.isLoaded) return null;\n return this.hasFallenBack ? (this.implementation as SileroVADInference).backend : 'wasm';\n }\n\n get isLoaded(): boolean {\n return this.implementation.isLoaded;\n }\n\n get sampleRate(): number {\n return this.implementation.sampleRate;\n }\n\n get threshold(): number {\n return this.implementation.threshold;\n }\n\n async load(): Promise<VADModelInfo | VADWorkerModelInfo> {\n try {\n return await this.implementation.load();\n } catch (error) {\n logger.warn('Worker load failed, falling back to main thread', {\n error: error instanceof Error ? 
error.message : String(error),\n });\n\n // Clean up failed worker\n try {\n await this.implementation.dispose();\n } catch {\n // Ignore dispose errors\n }\n\n // Create main thread fallback\n this.implementation = new SileroVADInference(this.config) as SileroVADBackend;\n this.hasFallenBack = true;\n\n logger.info('Fallback to SileroVADInference successful');\n return await this.implementation.load();\n }\n }\n\n async process(audioChunk: Float32Array): Promise<VADResult> {\n return this.implementation.process(audioChunk);\n }\n\n reset(): void | Promise<void> {\n return this.implementation.reset();\n }\n\n async dispose(): Promise<void> {\n return this.implementation.dispose();\n }\n\n getChunkSize(): number {\n return this.implementation.getChunkSize();\n }\n\n getChunkDurationMs(): number {\n return this.implementation.getChunkDurationMs();\n }\n}\n","/**\n * Speech emotion inference\n *\n * Frame-level speech emotion recognition running in browser via ONNX Runtime Web.\n * Classifies audio into 4 emotion categories: neutral, happy, angry, sad.\n *\n * Uses SUPERB wav2vec2-base-superb-er model (~360MB ONNX, Apache 2.0 license).\n * Outputs frame-level embeddings at 50Hz (matching LAM lip sync) plus utterance-level\n * emotion classification.\n *\n * Uses lazy loading to conditionally load WebGPU or WASM-only bundle:\n * - iOS: Loads WASM-only bundle (WebGPU crashes due to Safari bugs)\n * - Android/Desktop: Loads WebGPU bundle (with WASM fallback)\n *\n * @category Inference\n *\n * @example Basic usage\n * ```typescript\n * import { Emotion2VecInference } from '@omote/core';\n *\n * const emotion = new Emotion2VecInference({\n * modelUrl: '/models/emotion/emotion_superb.onnx'\n * });\n * await emotion.load();\n *\n * // Process audio chunk (1 second at 16kHz)\n * const result = await emotion.infer(audioSamples);\n * console.log('Emotion:', result.dominant.emotion, 'Confidence:', result.dominant.confidence);\n * console.log('Frames:', result.frames.length, 'at 50Hz');\n * ```\n *\n * @example Access per-frame emotions\n * ```typescript\n * const result = await emotion.infer(audioSamples);\n * // Each frame has the same emotion (utterance-level applied per-frame)\n * result.frames.forEach((frame, i) => {\n * const timeMs = i * 20; // 50Hz = 20ms per frame\n * console.log(`${timeMs}ms: ${frame.emotion} (${frame.confidence.toFixed(2)})`);\n * });\n * ```\n */\n\n// Type-only import for TypeScript (no runtime code loaded at import time)\n// At runtime, we dynamically import either 'onnxruntime-web' or 'onnxruntime-web/webgpu'\nimport type { InferenceSession, Tensor, Env } from 'onnxruntime-common';\n\n// Type alias for the ORT module (loaded dynamically)\ntype OrtModule = {\n InferenceSession: typeof InferenceSession;\n Tensor: typeof Tensor;\n env: Env;\n};\n\nimport { fetchWithCache, getModelCache, formatBytes } from '../cache/ModelCache';\nimport { createLogger } from '../logging';\nimport { getTelemetry } from '../telemetry';\nimport {\n getOnnxRuntimeForPreference,\n getSessionOptions,\n isWebGPUAvailable,\n type RuntimeBackend,\n} from './onnxLoader';\nimport { BackendPreference } from '../utils/runtime';\n\nconst logger = createLogger('Emotion2Vec');\n\n/**\n * Emotion labels supported by the SUPERB emotion model\n *\n * These are the 4 emotion categories that the model can classify:\n * - neutral: Neutral, calm state\n * - happy: Joy, happiness, excitement\n * - angry: Anger, frustration\n * - sad: Sadness, grief\n *\n * Labels are in model output order (indices 0-3).\n */\nexport 
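The factory options not covered by the examples above (`fallbackOnError`, `preSpeechBufferChunks`) can be combined with the auto-detection path. A small sketch, assuming the model is served from an example `/models/silero-vad.onnx` path as in the docs:

```typescript
import { createSileroVAD } from '@omote/core';

// Sketch: let the factory auto-detect Worker vs main thread, and keep the
// default fallbackOnError behaviour so a Worker load failure degrades to
// main-thread inference instead of throwing.
const vad = createSileroVAD({
  modelUrl: '/models/silero-vad.onnx', // example path
  threshold: 0.6,
  preSpeechBufferChunks: 15, // ~480 ms of pre-speech audio at 16 kHz
  fallbackOnError: true,
});

await vad.load();
// backend is 'wasm' for the Worker path, 'webgpu' or 'wasm' after a fallback
console.log('VAD backend:', vad.backend, 'chunk size:', vad.getChunkSize());
```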
const EMOTION2VEC_LABELS = ['neutral', 'happy', 'angry', 'sad'] as const;\n\nexport type Emotion2VecLabel = (typeof EMOTION2VEC_LABELS)[number];\n\nexport type EmotionBackend = BackendPreference;\n\n/**\n * Configuration for Emotion2Vec inference\n */\nexport interface Emotion2VecConfig {\n /** Path or URL to the ONNX model */\n modelUrl: string;\n /** Preferred backend (auto will try WebGPU first, fallback to WASM) */\n backend?: EmotionBackend;\n /** Sample rate (default: 16000) */\n sampleRate?: number;\n}\n\n/**\n * Model loading information\n */\nexport interface Emotion2VecModelInfo {\n backend: 'webgpu' | 'wasm';\n loadTimeMs: number;\n inputNames: string[];\n outputNames: string[];\n sampleRate: number;\n}\n\n/**\n * Frame-level emotion result\n */\nexport interface EmotionFrame {\n /** Primary emotion label */\n emotion: Emotion2VecLabel;\n /** Confidence for primary emotion (0-1) */\n confidence: number;\n /** All emotion probabilities */\n probabilities: Record<Emotion2VecLabel, number>;\n}\n\n/**\n * Result from emotion inference\n */\nexport interface Emotion2VecResult {\n /** Frame-level emotion results at 50Hz (constant emotion per frame from utterance classification) */\n frames: EmotionFrame[];\n /** Overall dominant emotion for the audio (utterance-level classification) */\n dominant: EmotionFrame;\n /** Frame-level embeddings (768-dim per frame at 50Hz) for advanced use */\n embeddings: Float32Array[];\n /** Raw logits before softmax (4-dim for 4 emotions) */\n logits: Float32Array;\n /** Inference time in milliseconds */\n inferenceTimeMs: number;\n}\n\n/**\n * Emotion2VecInference - Frame-level speech emotion recognition\n *\n * Uses SUPERB wav2vec2-base-superb-er model.\n * Processes audio and outputs:\n * - Frame-level embeddings at 50Hz (matching LAM lip sync)\n * - Utterance-level emotion classification\n *\n * @see https://huggingface.co/superb/wav2vec2-base-superb-er\n */\nexport class Emotion2VecInference {\n private session: InferenceSession | null = null;\n private ort: OrtModule | null = null;\n private config: Required<Emotion2VecConfig>;\n private _backend: RuntimeBackend = 'wasm';\n private isLoading = false;\n private inferenceQueue: Promise<void> = Promise.resolve();\n\n constructor(config: Emotion2VecConfig) {\n this.config = {\n modelUrl: config.modelUrl,\n backend: config.backend ?? 'auto',\n sampleRate: config.sampleRate ?? 16000,\n };\n }\n\n get backend(): RuntimeBackend | null {\n return this.session ? this._backend : null;\n }\n\n get isLoaded(): boolean {\n return this.session !== null;\n }\n\n get sampleRate(): number {\n return this.config.sampleRate;\n }\n\n /**\n * Check if WebGPU is available and working\n * (iOS returns false even if navigator.gpu exists due to ONNX Runtime bugs)\n */\n static isWebGPUAvailable = isWebGPUAvailable;\n\n /**\n * Load the ONNX model\n */\n async load(): Promise<Emotion2VecModelInfo> {\n if (this.isLoading) {\n throw new Error('Model is already loading');\n }\n\n if (this.session) {\n throw new Error('Model already loaded. 
Call dispose() first.');\n }\n\n this.isLoading = true;\n const startTime = performance.now();\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Emotion2Vec.load', {\n 'model.url': this.config.modelUrl,\n 'model.backend_requested': this.config.backend,\n });\n\n try {\n logger.info('Loading ONNX Runtime...', { preference: this.config.backend });\n\n const { ort, backend } = await getOnnxRuntimeForPreference(this.config.backend);\n this.ort = ort;\n this._backend = backend;\n\n logger.info('ONNX Runtime loaded', { backend: this._backend });\n\n // Load model with caching\n logger.info('Checking model cache...');\n const cache = getModelCache();\n const modelUrl = this.config.modelUrl;\n const isCached = await cache.has(modelUrl);\n logger.info('Cache check complete', { modelUrl, isCached });\n\n let modelBuffer: ArrayBuffer;\n if (isCached) {\n logger.info('Loading model from cache...', { modelUrl });\n modelBuffer = (await cache.get(modelUrl))!;\n logger.info('Model loaded from cache', { size: formatBytes(modelBuffer.byteLength) });\n } else {\n logger.info('Fetching model (not cached)...', { modelUrl });\n modelBuffer = await fetchWithCache(modelUrl);\n logger.info('Model fetched and cached', { size: formatBytes(modelBuffer.byteLength) });\n }\n\n logger.info('Creating ONNX session (this may take a while for large models)...');\n logger.debug('Creating ONNX session', {\n size: formatBytes(modelBuffer.byteLength),\n backend: this._backend,\n });\n\n const sessionOptions = getSessionOptions(this._backend);\n const modelData = new Uint8Array(modelBuffer);\n this.session = await ort.InferenceSession.create(modelData, sessionOptions);\n\n const loadTimeMs = performance.now() - startTime;\n\n logger.info('Model loaded successfully', {\n backend: this._backend,\n loadTimeMs: Math.round(loadTimeMs),\n sampleRate: this.config.sampleRate,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n });\n\n span?.setAttributes({\n 'model.backend': this._backend,\n 'model.load_time_ms': loadTimeMs,\n 'model.cached': isCached,\n });\n span?.end();\n telemetry?.recordHistogram('omote.model.load_time', loadTimeMs, {\n model: 'emotion2vec',\n backend: this._backend,\n });\n\n return {\n backend: this._backend,\n loadTimeMs,\n inputNames: [...this.session.inputNames],\n outputNames: [...this.session.outputNames],\n sampleRate: this.config.sampleRate,\n };\n } catch (error) {\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\n telemetry?.incrementCounter('omote.errors.total', 1, {\n model: 'emotion2vec',\n error_type: 'load_failed',\n });\n throw error;\n } finally {\n this.isLoading = false;\n }\n }\n\n /**\n * Run emotion inference on audio samples\n *\n * @param audio - Float32Array of 16kHz audio samples\n * @returns Frame-level emotion results at 50Hz\n */\n async infer(audio: Float32Array): Promise<Emotion2VecResult> {\n if (!this.session) {\n throw new Error('Model not loaded. Call load() first.');\n }\n\n return this.queueInference(audio);\n }\n\n private queueInference(audio: Float32Array): Promise<Emotion2VecResult> {\n // CRITICAL: Force copy IMMEDIATELY to prevent ArrayBuffer detachment\n // During interruptions, audio's buffer may get detached by ONNX Runtime\n // before we access it in the async queue. 
Copy synchronously to preserve data.\n const audioCopy = new Float32Array(audio);\n\n return new Promise((resolve, reject) => {\n this.inferenceQueue = this.inferenceQueue.then(async () => {\n const telemetry = getTelemetry();\n const span = telemetry?.startSpan('Emotion2Vec.infer', {\n 'inference.backend': this._backend,\n 'inference.audio_samples': audioCopy.length,\n });\n\n try {\n const startTime = performance.now();\n\n // Create input tensor\n // SUPERB model expects: [batch, audio_samples] with input name 'audio'\n const inputTensor = new this.ort!.Tensor('float32', audioCopy, [1, audioCopy.length]);\n\n // Run inference - SUPERB model input name is 'audio'\n const results = await this.session!.run({ audio: inputTensor });\n\n // Extract outputs:\n // - logits: [1, 4] utterance-level emotion classification\n // - layer_norm_25: [1, frames, 768] frame-level embeddings\n const logitsTensor = results['logits'];\n const embeddingsTensor = results['layer_norm_25'];\n\n if (!logitsTensor) {\n throw new Error(\n `Missing logits tensor from SUPERB model. Got outputs: ${Object.keys(results).join(', ')}`\n );\n }\n\n // Process utterance-level logits\n const logitsData = logitsTensor.data as Float32Array;\n const logits = new Float32Array(logitsData);\n\n // Apply softmax to get probabilities\n const probs = this.softmax(logits);\n\n // Create probabilities record\n const probabilities: Record<Emotion2VecLabel, number> = {\n neutral: probs[0],\n happy: probs[1],\n angry: probs[2],\n sad: probs[3],\n };\n\n // Find dominant emotion\n let maxIdx = 0;\n let maxProb = probs[0];\n for (let i = 1; i < probs.length; i++) {\n if (probs[i] > maxProb) {\n maxProb = probs[i];\n maxIdx = i;\n }\n }\n\n const dominant: EmotionFrame = {\n emotion: EMOTION2VEC_LABELS[maxIdx],\n confidence: maxProb,\n probabilities,\n };\n\n // Process frame-level embeddings\n let embeddings: Float32Array[] = [];\n let numFrames = 1;\n\n if (embeddingsTensor) {\n const embeddingData = embeddingsTensor.data as Float32Array;\n const dims = embeddingsTensor.dims as number[];\n\n if (dims.length === 3) {\n // [batch, frames, embedding_dim]\n numFrames = dims[1];\n const embeddingDim = dims[2];\n\n for (let i = 0; i < numFrames; i++) {\n const start = i * embeddingDim;\n embeddings.push(new Float32Array(embeddingData.slice(start, start + embeddingDim)));\n }\n }\n }\n\n // Create per-frame emotion results\n // Currently we apply the utterance-level emotion to all frames\n // (Future: could train a per-frame classifier on the embeddings)\n const frames: EmotionFrame[] = [];\n for (let i = 0; i < numFrames; i++) {\n frames.push({\n emotion: dominant.emotion,\n confidence: dominant.confidence,\n probabilities: { ...probabilities },\n });\n }\n\n const inferenceTimeMs = performance.now() - startTime;\n\n logger.debug('Emotion inference completed', {\n numFrames,\n dominant: dominant.emotion,\n confidence: Math.round(dominant.confidence * 100),\n inferenceTimeMs: Math.round(inferenceTimeMs),\n });\n\n span?.setAttributes({\n 'inference.duration_ms': inferenceTimeMs,\n 'inference.num_frames': numFrames,\n 'inference.dominant_emotion': dominant.emotion,\n });\n span?.end();\n telemetry?.recordHistogram('omote.inference.latency', inferenceTimeMs, {\n model: 'emotion2vec',\n backend: this._backend,\n });\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'emotion2vec',\n backend: this._backend,\n status: 'success',\n });\n\n resolve({\n frames,\n dominant,\n embeddings,\n logits,\n inferenceTimeMs,\n });\n } catch (err) 
{\n span?.endWithError(err instanceof Error ? err : new Error(String(err)));\n telemetry?.incrementCounter('omote.inference.total', 1, {\n model: 'emotion2vec',\n backend: this._backend,\n status: 'error',\n });\n reject(err);\n }\n });\n });\n }\n\n /**\n * Apply softmax to convert logits to probabilities\n */\n private softmax(logits: Float32Array): Float32Array {\n // Find max for numerical stability\n let max = logits[0];\n for (let i = 1; i < logits.length; i++) {\n if (logits[i] > max) max = logits[i];\n }\n\n // Compute exp and sum\n const exp = new Float32Array(logits.length);\n let sum = 0;\n for (let i = 0; i < logits.length; i++) {\n exp[i] = Math.exp(logits[i] - max);\n sum += exp[i];\n }\n\n // Normalize\n const probs = new Float32Array(logits.length);\n for (let i = 0; i < logits.length; i++) {\n probs[i] = exp[i] / sum;\n }\n\n return probs;\n }\n\n /**\n * Dispose of the model and free resources\n */\n async dispose(): Promise<void> {\n if (this.session) {\n await this.session.release();\n this.session = null;\n }\n }\n}\n","/**\r\n * Safari Web Speech API wrapper for iOS speech recognition\r\n *\r\n * Provides a similar interface to WhisperInference for easy substitution on iOS.\r\n * Uses the native Web Speech API which is significantly faster than Whisper WASM on iOS.\r\n *\r\n * Key differences from WhisperInference:\r\n * - Real-time streaming (not batch processing)\r\n * - No audio buffer input (microphone handled by browser)\r\n * - transcribe() throws error (use start/stop pattern instead)\r\n *\r\n * @category Inference\r\n *\r\n * @example Basic usage\r\n * ```typescript\r\n * import { SafariSpeechRecognition, shouldUseNativeASR } from '@omote/core';\r\n *\r\n * // Use native ASR on iOS, Whisper elsewhere\r\n * if (shouldUseNativeASR()) {\r\n * const speech = new SafariSpeechRecognition({ language: 'en-US' });\r\n *\r\n * speech.onResult((result) => {\r\n * console.log('Transcript:', result.text);\r\n * });\r\n *\r\n * await speech.start();\r\n * // ... user speaks ...\r\n * const finalResult = await speech.stop();\r\n * }\r\n * ```\r\n *\r\n * @example Platform-aware initialization\r\n * ```typescript\r\n * const asr = shouldUseNativeASR()\r\n * ? 
new SafariSpeechRecognition({ language: 'en-US' })\r\n * : new WhisperInference({ model: 'tiny' });\r\n * ```\r\n */\r\n\r\nimport { createLogger } from '../logging';\r\nimport { getTelemetry } from '../telemetry';\r\nimport { isSpeechRecognitionAvailable } from '../utils/runtime';\r\n\r\nconst logger = createLogger('SafariSpeech');\r\n\r\n/**\r\n * Configuration for Safari Speech Recognition\r\n */\r\nexport interface SafariSpeechConfig {\r\n /** Language code (default: 'en-US') */\r\n language?: string;\r\n /** Continuous mode for ongoing conversation (default: true) */\r\n continuous?: boolean;\r\n /** Interim results before speech ends (default: true) */\r\n interimResults?: boolean;\r\n /** Max alternatives (default: 1) */\r\n maxAlternatives?: number;\r\n}\r\n\r\n/**\r\n * Result from speech recognition (matches WhisperInference TranscriptionResult)\r\n */\r\nexport interface SpeechRecognitionResult {\r\n /** Transcribed text */\r\n text: string;\r\n /** Detected/used language */\r\n language: string;\r\n /** Time since start in ms (not inference time - native API) */\r\n inferenceTimeMs: number;\r\n /** Whether this is a final result or interim */\r\n isFinal: boolean;\r\n /** Confidence score (0-1) if available */\r\n confidence?: number;\r\n}\r\n\r\n/**\r\n * Callback for receiving recognition results\r\n */\r\nexport type SpeechResultCallback = (result: SpeechRecognitionResult) => void;\r\n\r\n/**\r\n * Callback for receiving recognition errors\r\n */\r\nexport type SpeechErrorCallback = (error: Error) => void;\r\n\r\n// Type declarations for Web Speech API (not in lib.dom.d.ts by default)\r\ninterface SpeechRecognitionEvent extends Event {\r\n resultIndex: number;\r\n results: SpeechRecognitionResultList;\r\n}\r\n\r\ninterface SpeechRecognitionResultList {\r\n length: number;\r\n item(index: number): SpeechRecognitionResult;\r\n [index: number]: SpeechRecognitionResultItem;\r\n}\r\n\r\ninterface SpeechRecognitionResultItem {\r\n isFinal: boolean;\r\n length: number;\r\n item(index: number): SpeechRecognitionAlternative;\r\n [index: number]: SpeechRecognitionAlternative;\r\n}\r\n\r\ninterface SpeechRecognitionAlternative {\r\n transcript: string;\r\n confidence: number;\r\n}\r\n\r\ninterface SpeechRecognitionErrorEvent extends Event {\r\n error: string;\r\n message: string;\r\n}\r\n\r\ninterface SpeechRecognitionInterface extends EventTarget {\r\n continuous: boolean;\r\n interimResults: boolean;\r\n lang: string;\r\n maxAlternatives: number;\r\n start(): void;\r\n stop(): void;\r\n abort(): void;\r\n onresult: ((event: SpeechRecognitionEvent) => void) | null;\r\n onerror: ((event: SpeechRecognitionErrorEvent) => void) | null;\r\n onend: (() => void) | null;\r\n onstart: (() => void) | null;\r\n onaudiostart: (() => void) | null;\r\n onaudioend: (() => void) | null;\r\n onspeechstart: (() => void) | null;\r\n onspeechend: (() => void) | null;\r\n}\r\n\r\ndeclare global {\r\n interface Window {\r\n SpeechRecognition?: new () => SpeechRecognitionInterface;\r\n webkitSpeechRecognition?: new () => SpeechRecognitionInterface;\r\n }\r\n}\r\n\r\n/**\r\n * Safari Web Speech API wrapper\r\n *\r\n * Provides native speech recognition on iOS Safari.\r\n * Much faster than Whisper WASM and more battery-efficient.\r\n */\r\nexport class SafariSpeechRecognition {\r\n private config: Required<SafariSpeechConfig>;\r\n private recognition: SpeechRecognitionInterface | null = null;\r\n private isListening = false;\r\n private startTime = 0;\r\n private accumulatedText = '';\r\n\r\n // 
Callbacks\r\n private resultCallbacks: SpeechResultCallback[] = [];\r\n private errorCallbacks: SpeechErrorCallback[] = [];\r\n\r\n // Promise resolvers for stop()\r\n private stopResolver: ((result: SpeechRecognitionResult) => void) | null = null;\r\n private stopRejecter: ((error: Error) => void) | null = null;\r\n\r\n constructor(config: SafariSpeechConfig = {}) {\r\n this.config = {\r\n language: config.language ?? 'en-US',\r\n continuous: config.continuous ?? true,\r\n interimResults: config.interimResults ?? true,\r\n maxAlternatives: config.maxAlternatives ?? 1,\r\n };\r\n\r\n logger.debug('SafariSpeechRecognition created', {\r\n language: this.config.language,\r\n continuous: this.config.continuous,\r\n });\r\n }\r\n\r\n /**\r\n * Check if Web Speech API is available\r\n */\r\n static isAvailable(): boolean {\r\n return isSpeechRecognitionAvailable();\r\n }\r\n\r\n /**\r\n * Check if currently listening\r\n */\r\n get listening(): boolean {\r\n return this.isListening;\r\n }\r\n\r\n /**\r\n * Get the language being used\r\n */\r\n get language(): string {\r\n return this.config.language;\r\n }\r\n\r\n /**\r\n * Register a callback for receiving results\r\n */\r\n onResult(callback: SpeechResultCallback): void {\r\n this.resultCallbacks.push(callback);\r\n }\r\n\r\n /**\r\n * Register a callback for receiving errors\r\n */\r\n onError(callback: SpeechErrorCallback): void {\r\n this.errorCallbacks.push(callback);\r\n }\r\n\r\n /**\r\n * Remove a result callback\r\n */\r\n offResult(callback: SpeechResultCallback): void {\r\n const index = this.resultCallbacks.indexOf(callback);\r\n if (index !== -1) {\r\n this.resultCallbacks.splice(index, 1);\r\n }\r\n }\r\n\r\n /**\r\n * Remove an error callback\r\n */\r\n offError(callback: SpeechErrorCallback): void {\r\n const index = this.errorCallbacks.indexOf(callback);\r\n if (index !== -1) {\r\n this.errorCallbacks.splice(index, 1);\r\n }\r\n }\r\n\r\n /**\r\n * Start listening for speech\r\n *\r\n * On iOS Safari, this will trigger the microphone permission prompt\r\n * if not already granted.\r\n */\r\n async start(): Promise<void> {\r\n if (this.isListening) {\r\n logger.warn('Already listening');\r\n return;\r\n }\r\n\r\n if (!SafariSpeechRecognition.isAvailable()) {\r\n const error = new Error(\r\n 'Web Speech API not available. ' +\r\n 'This API is supported in Safari (iOS/macOS) and Chrome. 
' +\r\n 'On iOS, use Safari for native speech recognition.'\r\n );\r\n this.emitError(error);\r\n throw error;\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.start', {\r\n 'speech.language': this.config.language,\r\n 'speech.continuous': this.config.continuous,\r\n });\r\n\r\n try {\r\n // Create recognition instance\r\n const SpeechRecognitionClass = window.SpeechRecognition || window.webkitSpeechRecognition;\r\n if (!SpeechRecognitionClass) {\r\n throw new Error('SpeechRecognition constructor not found');\r\n }\r\n\r\n this.recognition = new SpeechRecognitionClass();\r\n this.recognition.continuous = this.config.continuous;\r\n this.recognition.interimResults = this.config.interimResults;\r\n this.recognition.lang = this.config.language;\r\n this.recognition.maxAlternatives = this.config.maxAlternatives;\r\n\r\n // Set up event handlers\r\n this.setupEventHandlers();\r\n\r\n // Start recognition\r\n this.recognition.start();\r\n this.isListening = true;\r\n this.startTime = performance.now();\r\n this.accumulatedText = '';\r\n\r\n logger.info('Speech recognition started', {\r\n language: this.config.language,\r\n });\r\n\r\n span?.end();\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n this.emitError(error instanceof Error ? error : new Error(String(error)));\r\n throw error;\r\n }\r\n }\r\n\r\n /**\r\n * Stop listening and return the final transcript\r\n */\r\n async stop(): Promise<SpeechRecognitionResult> {\r\n if (!this.isListening || !this.recognition) {\r\n logger.warn('Not currently listening');\r\n return {\r\n text: this.accumulatedText,\r\n language: this.config.language,\r\n inferenceTimeMs: 0,\r\n isFinal: true,\r\n };\r\n }\r\n\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.stop');\r\n\r\n return new Promise((resolve, reject) => {\r\n this.stopResolver = resolve;\r\n this.stopRejecter = reject;\r\n\r\n try {\r\n this.recognition!.stop();\r\n // onend handler will resolve the promise\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n this.isListening = false;\r\n reject(error);\r\n }\r\n });\r\n }\r\n\r\n /**\r\n * Abort recognition without waiting for final result\r\n */\r\n abort(): void {\r\n if (this.recognition && this.isListening) {\r\n this.recognition.abort();\r\n this.isListening = false;\r\n logger.info('Speech recognition aborted');\r\n }\r\n }\r\n\r\n /**\r\n * NOT SUPPORTED: Transcribe audio buffer\r\n *\r\n * Safari Speech API does not support transcribing pre-recorded audio.\r\n * It only works with live microphone input.\r\n *\r\n * For batch transcription on iOS, use server-side Whisper or a cloud ASR service.\r\n *\r\n * @throws Error always - this method is not supported\r\n */\r\n async transcribe(_audio: Float32Array): Promise<SpeechRecognitionResult> {\r\n throw new Error(\r\n 'SafariSpeechRecognition does not support transcribe() with audio buffers. ' +\r\n 'The Web Speech API only works with live microphone input. 
' +\r\n 'Use start() and stop() for real-time recognition, or use WhisperInference/cloud ASR for batch transcription.'\r\n );\r\n }\r\n\r\n /**\r\n * Dispose of recognition resources\r\n */\r\n dispose(): void {\r\n if (this.recognition) {\r\n if (this.isListening) {\r\n this.recognition.abort();\r\n }\r\n this.recognition = null;\r\n }\r\n this.isListening = false;\r\n this.resultCallbacks = [];\r\n this.errorCallbacks = [];\r\n logger.debug('SafariSpeechRecognition disposed');\r\n }\r\n\r\n /**\r\n * Set up event handlers for the recognition instance\r\n */\r\n private setupEventHandlers(): void {\r\n if (!this.recognition) return;\r\n\r\n this.recognition.onresult = (event: SpeechRecognitionEvent) => {\r\n const telemetry = getTelemetry();\r\n const span = telemetry?.startSpan('SafariSpeech.onresult');\r\n\r\n try {\r\n // Process all new results\r\n for (let i = event.resultIndex; i < event.results.length; i++) {\r\n const result = event.results[i];\r\n const alternative = result[0];\r\n\r\n if (alternative) {\r\n const text = alternative.transcript;\r\n const isFinal = result.isFinal;\r\n\r\n // Accumulate final text\r\n if (isFinal) {\r\n this.accumulatedText += text + ' ';\r\n }\r\n\r\n const speechResult: SpeechRecognitionResult = {\r\n text: isFinal ? this.accumulatedText.trim() : text,\r\n language: this.config.language,\r\n inferenceTimeMs: performance.now() - this.startTime,\r\n isFinal,\r\n confidence: alternative.confidence,\r\n };\r\n\r\n // Emit to callbacks\r\n this.emitResult(speechResult);\r\n\r\n logger.trace('Speech result', {\r\n text: text.substring(0, 50),\r\n isFinal,\r\n confidence: alternative.confidence,\r\n });\r\n }\r\n }\r\n\r\n span?.end();\r\n } catch (error) {\r\n span?.endWithError(error instanceof Error ? error : new Error(String(error)));\r\n logger.error('Error processing speech result', { error });\r\n }\r\n };\r\n\r\n this.recognition.onerror = (event: SpeechRecognitionErrorEvent) => {\r\n const error = new Error(`Speech recognition error: ${event.error} - ${event.message}`);\r\n logger.error('Speech recognition error', { error: event.error, message: event.message });\r\n this.emitError(error);\r\n\r\n if (this.stopRejecter) {\r\n this.stopRejecter(error);\r\n this.stopResolver = null;\r\n this.stopRejecter = null;\r\n }\r\n };\r\n\r\n this.recognition.onend = () => {\r\n this.isListening = false;\r\n logger.info('Speech recognition ended', {\r\n totalText: this.accumulatedText.length,\r\n durationMs: performance.now() - this.startTime,\r\n });\r\n\r\n // Resolve stop() promise if pending\r\n if (this.stopResolver) {\r\n const result: SpeechRecognitionResult = {\r\n text: this.accumulatedText.trim(),\r\n language: this.config.language,\r\n inferenceTimeMs: performance.now() - this.startTime,\r\n isFinal: true,\r\n };\r\n this.stopResolver(result);\r\n this.stopResolver = null;\r\n this.stopRejecter = null;\r\n }\r\n };\r\n\r\n this.recognition.onstart = () => {\r\n logger.debug('Speech recognition started by browser');\r\n };\r\n\r\n this.recognition.onspeechstart = () => {\r\n logger.debug('Speech detected');\r\n };\r\n\r\n this.recognition.onspeechend = () => {\r\n logger.debug('Speech ended');\r\n };\r\n }\r\n\r\n /**\r\n * Emit result to all registered callbacks\r\n */\r\n private emitResult(result: SpeechRecognitionResult): void {\r\n for (const callback of this.resultCallbacks) {\r\n try {\r\n callback(result);\r\n } catch (error) {\r\n logger.error('Error in result callback', { error });\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Emit error to all 
registered callbacks\r\n */\r\n private emitError(error: Error): void {\r\n for (const callback of this.errorCallbacks) {\r\n try {\r\n callback(error);\r\n } catch (callbackError) {\r\n logger.error('Error in error callback', { error: callbackError });\r\n }\r\n }\r\n }\r\n}\r\n","/**\n * Emotion - Helper for creating emotion vectors for avatar animation\n *\n * Provides 10 explicit emotion channels that can be used to control\n * avatar expressions and emotional states.\n *\n * @category Emotion\n *\n * @example Creating emotion vectors\n * ```typescript\n * import { createEmotionVector, EmotionPresets } from '@omote/core';\n *\n * // Named weights\n * const happy = createEmotionVector({ joy: 0.8, amazement: 0.2 });\n *\n * // Use preset\n * const surprised = EmotionPresets.surprised;\n * ```\n *\n * @example Smooth transitions\n * ```typescript\n * import { EmotionController } from '@omote/core';\n *\n * const controller = new EmotionController();\n * controller.setPreset('happy');\n * controller.transitionTo({ sadness: 0.7 }, 500);\n *\n * // In animation loop\n * controller.update();\n * const emotion = controller.emotion;\n * ```\n */\n\n/** The 10 explicit emotion channels */\nexport const EMOTION_NAMES = [\n 'amazement',\n 'anger',\n 'cheekiness',\n 'disgust',\n 'fear',\n 'grief',\n 'joy',\n 'outofbreath',\n 'pain',\n 'sadness',\n] as const;\n\nexport type EmotionName = typeof EMOTION_NAMES[number];\n\n/** Emotion weights by name */\nexport type EmotionWeights = Partial<Record<EmotionName, number>>;\n\n/** Total emotion vector size */\nexport const EMOTION_VECTOR_SIZE = 26;\n\n/** Number of explicit emotion channels */\nexport const EXPLICIT_EMOTION_COUNT = 10;\n\n/**\n * Create an emotion vector from named weights\n *\n * @param weights - Named emotion weights (0-1)\n * @returns Float32Array of emotion values\n *\n * @example\n * ```ts\n * const emotion = createEmotionVector({ joy: 0.8, amazement: 0.3 });\n * ```\n */\nexport function createEmotionVector(weights: EmotionWeights = {}): Float32Array {\n const vector = new Float32Array(EMOTION_VECTOR_SIZE);\n\n for (const [name, value] of Object.entries(weights)) {\n const idx = EMOTION_NAMES.indexOf(name as EmotionName);\n if (idx >= 0) {\n vector[idx] = Math.max(0, Math.min(1, value));\n }\n }\n\n return vector;\n}\n\n/**\n * Pre-built emotion presets for common expressions\n */\nexport const EmotionPresets = {\n /** Neutral/default - no emotional expression */\n neutral: createEmotionVector({}),\n\n /** Happy - joy with slight amazement */\n happy: createEmotionVector({ joy: 0.7, amazement: 0.2 }),\n\n /** Sad - grief and sadness */\n sad: createEmotionVector({ sadness: 0.7, grief: 0.4 }),\n\n /** Angry - anger with disgust */\n angry: createEmotionVector({ anger: 0.8, disgust: 0.3 }),\n\n /** Surprised - high amazement */\n surprised: createEmotionVector({ amazement: 0.9, fear: 0.2 }),\n\n /** Scared - fear with pain */\n scared: createEmotionVector({ fear: 0.8, pain: 0.3 }),\n\n /** Disgusted - disgust with anger */\n disgusted: createEmotionVector({ disgust: 0.8, anger: 0.2 }),\n\n /** Excited - joy with amazement and cheekiness */\n excited: createEmotionVector({ joy: 0.6, amazement: 0.5, cheekiness: 0.4 }),\n\n /** Tired - out of breath with sadness */\n tired: createEmotionVector({ outofbreath: 0.6, sadness: 0.3 }),\n\n /** Playful - cheekiness with joy */\n playful: createEmotionVector({ cheekiness: 0.7, joy: 0.5 }),\n\n /** Pained - pain with grief */\n pained: createEmotionVector({ pain: 0.8, grief: 0.4 }),\n\n /** 
Contemplative - slight sadness, calm */\n contemplative: createEmotionVector({ sadness: 0.2, grief: 0.1 }),\n} as const;\n\nexport type EmotionPresetName = keyof typeof EmotionPresets;\n\n/**\n * Get an emotion preset by name\n */\nexport function getEmotionPreset(name: EmotionPresetName): Float32Array {\n return EmotionPresets[name].slice();\n}\n\n/**\n * Blend multiple emotion vectors together\n *\n * @param emotions - Array of { vector, weight } pairs\n * @returns Blended emotion vector\n *\n * @example\n * ```ts\n * const blended = blendEmotions([\n * { vector: EmotionPresets.happy, weight: 0.7 },\n * { vector: EmotionPresets.surprised, weight: 0.3 },\n * ]);\n * ```\n */\nexport function blendEmotions(\n emotions: Array<{ vector: Float32Array; weight: number }>\n): Float32Array {\n const result = new Float32Array(EMOTION_VECTOR_SIZE);\n let totalWeight = 0;\n\n for (const { vector, weight } of emotions) {\n totalWeight += weight;\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] += (vector[i] || 0) * weight;\n }\n }\n\n // Normalize if total weight > 0\n if (totalWeight > 0) {\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] /= totalWeight;\n }\n }\n\n return result;\n}\n\n/**\n * Interpolate between two emotion vectors\n *\n * @param from - Starting emotion\n * @param to - Target emotion\n * @param t - Interpolation factor (0-1)\n * @returns Interpolated emotion vector\n */\nexport function lerpEmotion(\n from: Float32Array,\n to: Float32Array,\n t: number\n): Float32Array {\n const result = new Float32Array(EMOTION_VECTOR_SIZE);\n const clampedT = Math.max(0, Math.min(1, t));\n\n for (let i = 0; i < EMOTION_VECTOR_SIZE; i++) {\n result[i] = (from[i] || 0) * (1 - clampedT) + (to[i] || 0) * clampedT;\n }\n\n return result;\n}\n\n/**\n * EmotionController - Manages emotion state with smooth transitions\n */\nexport class EmotionController {\n private currentEmotion = new Float32Array(EMOTION_VECTOR_SIZE);\n private targetEmotion = new Float32Array(EMOTION_VECTOR_SIZE);\n private transitionProgress = 1.0;\n private transitionDuration = 0;\n private transitionStartTime = 0;\n\n /**\n * Get the current emotion vector\n */\n get emotion(): Float32Array {\n if (this.transitionProgress >= 1.0) {\n return this.targetEmotion;\n }\n\n // Interpolate during transition\n return lerpEmotion(this.currentEmotion, this.targetEmotion, this.transitionProgress);\n }\n\n /**\n * Set emotion immediately (no transition)\n */\n set(weights: EmotionWeights): void {\n const newEmotion = createEmotionVector(weights);\n this.targetEmotion.set(newEmotion);\n this.currentEmotion.set(newEmotion);\n this.transitionProgress = 1.0;\n }\n\n /**\n * Set emotion from preset immediately\n */\n setPreset(preset: EmotionPresetName): void {\n const newEmotion = getEmotionPreset(preset);\n this.targetEmotion.set(newEmotion);\n this.currentEmotion.set(newEmotion);\n this.transitionProgress = 1.0;\n }\n\n /**\n * Transition to new emotion over time\n *\n * @param weights - Target emotion weights\n * @param durationMs - Transition duration in milliseconds\n */\n transitionTo(weights: EmotionWeights, durationMs: number): void {\n this.currentEmotion.set(this.emotion);\n this.targetEmotion.set(createEmotionVector(weights));\n this.transitionDuration = durationMs;\n this.transitionStartTime = performance.now();\n this.transitionProgress = 0;\n }\n\n /**\n * Transition to preset over time\n */\n transitionToPreset(preset: EmotionPresetName, durationMs: number): void {\n 
this.currentEmotion.set(this.emotion);\n this.targetEmotion.set(getEmotionPreset(preset));\n this.transitionDuration = durationMs;\n this.transitionStartTime = performance.now();\n this.transitionProgress = 0;\n }\n\n /**\n * Update transition progress (call each frame)\n */\n update(): void {\n if (this.transitionProgress >= 1.0) return;\n\n const elapsed = performance.now() - this.transitionStartTime;\n this.transitionProgress = Math.min(1.0, elapsed / this.transitionDuration);\n }\n\n /**\n * Check if currently transitioning\n */\n get isTransitioning(): boolean {\n return this.transitionProgress < 1.0;\n }\n\n /**\n * Reset to neutral\n */\n reset(): void {\n this.currentEmotion.fill(0);\n this.targetEmotion.fill(0);\n this.transitionProgress = 1.0;\n }\n}\n","/**\r\n * AWS AgentCore Adapter\r\n *\r\n * Primary AI adapter for the Omote Platform.\r\n *\r\n * Pipeline:\r\n * User Audio -> Whisper ASR (local) -> Text\r\n * Text -> AgentCore (WebSocket) -> Response Text + Audio chunks (TTS handled backend-side)\r\n * Audio chunks -> LAM (local) -> Blendshapes -> Render\r\n *\r\n * @category AI\r\n */\r\n\r\nimport { EventEmitter } from '../../events/EventEmitter';\r\nimport type {\r\n AIAdapter,\r\n AIAdapterEvents,\r\n SessionConfig,\r\n AISessionState,\r\n ConversationMessage,\r\n TenantConfig,\r\n} from '../interfaces/AIAdapter';\r\nimport { WhisperInference } from '../../inference/WhisperInference';\r\nimport { Wav2Vec2Inference, LAM_BLENDSHAPES } from '../../inference/Wav2Vec2Inference';\r\nimport { SileroVADInference } from '../../inference/SileroVADInference';\r\nimport { EmotionController } from '../../emotion/Emotion';\r\nimport { SyncedAudioPipeline } from '../../audio/SyncedAudioPipeline';\r\n\r\n/**\r\n * AgentCore-specific configuration\r\n */\r\nexport interface AgentCoreConfig {\r\n /** AgentCore WebSocket endpoint */\r\n endpoint: string;\r\n /** AWS region */\r\n region?: string;\r\n /** Model URLs */\r\n models?: {\r\n lamUrl?: string;\r\n };\r\n /** Enable observability */\r\n observability?: {\r\n tracing?: boolean;\r\n metrics?: boolean;\r\n };\r\n}\r\n\r\n/**\r\n * AWS AgentCore Adapter\r\n */\r\nexport class AgentCoreAdapter extends EventEmitter<AIAdapterEvents> implements AIAdapter {\r\n readonly name = 'AgentCore';\r\n\r\n private _state: AISessionState = 'disconnected';\r\n private _sessionId: string | null = null;\r\n private _isConnected = false;\r\n\r\n // Sub-components\r\n private whisper: WhisperInference | null = null;\r\n private vad: SileroVADInference | null = null;\r\n private lam: Wav2Vec2Inference | null = null;\r\n private emotionController: EmotionController;\r\n private pipeline: SyncedAudioPipeline | null = null;\r\n\r\n // WebSocket connection to AgentCore\r\n private ws: WebSocket | null = null;\r\n private wsReconnectAttempts = 0;\r\n private readonly maxReconnectAttempts = 5;\r\n\r\n // Audio buffers\r\n private audioBuffer: Float32Array[] = [];\r\n\r\n // Conversation state\r\n private history: ConversationMessage[] = [];\r\n private currentConfig: SessionConfig | null = null;\r\n private agentCoreConfig: AgentCoreConfig;\r\n\r\n // Interruption handling\r\n private isSpeaking = false;\r\n private currentTtsAbortController: AbortController | null = null;\r\n\r\n // Auth token cache per tenant\r\n private tokenCache = new Map<string, { token: string; expiresAt: number }>();\r\n\r\n constructor(config: AgentCoreConfig) {\r\n super();\r\n this.agentCoreConfig = config;\r\n this.emotionController = new EmotionController();\r\n }\r\n\r\n get 
state(): AISessionState {\r\n return this._state;\r\n }\r\n\r\n get sessionId(): string | null {\r\n return this._sessionId;\r\n }\r\n\r\n get isConnected(): boolean {\r\n return this._isConnected;\r\n }\r\n\r\n /**\r\n * Connect to AgentCore with session configuration\r\n */\r\n async connect(config: SessionConfig): Promise<void> {\r\n this.currentConfig = config;\r\n this._sessionId = config.sessionId;\r\n\r\n try {\r\n // 1. Get/refresh auth token for tenant\r\n const authToken = await this.getAuthToken(config.tenant);\r\n\r\n // 2. Initialize local inference components in parallel\r\n await Promise.all([\r\n this.initWhisper(),\r\n this.initLAM(),\r\n ]);\r\n\r\n // 3. Connect to AgentCore WebSocket\r\n await this.connectWebSocket(authToken, config);\r\n\r\n this._isConnected = true;\r\n this.setState('idle');\r\n\r\n this.emit('connection.opened', { sessionId: this._sessionId, adapter: this.name });\r\n } catch (error) {\r\n this.setState('error');\r\n this.emit('connection.error', {\r\n error: error as Error,\r\n recoverable: true,\r\n });\r\n throw error;\r\n }\r\n }\r\n\r\n /**\r\n * Disconnect and cleanup\r\n */\r\n async disconnect(): Promise<void> {\r\n // Cancel any ongoing TTS\r\n this.currentTtsAbortController?.abort();\r\n\r\n // Stop pipeline\r\n if (this.pipeline) {\r\n this.pipeline.dispose();\r\n this.pipeline = null;\r\n }\r\n\r\n // Close WebSocket\r\n if (this.ws) {\r\n this.ws.close(1000, 'Client disconnect');\r\n this.ws = null;\r\n }\r\n\r\n // Cleanup local components\r\n await Promise.all([\r\n this.whisper?.dispose(),\r\n this.vad?.dispose(),\r\n this.lam?.dispose(),\r\n ]);\r\n\r\n this._isConnected = false;\r\n this.setState('disconnected');\r\n\r\n this.emit('connection.closed', { reason: 'Client disconnect' });\r\n }\r\n\r\n /**\r\n * Push user audio for processing\r\n */\r\n pushAudio(audio: Int16Array | Float32Array): void {\r\n if (!this._isConnected) return;\r\n\r\n // Handle interruption detection (async but fire-and-forget)\r\n if (this.isSpeaking) {\r\n this.detectVoiceActivity(audio).then((hasVoiceActivity) => {\r\n if (hasVoiceActivity) {\r\n this.interrupt();\r\n }\r\n }).catch((error) => {\r\n console.error('[AgentCore] VAD error during interruption detection:', error);\r\n });\r\n // Don't return - still buffer the audio for transcription after interruption\r\n }\r\n\r\n // Convert to Float32 if needed\r\n const float32 = audio instanceof Float32Array\r\n ? 
audio\r\n : this.int16ToFloat32(audio);\r\n\r\n // Buffer audio chunks\r\n this.audioBuffer.push(float32);\r\n\r\n // Debounce and send to Whisper when we have enough\r\n this.scheduleTranscription();\r\n }\r\n\r\n /**\r\n * Send text directly to AgentCore\r\n */\r\n async sendText(text: string): Promise<void> {\r\n if (!this._isConnected || !this.ws) {\r\n throw new Error('Not connected to AgentCore');\r\n }\r\n\r\n // Add to history\r\n this.addToHistory({\r\n role: 'user',\r\n content: text,\r\n timestamp: Date.now(),\r\n });\r\n\r\n this.setState('thinking');\r\n this.emit('ai.thinking.start', { timestamp: Date.now() });\r\n\r\n // Send to AgentCore\r\n this.ws.send(JSON.stringify({\r\n type: 'user_message',\r\n sessionId: this._sessionId,\r\n content: text,\r\n context: {\r\n history: this.history.slice(-10), // Last 10 messages\r\n emotion: Array.from(this.emotionController.emotion),\r\n },\r\n }));\r\n }\r\n\r\n /**\r\n * Interrupt current AI response\r\n */\r\n interrupt(): void {\r\n if (!this.isSpeaking) return;\r\n\r\n this.emit('interruption.detected', { timestamp: Date.now() });\r\n\r\n // Cancel any pending operations\r\n this.currentTtsAbortController?.abort();\r\n this.currentTtsAbortController = null;\r\n\r\n // Notify AgentCore to stop TTS streaming\r\n if (this.ws?.readyState === WebSocket.OPEN) {\r\n this.ws.send(JSON.stringify({\r\n type: 'interrupt',\r\n sessionId: this._sessionId,\r\n timestamp: Date.now(),\r\n }));\r\n }\r\n\r\n this.isSpeaking = false;\r\n this.setState('listening');\r\n\r\n this.emit('interruption.handled', { timestamp: Date.now(), action: 'stop' });\r\n }\r\n\r\n getHistory(): ConversationMessage[] {\r\n return [...this.history];\r\n }\r\n\r\n clearHistory(): void {\r\n this.history = [];\r\n this.emit('memory.updated', { messageCount: 0 });\r\n }\r\n\r\n async healthCheck(): Promise<boolean> {\r\n if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {\r\n return false;\r\n }\r\n\r\n return new Promise((resolve) => {\r\n const timeout = setTimeout(() => resolve(false), 5000);\r\n\r\n const handler = (event: MessageEvent) => {\r\n const data = JSON.parse(event.data);\r\n if (data.type === 'pong') {\r\n clearTimeout(timeout);\r\n this.ws?.removeEventListener('message', handler);\r\n resolve(true);\r\n }\r\n };\r\n\r\n this.ws?.addEventListener('message', handler);\r\n this.ws?.send(JSON.stringify({ type: 'ping' }));\r\n });\r\n }\r\n\r\n // ==================== Private Methods ====================\r\n\r\n private setState(state: AISessionState): void {\r\n const previousState = this._state;\r\n this._state = state;\r\n this.emit('state.change', { state, previousState });\r\n }\r\n\r\n private async getAuthToken(tenant: TenantConfig): Promise<string> {\r\n const cached = this.tokenCache.get(tenant.tenantId);\r\n if (cached && cached.expiresAt > Date.now() + 60000) {\r\n return cached.token;\r\n }\r\n\r\n // If we have an auth token already, use it\r\n if (tenant.credentials.authToken) {\r\n return tenant.credentials.authToken;\r\n }\r\n\r\n // Skip auth for local dev (ws:// endpoints or localhost)\r\n // The simple voice-agent doesn't have an auth endpoint\r\n const endpoint = this.agentCoreConfig.endpoint;\r\n if (endpoint.startsWith('ws://') || endpoint.includes('localhost')) {\r\n return 'local-dev-token';\r\n }\r\n\r\n // Exchange credentials for token (production)\r\n const httpEndpoint = endpoint.replace('wss://', 'https://').replace('ws://', 'http://');\r\n const response = await fetch(`${httpEndpoint}/auth/token`, {\r\n method: 'POST',\r\n 
headers: { 'Content-Type': 'application/json' },\r\n body: JSON.stringify({\r\n tenantId: tenant.tenantId,\r\n apiKey: tenant.credentials.apiKey,\r\n }),\r\n });\r\n\r\n if (!response.ok) {\r\n throw new Error(`Auth failed: ${response.statusText}`);\r\n }\r\n\r\n const { token, expiresIn } = await response.json();\r\n\r\n this.tokenCache.set(tenant.tenantId, {\r\n token,\r\n expiresAt: Date.now() + expiresIn * 1000,\r\n });\r\n\r\n return token;\r\n }\r\n\r\n private async initWhisper(): Promise<void> {\r\n // Initialize Whisper and Silero VAD in parallel\r\n await Promise.all([\r\n // Whisper ASR\r\n (async () => {\r\n this.whisper = new WhisperInference({\r\n model: 'tiny',\r\n device: 'auto',\r\n language: 'en',\r\n });\r\n await this.whisper.load();\r\n })(),\r\n // Silero VAD for accurate voice activity detection\r\n (async () => {\r\n this.vad = new SileroVADInference({\r\n modelUrl: '/models/silero-vad.onnx',\r\n backend: 'webgpu',\r\n sampleRate: 16000,\r\n threshold: 0.5,\r\n });\r\n await this.vad.load();\r\n })(),\r\n ]);\r\n }\r\n\r\n private async initLAM(): Promise<void> {\r\n // LAM (Lip Animation Model) based on wav2vec2\r\n // Outputs 52 ARKit blendshapes directly at 30fps - no PCA solver needed\r\n const lamUrl = this.agentCoreConfig.models?.lamUrl || '/models/unified_wav2vec2_asr_a2e.onnx';\r\n\r\n this.lam = new Wav2Vec2Inference({\r\n modelUrl: lamUrl,\r\n backend: 'auto',\r\n });\r\n\r\n await this.lam.load();\r\n\r\n // Initialize SyncedAudioPipeline for synchronized audio playback + LAM\r\n await this.initPipeline();\r\n }\r\n\r\n private async initPipeline(): Promise<void> {\r\n if (!this.lam) {\r\n throw new Error('LAM must be initialized before pipeline');\r\n }\r\n\r\n this.pipeline = new SyncedAudioPipeline({\r\n lam: this.lam,\r\n sampleRate: 16000,\r\n chunkTargetMs: 200,\r\n });\r\n\r\n await this.pipeline.initialize();\r\n\r\n // Subscribe to pipeline events\r\n this.pipeline.on('frame_ready', (frame: Float32Array) => {\r\n // Emit animation event with synchronized frame\r\n this.emit('animation', {\r\n blendshapes: frame,\r\n get: (name: string) => {\r\n const idx = (LAM_BLENDSHAPES as readonly string[]).indexOf(name);\r\n return idx >= 0 ? 
frame[idx] : 0;\r\n },\r\n timestamp: Date.now(), // Wall clock for client-side logging only\r\n inferenceMs: 0, // Pipeline handles LAM inference asynchronously\r\n });\r\n });\r\n\r\n this.pipeline.on('playback_complete', () => {\r\n this.isSpeaking = false;\r\n this.setState('idle');\r\n this.emit('audio.output.end', { durationMs: 0 });\r\n });\r\n\r\n this.pipeline.on('error', (error: Error) => {\r\n console.error('[AgentCore] Pipeline error:', error);\r\n this.emit('connection.error', {\r\n error,\r\n recoverable: true,\r\n });\r\n });\r\n }\r\n\r\n private async connectWebSocket(authToken: string, config: SessionConfig): Promise<void> {\r\n return new Promise((resolve, reject) => {\r\n const wsUrl = new URL(`${this.agentCoreConfig.endpoint.replace('http', 'ws')}/ws`);\r\n wsUrl.searchParams.set('sessionId', config.sessionId);\r\n wsUrl.searchParams.set('characterId', config.tenant.characterId);\r\n\r\n this.ws = new WebSocket(wsUrl.toString());\r\n\r\n this.ws.onopen = () => {\r\n // Send auth\r\n this.ws?.send(JSON.stringify({\r\n type: 'auth',\r\n token: authToken,\r\n tenantId: config.tenant.tenantId,\r\n systemPrompt: config.systemPrompt,\r\n }));\r\n };\r\n\r\n this.ws.onmessage = (event) => {\r\n this.handleAgentCoreMessage(JSON.parse(event.data));\r\n };\r\n\r\n this.ws.onerror = () => {\r\n reject(new Error('WebSocket connection failed'));\r\n };\r\n\r\n this.ws.onclose = (event) => {\r\n this.handleDisconnect(event);\r\n };\r\n\r\n // Wait for auth confirmation\r\n const authTimeout = setTimeout(() => {\r\n reject(new Error('Auth timeout'));\r\n }, 10000);\r\n\r\n const authHandler = (event: MessageEvent) => {\r\n const data = JSON.parse(event.data);\r\n if (data.type === 'auth_success') {\r\n clearTimeout(authTimeout);\r\n this.ws?.removeEventListener('message', authHandler);\r\n resolve();\r\n } else if (data.type === 'auth_failed') {\r\n clearTimeout(authTimeout);\r\n reject(new Error(data.message));\r\n }\r\n };\r\n\r\n this.ws.addEventListener('message', authHandler);\r\n });\r\n }\r\n\r\n private handleAgentCoreMessage(data: Record<string, unknown>): void {\r\n switch (data.type) {\r\n case 'response_start':\r\n this.setState('speaking');\r\n this.isSpeaking = true;\r\n this.emit('ai.response.start', {\r\n text: data.text as string | undefined,\r\n emotion: data.emotion as string | undefined,\r\n });\r\n // Update emotion state\r\n if (data.emotion) {\r\n this.emotionController.transitionTo(\r\n { [data.emotion as string]: 0.7 },\r\n 300\r\n );\r\n }\r\n // Start pipeline for synchronized playback\r\n if (this.pipeline) {\r\n this.pipeline.start();\r\n }\r\n break;\r\n\r\n case 'response_chunk':\r\n this.emit('ai.response.chunk', {\r\n text: data.text as string,\r\n isLast: data.isLast as boolean,\r\n });\r\n break;\r\n\r\n case 'audio_chunk':\r\n // TTS audio streamed from backend - feed to synchronized pipeline\r\n if (data.audio && this.pipeline) {\r\n const audioData = this.base64ToArrayBuffer(data.audio as string);\r\n const uint8 = new Uint8Array(audioData);\r\n this.pipeline.onAudioChunk(uint8).catch((error) => {\r\n console.error('[AgentCore] Pipeline chunk error:', error);\r\n });\r\n }\r\n break;\r\n\r\n case 'audio_end':\r\n // Signal end of audio stream to pipeline\r\n if (this.pipeline) {\r\n this.pipeline.end().catch((error) => {\r\n console.error('[AgentCore] Pipeline end error:', error);\r\n });\r\n }\r\n // Note: isSpeaking and state will be set to idle by pipeline.playback_complete event\r\n break;\r\n\r\n case 'response_end':\r\n 
this.addToHistory({\r\n role: 'assistant',\r\n content: data.fullText as string,\r\n timestamp: Date.now(),\r\n emotion: data.emotion as string | undefined,\r\n });\r\n this.emit('ai.response.end', {\r\n fullText: data.fullText as string,\r\n durationMs: data.durationMs as number || 0,\r\n });\r\n break;\r\n\r\n case 'memory_updated':\r\n this.emit('memory.updated', {\r\n messageCount: data.messageCount as number,\r\n tokenCount: data.tokenCount as number | undefined,\r\n });\r\n break;\r\n\r\n case 'error':\r\n this.emit('connection.error', {\r\n error: new Error(data.message as string),\r\n recoverable: (data.recoverable as boolean) ?? false,\r\n });\r\n break;\r\n }\r\n }\r\n\r\n private scheduleTranscription(): void {\r\n // No debounce - transcribe immediately when we have enough audio\r\n // This reduces latency significantly (was adding 100ms delay)\r\n\r\n if (this.audioBuffer.length === 0) return;\r\n\r\n // Concatenate buffered audio\r\n const totalLength = this.audioBuffer.reduce((sum, buf) => sum + buf.length, 0);\r\n\r\n // Need minimum samples for Whisper (250ms instead of 1 sec)\r\n // Shorter buffer = faster response time\r\n if (totalLength < 4000) return; // 250ms at 16kHz (was 16000 = 1sec)\r\n\r\n const audio = new Float32Array(totalLength);\r\n let offset = 0;\r\n for (const buf of this.audioBuffer) {\r\n audio.set(buf, offset);\r\n offset += buf.length;\r\n }\r\n this.audioBuffer = [];\r\n\r\n // Check for actual audio content (not silence/blank audio)\r\n // This prevents [BLANK_AUDIO] transcriptions\r\n let sum = 0;\r\n for (let i = 0; i < audio.length; i++) {\r\n sum += audio[i] * audio[i];\r\n }\r\n const rms = Math.sqrt(sum / audio.length);\r\n\r\n // Skip silent audio (too low energy)\r\n if (rms < 0.01) {\r\n console.debug('[AgentCore] Skipping silent audio', { rms, samples: audio.length });\r\n return;\r\n }\r\n\r\n // Transcribe with Whisper\r\n if (this.whisper) {\r\n this.setState('listening');\r\n this.emit('user.speech.start', { timestamp: Date.now() });\r\n\r\n this.whisper.transcribe(audio).then((result) => {\r\n this.emit('user.transcript.final', {\r\n text: result.text,\r\n confidence: 1.0,\r\n });\r\n this.emit('user.speech.end', { timestamp: Date.now(), durationMs: result.inferenceTimeMs });\r\n\r\n // Send to AgentCore (skip [BLANK_AUDIO] or empty transcriptions)\r\n const cleanText = result.text.trim();\r\n if (cleanText && !cleanText.includes('[BLANK_AUDIO]')) {\r\n this.sendText(cleanText).catch((error) => {\r\n console.error('[AgentCore] Send text error:', error);\r\n });\r\n }\r\n }).catch((error) => {\r\n console.error('[AgentCore] Transcription error:', error);\r\n });\r\n }\r\n }\r\n\r\n // REMOVED: processAudioForAnimation() - now handled by SyncedAudioPipeline\r\n // The pipeline manages audio scheduling, LAM inference, and frame synchronization\r\n // Frames are emitted via pipeline.on('frame_ready') event (see initPipeline())\r\n\r\n /**\r\n * Detect voice activity using Silero VAD\r\n * Falls back to simple RMS if VAD not available\r\n */\r\n private async detectVoiceActivity(audio: Int16Array | Float32Array): Promise<boolean> {\r\n // Convert to Float32 if needed\r\n const float32 = audio instanceof Float32Array\r\n ? 
audio\r\n : this.int16ToFloat32(audio);\r\n\r\n // Use Silero VAD if available (much more accurate)\r\n if (this.vad) {\r\n // Silero VAD requires 512-sample chunks (32ms at 16kHz)\r\n const chunkSize = this.vad.getChunkSize();\r\n\r\n // Process available chunks\r\n for (let i = 0; i + chunkSize <= float32.length; i += chunkSize) {\r\n const chunk = float32.slice(i, i + chunkSize);\r\n const result = await this.vad.process(chunk);\r\n\r\n // If any chunk has speech, return true\r\n if (result.isSpeech) {\r\n return true;\r\n }\r\n }\r\n\r\n return false;\r\n }\r\n\r\n // Fallback: Simple RMS-based detection (less accurate)\r\n let sum = 0;\r\n for (let i = 0; i < float32.length; i++) {\r\n sum += float32[i] * float32[i];\r\n }\r\n const rms = Math.sqrt(sum / float32.length);\r\n return rms > 0.02;\r\n }\r\n\r\n private int16ToFloat32(int16: Int16Array): Float32Array {\r\n const float32 = new Float32Array(int16.length);\r\n for (let i = 0; i < int16.length; i++) {\r\n float32[i] = int16[i] / 32768;\r\n }\r\n return float32;\r\n }\r\n\r\n private base64ToArrayBuffer(base64: string): ArrayBuffer {\r\n const binaryString = atob(base64);\r\n const bytes = new Uint8Array(binaryString.length);\r\n for (let i = 0; i < binaryString.length; i++) {\r\n bytes[i] = binaryString.charCodeAt(i);\r\n }\r\n return bytes.buffer;\r\n }\r\n\r\n private addToHistory(message: ConversationMessage): void {\r\n this.history.push(message);\r\n this.emit('memory.updated', { messageCount: this.history.length });\r\n }\r\n\r\n private handleDisconnect(event: CloseEvent): void {\r\n this._isConnected = false;\r\n\r\n if (event.code !== 1000) {\r\n // Abnormal close - attempt reconnect\r\n if (this.wsReconnectAttempts < this.maxReconnectAttempts) {\r\n this.wsReconnectAttempts++;\r\n setTimeout(() => {\r\n if (this.currentConfig) {\r\n this.connect(this.currentConfig).catch(() => {\r\n // Will retry if fails\r\n });\r\n }\r\n }, Math.pow(2, this.wsReconnectAttempts) * 1000);\r\n } else {\r\n this.setState('error');\r\n this.emit('connection.error', {\r\n error: new Error('Max reconnection attempts reached'),\r\n recoverable: false,\r\n });\r\n }\r\n }\r\n\r\n this.emit('connection.closed', { reason: event.reason || 'Connection closed' });\r\n }\r\n}\r\n","/**\n * Conversation Orchestrator\n *\n * Manages the conversation pipeline with AgentCore:\n * - Handles session lifecycle and tenant isolation\n * - Manages adapter events and state\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\nimport type {\n AIAdapter,\n AIAdapterEvents,\n SessionConfig,\n TenantConfig,\n ConversationMessage,\n AISessionState,\n} from '../interfaces/AIAdapter';\nimport type { ConversationSession, SessionSnapshot } from '../interfaces/ConversationSession';\nimport { AgentCoreAdapter, type AgentCoreConfig } from '../adapters/AgentCoreAdapter';\nimport { EmotionController, type EmotionWeights } from '../../emotion/Emotion';\n\n/**\n * Orchestrator configuration\n */\nexport interface OrchestratorConfig {\n /** AgentCore adapter config */\n adapter: AgentCoreConfig;\n /** Connection timeout in ms */\n connectionTimeoutMs?: number;\n /** Max retry attempts */\n maxRetries?: number;\n}\n\n/**\n * Orchestrator events (extends AI adapter events)\n */\nexport interface OrchestratorEvents extends AIAdapterEvents {\n 'session.created': { sessionId: string; tenantId: string };\n 'session.ended': { sessionId: string; reason: string };\n}\n\n/**\n * Internal session implementation\n */\nclass ConversationSessionImpl 
implements ConversationSession {\n readonly sessionId: string;\n readonly createdAt: number;\n\n private _adapter: AIAdapter;\n private _config: SessionConfig;\n private _history: ConversationMessage[] = [];\n private _context = new Map<string, string>();\n private _emotionController: EmotionController;\n private _lastActivityAt: number;\n\n constructor(\n config: SessionConfig,\n adapter: AIAdapter,\n ) {\n this.sessionId = config.sessionId;\n this._config = config;\n this._adapter = adapter;\n this.createdAt = Date.now();\n this._lastActivityAt = Date.now();\n this._emotionController = new EmotionController();\n\n if (config.emotion) {\n this._emotionController.setPreset(config.emotion as Parameters<typeof this._emotionController.setPreset>[0]);\n }\n }\n\n get adapter(): AIAdapter {\n return this._adapter;\n }\n\n get config(): SessionConfig {\n return this._config;\n }\n\n get state(): AISessionState {\n return this._adapter.state;\n }\n\n get history(): ConversationMessage[] {\n return [...this._history];\n }\n\n get emotion(): EmotionWeights {\n return {};\n }\n\n get lastActivityAt(): number {\n return this._lastActivityAt;\n }\n\n async start(): Promise<void> {\n await this._adapter.connect(this._config);\n this._lastActivityAt = Date.now();\n }\n\n async end(): Promise<void> {\n await this._adapter.disconnect();\n }\n\n pushAudio(audio: Int16Array | Float32Array): void {\n this._adapter.pushAudio(audio);\n this._lastActivityAt = Date.now();\n }\n\n async sendText(text: string): Promise<void> {\n await this._adapter.sendText(text);\n this._lastActivityAt = Date.now();\n }\n\n interrupt(): void {\n this._adapter.interrupt();\n this._lastActivityAt = Date.now();\n }\n\n setEmotion(emotion: EmotionWeights): void {\n this._emotionController.set(emotion);\n }\n\n addContext(key: string, value: string): void {\n this._context.set(key, value);\n }\n\n removeContext(key: string): void {\n this._context.delete(key);\n }\n\n getContext(): Record<string, string> {\n return Object.fromEntries(this._context);\n }\n\n export(): SessionSnapshot {\n return {\n sessionId: this.sessionId,\n tenantId: this._config.tenant.tenantId,\n characterId: this._config.tenant.characterId,\n history: this._history,\n context: Object.fromEntries(this._context),\n emotion: this.emotion,\n createdAt: this.createdAt,\n lastActivityAt: this._lastActivityAt,\n };\n }\n\n import(snapshot: SessionSnapshot): void {\n this._history = [...snapshot.history];\n this._context = new Map(Object.entries(snapshot.context));\n this._lastActivityAt = snapshot.lastActivityAt;\n }\n\n syncHistory(): void {\n this._history = this._adapter.getHistory();\n }\n}\n\n/**\n * Conversation Orchestrator\n */\nexport class ConversationOrchestrator extends EventEmitter<OrchestratorEvents> {\n private config: Required<OrchestratorConfig>;\n\n // Adapter\n private adapter: AgentCoreAdapter;\n\n // Sessions per tenant\n private sessions = new Map<string, ConversationSessionImpl>();\n\n // Tenant configurations\n private tenants = new Map<string, TenantConfig>();\n\n // Health monitoring\n private healthCheckInterval: ReturnType<typeof setInterval> | null = null;\n private readonly HEALTH_CHECK_INTERVAL_MS = 30000;\n\n constructor(config: OrchestratorConfig) {\n super();\n this.config = {\n connectionTimeoutMs: 5000,\n maxRetries: 3,\n ...config,\n };\n\n // Initialize adapter\n this.adapter = new AgentCoreAdapter(config.adapter);\n }\n\n /**\n * Register a tenant\n */\n registerTenant(tenant: TenantConfig): void {\n 
this.tenants.set(tenant.tenantId, tenant);\n }\n\n /**\n * Unregister a tenant\n */\n unregisterTenant(tenantId: string): void {\n this.tenants.delete(tenantId);\n }\n\n /**\n * Get tenant config\n */\n getTenant(tenantId: string): TenantConfig | undefined {\n return this.tenants.get(tenantId);\n }\n\n /**\n * Create a new conversation session for a tenant\n */\n async createSession(\n tenantId: string,\n options: Partial<SessionConfig> = {}\n ): Promise<ConversationSession> {\n const tenant = this.tenants.get(tenantId);\n if (!tenant) {\n throw new Error(`Tenant not found: ${tenantId}`);\n }\n\n const sessionId = options.sessionId || this.generateSessionId();\n\n const sessionConfig: SessionConfig = {\n sessionId,\n tenant,\n systemPrompt: options.systemPrompt,\n voice: options.voice,\n emotion: options.emotion,\n language: options.language,\n };\n\n const session = new ConversationSessionImpl(sessionConfig, this.adapter);\n\n this.sessions.set(sessionId, session);\n\n // Forward adapter events\n this.forwardAdapterEvents(this.adapter, sessionId);\n\n // Connect the session\n await session.start();\n\n this.emit('session.created', { sessionId, tenantId });\n\n return session;\n }\n\n /**\n * End a session\n */\n async endSession(sessionId: string): Promise<void> {\n const session = this.sessions.get(sessionId);\n if (session) {\n await session.end();\n this.sessions.delete(sessionId);\n this.emit('session.ended', { sessionId, reason: 'Client requested' });\n }\n }\n\n /**\n * Get session by ID\n */\n getSession(sessionId: string): ConversationSession | undefined {\n return this.sessions.get(sessionId);\n }\n\n /**\n * Get all sessions for a tenant\n */\n getTenantSessions(tenantId: string): ConversationSession[] {\n return Array.from(this.sessions.values())\n .filter(s => s.config.tenant.tenantId === tenantId);\n }\n\n /**\n * Start health monitoring\n */\n startHealthMonitoring(): void {\n if (this.healthCheckInterval) return;\n\n this.healthCheckInterval = setInterval(async () => {\n await this.performHealthCheck();\n }, this.HEALTH_CHECK_INTERVAL_MS);\n }\n\n /**\n * Stop health monitoring\n */\n stopHealthMonitoring(): void {\n if (this.healthCheckInterval) {\n clearInterval(this.healthCheckInterval);\n this.healthCheckInterval = null;\n }\n }\n\n /**\n * Dispose all resources\n */\n async dispose(): Promise<void> {\n this.stopHealthMonitoring();\n\n // End all sessions\n const endPromises = Array.from(this.sessions.values()).map(s => s.end());\n await Promise.all(endPromises);\n this.sessions.clear();\n\n // Disconnect adapter\n await this.adapter.disconnect();\n }\n\n // ==================== Private Methods ====================\n\n private generateSessionId(): string {\n return `sess_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;\n }\n\n private forwardAdapterEvents(adapter: AIAdapter, sessionId: string): void {\n // Forward key events with session context\n const events: (keyof AIAdapterEvents)[] = [\n 'state.change',\n 'user.speech.start',\n 'user.speech.end',\n 'user.transcript.partial',\n 'user.transcript.final',\n 'ai.thinking.start',\n 'ai.response.start',\n 'ai.response.chunk',\n 'ai.response.end',\n 'audio.output.chunk',\n 'audio.output.end',\n 'animation',\n 'memory.updated',\n 'connection.error',\n 'interruption.detected',\n 'interruption.handled',\n ];\n\n for (const event of events) {\n adapter.on(event, (data) => {\n const eventData = data as Record<string, unknown>;\n this.emit(event, { ...eventData, sessionId } as AIAdapterEvents[typeof event]);\n });\n 
}\n }\n\n private async performHealthCheck(): Promise<void> {\n try {\n await this.adapter.healthCheck();\n } catch {\n // Adapter health check failed\n }\n }\n}\n","/**\n * Tenant Manager\n *\n * Handles multi-tenant isolation for the Omote Platform:\n * - Credential isolation per tenant\n * - Session scoping per tenant\n * - Quota management\n * - Token refresh\n *\n * @category AI\n */\n\nimport type { TenantConfig } from '../interfaces/AIAdapter';\n\n/**\n * Tenant quota configuration\n */\nexport interface TenantQuota {\n /** Max concurrent sessions */\n maxSessions: number;\n /** Requests per minute */\n requestsPerMinute: number;\n /** Max tokens per conversation */\n maxTokensPerConversation: number;\n /** Max audio minutes per day */\n maxAudioMinutesPerDay: number;\n}\n\n/**\n * Tenant usage tracking\n */\nexport interface TenantUsage {\n /** Current active sessions */\n currentSessions: number;\n /** Requests in current minute */\n requestsThisMinute: number;\n /** Total tokens used */\n tokensUsed: number;\n /** Audio minutes used today */\n audioMinutesToday: number;\n /** Last reset timestamp */\n lastMinuteReset: number;\n /** Last daily reset timestamp */\n lastDailyReset: number;\n}\n\n/**\n * Token refresh callback\n */\nexport type TokenRefreshCallback = () => Promise<string>;\n\n/**\n * Tenant Manager\n */\nexport class TenantManager {\n private tenants = new Map<string, TenantConfig>();\n private quotas = new Map<string, TenantQuota>();\n private usage = new Map<string, TenantUsage>();\n private tokenRefreshCallbacks = new Map<string, TokenRefreshCallback>();\n\n /**\n * Default quota for new tenants\n */\n static readonly DEFAULT_QUOTA: TenantQuota = {\n maxSessions: 10,\n requestsPerMinute: 60,\n maxTokensPerConversation: 100000,\n maxAudioMinutesPerDay: 60,\n };\n\n /**\n * Register a tenant with quota\n */\n register(\n tenant: TenantConfig,\n quota: TenantQuota = TenantManager.DEFAULT_QUOTA,\n tokenRefreshCallback?: TokenRefreshCallback\n ): void {\n this.tenants.set(tenant.tenantId, tenant);\n this.quotas.set(tenant.tenantId, quota);\n this.usage.set(tenant.tenantId, {\n currentSessions: 0,\n requestsThisMinute: 0,\n tokensUsed: 0,\n audioMinutesToday: 0,\n lastMinuteReset: Date.now(),\n lastDailyReset: Date.now(),\n });\n\n if (tokenRefreshCallback) {\n this.tokenRefreshCallbacks.set(tenant.tenantId, tokenRefreshCallback);\n }\n }\n\n /**\n * Unregister a tenant\n */\n unregister(tenantId: string): void {\n this.tenants.delete(tenantId);\n this.quotas.delete(tenantId);\n this.usage.delete(tenantId);\n this.tokenRefreshCallbacks.delete(tenantId);\n }\n\n /**\n * Get tenant config\n */\n get(tenantId: string): TenantConfig | undefined {\n return this.tenants.get(tenantId);\n }\n\n /**\n * Check if tenant exists\n */\n has(tenantId: string): boolean {\n return this.tenants.has(tenantId);\n }\n\n /**\n * Get all tenant IDs\n */\n getTenantIds(): string[] {\n return Array.from(this.tenants.keys());\n }\n\n /**\n * Check if tenant can create new session\n */\n canCreateSession(tenantId: string): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n return usage.currentSessions < quota.maxSessions;\n }\n\n /**\n * Check if tenant can make request\n */\n canMakeRequest(tenantId: string): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n // Auto-reset minute counter if needed\n 
this.checkMinuteReset(tenantId);\n\n return usage.requestsThisMinute < quota.requestsPerMinute;\n }\n\n /**\n * Check if tenant can use audio\n */\n canUseAudio(tenantId: string, minutes: number): boolean {\n const quota = this.quotas.get(tenantId);\n const usage = this.usage.get(tenantId);\n\n if (!quota || !usage) return false;\n\n // Auto-reset daily counter if needed\n this.checkDailyReset(tenantId);\n\n return usage.audioMinutesToday + minutes <= quota.maxAudioMinutesPerDay;\n }\n\n /**\n * Increment session count\n */\n incrementSessions(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.currentSessions++;\n }\n }\n\n /**\n * Decrement session count\n */\n decrementSessions(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage && usage.currentSessions > 0) {\n usage.currentSessions--;\n }\n }\n\n /**\n * Record a request\n */\n recordRequest(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n this.checkMinuteReset(tenantId);\n usage.requestsThisMinute++;\n }\n }\n\n /**\n * Record token usage\n */\n recordTokens(tenantId: string, tokens: number): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.tokensUsed += tokens;\n }\n }\n\n /**\n * Record audio usage\n */\n recordAudioMinutes(tenantId: string, minutes: number): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n this.checkDailyReset(tenantId);\n usage.audioMinutesToday += minutes;\n }\n }\n\n /**\n * Get fresh auth token for tenant\n */\n async getAuthToken(tenantId: string): Promise<string> {\n const tenant = this.tenants.get(tenantId);\n if (!tenant) {\n throw new Error(`Tenant not found: ${tenantId}`);\n }\n\n // Check if we have a refresh callback\n const callback = this.tokenRefreshCallbacks.get(tenantId);\n if (callback) {\n const token = await callback();\n tenant.credentials.authToken = token;\n return token;\n }\n\n // Return existing token\n if (tenant.credentials.authToken) {\n return tenant.credentials.authToken;\n }\n\n throw new Error(`No auth token available for tenant: ${tenantId}`);\n }\n\n /**\n * Update tenant credentials\n */\n updateCredentials(tenantId: string, credentials: Partial<TenantConfig['credentials']>): void {\n const tenant = this.tenants.get(tenantId);\n if (tenant) {\n tenant.credentials = { ...tenant.credentials, ...credentials };\n }\n }\n\n /**\n * Get usage stats for tenant\n */\n getUsage(tenantId: string): TenantUsage | undefined {\n return this.usage.get(tenantId);\n }\n\n /**\n * Get quota for tenant\n */\n getQuota(tenantId: string): TenantQuota | undefined {\n return this.quotas.get(tenantId);\n }\n\n /**\n * Update quota for tenant\n */\n updateQuota(tenantId: string, quota: Partial<TenantQuota>): void {\n const existing = this.quotas.get(tenantId);\n if (existing) {\n this.quotas.set(tenantId, { ...existing, ...quota });\n }\n }\n\n /**\n * Reset all usage stats for a tenant\n */\n resetUsage(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (usage) {\n usage.requestsThisMinute = 0;\n usage.tokensUsed = 0;\n usage.audioMinutesToday = 0;\n usage.lastMinuteReset = Date.now();\n usage.lastDailyReset = Date.now();\n }\n }\n\n // ==================== Private Methods ====================\n\n private checkMinuteReset(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (!usage) return;\n\n const now = Date.now();\n if (now - usage.lastMinuteReset >= 60000) {\n usage.requestsThisMinute = 0;\n usage.lastMinuteReset = 
now;\n }\n }\n\n private checkDailyReset(tenantId: string): void {\n const usage = this.usage.get(tenantId);\n if (!usage) return;\n\n const now = Date.now();\n const MS_PER_DAY = 24 * 60 * 60 * 1000;\n if (now - usage.lastDailyReset >= MS_PER_DAY) {\n usage.audioMinutesToday = 0;\n usage.lastDailyReset = now;\n }\n }\n}\n","/**\n * Audio Sync Manager\n *\n * Synchronizes TTS audio playback with lip sync animation:\n * - Buffers audio for inference\n * - Manages playback timing\n * - Handles audio queue for streaming\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\n\n/**\n * Audio sync events\n */\nexport interface AudioSyncEvents {\n [key: string]: unknown;\n 'buffer.ready': { audio: Float32Array };\n 'playback.start': Record<string, never>;\n 'playback.end': Record<string, never>;\n 'sync.drift': { driftMs: number };\n}\n\n/**\n * Audio sync configuration\n */\nexport interface AudioSyncConfig {\n /** Target sample rate (default: 16000) */\n sampleRate?: number;\n /** Buffer size for inference (default: 16640) */\n bufferSize?: number;\n /** Overlap between buffers (default: 4160) */\n overlapSize?: number;\n /** Max drift before correction (default: 100ms) */\n maxDriftMs?: number;\n}\n\n/**\n * Audio Sync Manager\n */\nexport class AudioSyncManager extends EventEmitter<AudioSyncEvents> {\n private config: Required<AudioSyncConfig>;\n private audioBuffer: Float32Array;\n private bufferPosition = 0;\n private playbackQueue: Float32Array[] = [];\n private isPlaying = false;\n private audioContext: AudioContext | null = null;\n private playbackStartTime = 0;\n private samplesPlayed = 0;\n\n constructor(config: AudioSyncConfig = {}) {\n super();\n this.config = {\n sampleRate: 16000,\n bufferSize: 16640,\n overlapSize: 4160,\n maxDriftMs: 100,\n ...config,\n };\n\n this.audioBuffer = new Float32Array(this.config.bufferSize);\n }\n\n /**\n * Initialize audio context\n */\n async initialize(): Promise<void> {\n if (!this.audioContext) {\n this.audioContext = new AudioContext({ sampleRate: this.config.sampleRate });\n }\n\n if (this.audioContext.state === 'suspended') {\n await this.audioContext.resume();\n }\n }\n\n /**\n * Push audio chunk for processing and playback\n */\n pushAudio(audio: Float32Array): void {\n // Add to playback queue\n this.playbackQueue.push(audio);\n\n // Buffer for inference\n this.bufferForInference(audio);\n\n // Start playback if not playing\n if (!this.isPlaying && this.playbackQueue.length > 0) {\n this.startPlayback();\n }\n }\n\n /**\n * Buffer audio for inference\n */\n private bufferForInference(audio: Float32Array): void {\n let offset = 0;\n\n while (offset < audio.length) {\n const remaining = this.config.bufferSize - this.bufferPosition;\n const toCopy = Math.min(remaining, audio.length - offset);\n\n this.audioBuffer.set(audio.subarray(offset, offset + toCopy), this.bufferPosition);\n this.bufferPosition += toCopy;\n offset += toCopy;\n\n // Buffer full - emit for processing\n if (this.bufferPosition >= this.config.bufferSize) {\n this.emit('buffer.ready', { audio: new Float32Array(this.audioBuffer) });\n\n // Shift buffer with overlap for continuity\n const overlapStart = this.config.bufferSize - this.config.overlapSize;\n this.audioBuffer.copyWithin(0, overlapStart);\n this.bufferPosition = this.config.overlapSize;\n }\n }\n }\n\n /**\n * Start audio playback\n */\n private async startPlayback(): Promise<void> {\n if (!this.audioContext || this.isPlaying) return;\n\n this.isPlaying = true;\n 
this.playbackStartTime = this.audioContext.currentTime;\n this.samplesPlayed = 0;\n\n this.emit('playback.start', {});\n\n await this.processPlaybackQueue();\n }\n\n /**\n * Process playback queue\n */\n private async processPlaybackQueue(): Promise<void> {\n if (!this.audioContext) return;\n\n while (this.playbackQueue.length > 0) {\n const audio = this.playbackQueue.shift()!;\n\n // Create buffer and source\n const buffer = this.audioContext.createBuffer(1, audio.length, this.config.sampleRate);\n buffer.copyToChannel(audio, 0);\n\n const source = this.audioContext.createBufferSource();\n source.buffer = buffer;\n source.connect(this.audioContext.destination);\n\n // Calculate when to play\n const playTime = this.playbackStartTime + this.samplesPlayed / this.config.sampleRate;\n source.start(playTime);\n\n this.samplesPlayed += audio.length;\n\n // Check for drift\n this.checkDrift();\n\n // Wait for chunk to finish before processing next\n await new Promise(resolve => {\n source.onended = resolve;\n });\n }\n\n this.isPlaying = false;\n this.emit('playback.end', {});\n }\n\n /**\n * Check for audio/animation drift\n */\n private checkDrift(): void {\n if (!this.audioContext) return;\n\n const expectedTime = this.playbackStartTime + this.samplesPlayed / this.config.sampleRate;\n const actualTime = this.audioContext.currentTime;\n const driftMs = (actualTime - expectedTime) * 1000;\n\n if (Math.abs(driftMs) > this.config.maxDriftMs) {\n this.emit('sync.drift', { driftMs });\n }\n }\n\n /**\n * Clear playback queue\n */\n clearQueue(): void {\n this.playbackQueue = [];\n this.bufferPosition = 0;\n this.audioBuffer.fill(0);\n }\n\n /**\n * Stop playback\n */\n stop(): void {\n this.clearQueue();\n this.isPlaying = false;\n }\n\n /**\n * Get current playback position in seconds\n */\n getPlaybackPosition(): number {\n if (!this.audioContext) return 0;\n return this.audioContext.currentTime - this.playbackStartTime;\n }\n\n /**\n * Check if currently playing\n */\n getIsPlaying(): boolean {\n return this.isPlaying;\n }\n\n /**\n * Dispose resources\n */\n dispose(): void {\n this.stop();\n this.audioContext?.close();\n this.audioContext = null;\n }\n}\n","/**\n * Interruption Handler\n *\n * VAD-based interruption detection for AI conversations:\n * - Monitors user audio for speech\n * - Detects when user interrupts AI response\n * - Triggers interruption callbacks\n *\n * @category AI\n */\n\nimport { EventEmitter } from '../../events/EventEmitter';\n\n/**\n * Interruption events\n */\nexport interface InterruptionEvents {\n [key: string]: unknown;\n 'speech.detected': { rms: number };\n 'speech.ended': { durationMs: number };\n 'interruption.triggered': { rms: number; durationMs: number };\n}\n\n/**\n * Interruption handler configuration\n *\n * Industry standards applied:\n * - vadThreshold: 0.5 (Silero VAD default)\n * - minSpeechDurationMs: 200ms (Google/Amazon barge-in standard)\n * - silenceTimeoutMs: 500ms (OpenAI Realtime API standard)\n */\nexport interface InterruptionConfig {\n /** VAD probability threshold for speech detection (default: 0.5, Silero standard) */\n vadThreshold?: number;\n /** Minimum speech duration to trigger interruption (default: 200ms, Google/Amazon standard) */\n minSpeechDurationMs?: number;\n /** Silence duration to end speech (default: 500ms, OpenAI standard) */\n silenceTimeoutMs?: number;\n /** Enable interruption detection (default: true) */\n enabled?: boolean;\n}\n\n/**\n * Interruption Handler\n */\nexport class InterruptionHandler extends 
EventEmitter<InterruptionEvents> {\n private config: Required<InterruptionConfig>;\n private isSpeaking = false;\n private speechStartTime = 0;\n private lastSpeechTime = 0;\n private silenceTimer: ReturnType<typeof setTimeout> | null = null;\n private aiIsSpeaking = false;\n\n // Debouncing: only emit one interruption per speech session\n private interruptionTriggeredThisSession = false;\n\n constructor(config: InterruptionConfig = {}) {\n super();\n this.config = {\n vadThreshold: 0.5, // Silero VAD default\n minSpeechDurationMs: 200, // Google/Amazon barge-in standard\n silenceTimeoutMs: 500, // OpenAI Realtime API standard\n enabled: true,\n ...config,\n };\n }\n\n /**\n * Process VAD result for interruption detection\n * @param vadProbability - Speech probability from VAD (0-1)\n * @param audioEnergy - Optional RMS energy for logging (default: 0)\n */\n processVADResult(vadProbability: number, audioEnergy: number = 0): void {\n if (!this.config.enabled) return;\n\n if (vadProbability > this.config.vadThreshold) {\n this.onSpeechDetected(audioEnergy || vadProbability);\n } else {\n this.onSilenceDetected();\n }\n }\n\n /**\n * @deprecated Use processVADResult() instead. This method uses naive RMS detection.\n * Process audio samples for VAD (legacy - uses simple RMS)\n */\n processAudio(samples: Float32Array | Int16Array): void {\n if (!this.config.enabled) return;\n\n const rms = this.calculateRMS(samples);\n\n // Use RMS as proxy for VAD probability (less accurate)\n // RMS > 0.02 roughly maps to speech probability > 0.5\n const vadProbability = Math.min(rms / 0.02, 1.0);\n\n if (vadProbability > this.config.vadThreshold) {\n this.onSpeechDetected(rms);\n } else {\n this.onSilenceDetected();\n }\n }\n\n /**\n * Notify that AI started speaking\n */\n setAISpeaking(speaking: boolean): void {\n this.aiIsSpeaking = speaking;\n }\n\n /**\n * Enable/disable interruption detection\n */\n setEnabled(enabled: boolean): void {\n this.config.enabled = enabled;\n if (!enabled) {\n this.reset();\n }\n }\n\n /**\n * Update configuration\n */\n updateConfig(config: Partial<InterruptionConfig>): void {\n this.config = { ...this.config, ...config };\n }\n\n /**\n * Reset state\n */\n reset(): void {\n this.isSpeaking = false;\n this.speechStartTime = 0;\n this.lastSpeechTime = 0;\n this.interruptionTriggeredThisSession = false;\n if (this.silenceTimer) {\n clearTimeout(this.silenceTimer);\n this.silenceTimer = null;\n }\n }\n\n /**\n * Get current state\n */\n getState(): { isSpeaking: boolean; speechDurationMs: number } {\n return {\n isSpeaking: this.isSpeaking,\n speechDurationMs: this.isSpeaking ? Date.now() - this.speechStartTime : 0,\n };\n }\n\n // ==================== Private Methods ====================\n\n private calculateRMS(samples: Float32Array | Int16Array): number {\n let sum = 0;\n const scale = samples instanceof Int16Array ? 
32768 : 1;\n\n for (let i = 0; i < samples.length; i++) {\n const sample = samples[i] / scale;\n sum += sample * sample;\n }\n\n return Math.sqrt(sum / samples.length);\n }\n\n private onSpeechDetected(rms: number): void {\n const now = Date.now();\n this.lastSpeechTime = now;\n\n // Clear silence timer\n if (this.silenceTimer) {\n clearTimeout(this.silenceTimer);\n this.silenceTimer = null;\n }\n\n // Start of speech\n if (!this.isSpeaking) {\n this.isSpeaking = true;\n this.speechStartTime = now;\n this.emit('speech.detected', { rms });\n }\n\n // Check for interruption (only emit ONCE per speech session)\n if (this.aiIsSpeaking && !this.interruptionTriggeredThisSession) {\n const speechDuration = now - this.speechStartTime;\n if (speechDuration >= this.config.minSpeechDurationMs) {\n this.interruptionTriggeredThisSession = true;\n this.emit('interruption.triggered', { rms, durationMs: speechDuration });\n }\n }\n }\n\n private onSilenceDetected(): void {\n if (!this.isSpeaking) return;\n\n // Start silence timer\n if (!this.silenceTimer) {\n this.silenceTimer = setTimeout(() => {\n const durationMs = this.lastSpeechTime - this.speechStartTime;\n this.isSpeaking = false;\n this.silenceTimer = null;\n // Reset interruption flag for next speech session\n this.interruptionTriggeredThisSession = false;\n this.emit('speech.ended', { durationMs });\n }, this.config.silenceTimeoutMs);\n }\n }\n}\n","/**\n * HuggingFace CDN Utilities\n *\n * Helper functions for working with HuggingFace CDN URLs.\n * Used by transformers.js models (Whisper, etc.) for model downloads.\n *\n * @category Cache\n */\n\n/**\n * Test URL for HuggingFace CDN reachability check.\n * Uses a small, stable file from a well-known public model.\n */\nexport const HF_CDN_TEST_URL =\n 'https://huggingface.co/Xenova/whisper-tiny/resolve/main/config.json';\n\n/**\n * Parsed HuggingFace URL components\n */\nexport interface HuggingFaceUrlInfo {\n /** Organization or username */\n org: string;\n /** Model name */\n model: string;\n /** Branch, tag, or commit */\n branch: string;\n /** File path within the repository */\n file: string;\n}\n\n/**\n * Parse a HuggingFace CDN URL into its components\n *\n * @param url - The HuggingFace URL to parse\n * @returns Parsed URL info or null if not a valid HF URL\n *\n * @example\n * ```typescript\n * const info = parseHuggingFaceUrl(\n * 'https://huggingface.co/openai/whisper-tiny/resolve/main/model.onnx'\n * );\n * // Returns: { org: 'openai', model: 'whisper-tiny', branch: 'main', file: 'model.onnx' }\n * ```\n */\nexport function parseHuggingFaceUrl(url: string): HuggingFaceUrlInfo | null {\n // Pattern: https://huggingface.co/{org}/{model}/resolve/{branch}/{file...}\n const pattern = /^https:\\/\\/huggingface\\.co\\/([^/]+)\\/([^/]+)\\/resolve\\/([^/]+)\\/(.+)$/;\n const match = url.match(pattern);\n\n if (!match) {\n return null;\n }\n\n return {\n org: match[1],\n model: match[2],\n branch: match[3],\n file: match[4],\n };\n}\n\n/**\n * Check if HuggingFace CDN is reachable\n *\n * Performs a HEAD request to a known HuggingFace model file to verify\n * connectivity. 
Useful for offline detection or network diagnostics.\n *\n * @param testUrl - Optional custom URL to test (defaults to HF_CDN_TEST_URL)\n * @returns True if CDN is reachable, false otherwise\n *\n * @example\n * ```typescript\n * import { isHuggingFaceCDNReachable } from '@omote/core';\n *\n * const reachable = await isHuggingFaceCDNReachable();\n * if (!reachable) {\n * console.log('HuggingFace CDN unreachable - running offline?');\n * // Fall back to cached models or show error\n * }\n * ```\n */\nexport async function isHuggingFaceCDNReachable(testUrl: string = HF_CDN_TEST_URL): Promise<boolean> {\n try {\n const response = await fetch(testUrl, {\n method: 'HEAD',\n cache: 'no-store', // Don't use cached response for reachability check\n });\n\n return response.ok;\n } catch {\n // Network error, timeout, or CORS issue\n return false;\n }\n}\n","/**\n * Utility to clear transformers.js Cache API storage\n *\n * Problem: transformers.js v4 uses Browser Cache API which persists across hard refreshes.\n * If an HTML error page gets cached (due to network errors, CDN issues, or dev server restarts),\n * it will be served instead of JSON files, causing JSON.parse() errors.\n *\n * Solution: Manually clear Cache API storage before loading models.\n *\n * @module utils/transformersCacheClear\n */\n\nimport { createLogger } from '../logging';\n\nconst logger = createLogger('TransformersCache');\n\n/**\n * Clear all transformers.js and HuggingFace caches from Browser Cache API\n *\n * This clears:\n * - transformers-cache (default cache key)\n * - Any caches with 'transformers' or 'huggingface' in the name\n *\n * @param options Configuration options\n * @returns Promise resolving to array of deleted cache names\n */\nexport async function clearTransformersCache(options?: {\n /** Whether to log deletion details (default: true) */\n verbose?: boolean;\n /** Additional cache name patterns to clear (e.g., ['my-custom-cache']) */\n additionalPatterns?: string[];\n}): Promise<string[]> {\n const verbose = options?.verbose ?? true;\n const additionalPatterns = options?.additionalPatterns ?? 
[];\n\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return [];\n }\n\n try {\n const cacheNames = await caches.keys();\n const deletedCaches: string[] = [];\n\n const patterns = [\n 'transformers',\n 'huggingface',\n 'onnx',\n ...additionalPatterns,\n ];\n\n for (const cacheName of cacheNames) {\n const shouldDelete = patterns.some(pattern =>\n cacheName.toLowerCase().includes(pattern.toLowerCase())\n );\n\n if (shouldDelete) {\n if (verbose) {\n logger.info('Deleting cache', { cacheName });\n }\n const deleted = await caches.delete(cacheName);\n if (deleted) {\n deletedCaches.push(cacheName);\n } else if (verbose) {\n logger.warn('Failed to delete cache', { cacheName });\n }\n }\n }\n\n if (verbose) {\n logger.info('Cache clearing complete', {\n totalCaches: cacheNames.length,\n deletedCount: deletedCaches.length,\n deletedCaches,\n });\n }\n\n return deletedCaches;\n } catch (error) {\n logger.error('Error clearing caches', { error });\n throw error;\n }\n}\n\n/**\n * Clear a specific cache by exact name\n *\n * @param cacheName Exact cache name to delete\n * @returns Promise resolving to true if deleted, false otherwise\n */\nexport async function clearSpecificCache(cacheName: string): Promise<boolean> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return false;\n }\n\n try {\n const deleted = await caches.delete(cacheName);\n logger.info('Cache deletion attempt', { cacheName, deleted });\n return deleted;\n } catch (error) {\n logger.error('Error deleting cache', { cacheName, error });\n return false;\n }\n}\n\n/**\n * List all cache names currently stored\n *\n * @returns Promise resolving to array of cache names\n */\nexport async function listCaches(): Promise<string[]> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return [];\n }\n\n try {\n const cacheNames = await caches.keys();\n logger.debug('Available caches', { cacheNames });\n return cacheNames;\n } catch (error) {\n logger.error('Error listing caches', { error });\n return [];\n }\n}\n\n/**\n * Check if a specific cached response is valid JSON/binary (not HTML error page)\n *\n * @param cacheName Cache name to check\n * @param requestUrl URL/key to check\n * @returns Promise resolving to validation result\n */\nexport async function validateCachedResponse(\n cacheName: string,\n requestUrl: string\n): Promise<{\n exists: boolean;\n valid: boolean;\n contentType: string | null;\n isHtml: boolean;\n reason?: string;\n}> {\n if (!('caches' in window)) {\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: 'Cache API not available',\n };\n }\n\n try {\n const cache = await caches.open(cacheName);\n const response = await cache.match(requestUrl);\n\n if (!response) {\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: 'Not in cache',\n };\n }\n\n const contentType = response.headers.get('content-type');\n const isHtml =\n contentType?.includes('text/html') ||\n contentType?.includes('text/plain'); // Some servers return plain text HTML\n\n // For validation, we need to check the content\n const clonedResponse = response.clone();\n const text = await clonedResponse.text();\n const looksLikeHtml = text.trim().startsWith('<') || text.includes('<!DOCTYPE');\n\n const valid = Boolean(\n response.status === 200 &&\n !isHtml &&\n !looksLikeHtml &&\n contentType &&\n (contentType.includes('application/json') ||\n 
contentType.includes('application/octet-stream') ||\n contentType.includes('binary'))\n );\n\n return {\n exists: true,\n valid,\n contentType,\n isHtml: isHtml || looksLikeHtml,\n reason: valid\n ? 'Valid response'\n : `Invalid: status=${response.status}, contentType=${contentType}, isHtml=${isHtml || looksLikeHtml}`,\n };\n } catch (error) {\n logger.error('Error validating cached response', { cacheName, requestUrl, error });\n return {\n exists: false,\n valid: false,\n contentType: null,\n isHtml: false,\n reason: `Error: ${error}`,\n };\n }\n}\n\n/**\n * Scan all caches for potentially invalid cached responses\n *\n * @returns Promise resolving to report of invalid entries\n */\nexport async function scanForInvalidCaches(): Promise<{\n totalCaches: number;\n scannedEntries: number;\n invalidEntries: Array<{\n cacheName: string;\n url: string;\n reason: string;\n }>;\n}> {\n if (!('caches' in window)) {\n return { totalCaches: 0, scannedEntries: 0, invalidEntries: [] };\n }\n\n const invalidEntries: Array<{ cacheName: string; url: string; reason: string }> = [];\n let scannedEntries = 0;\n\n try {\n const cacheNames = await caches.keys();\n\n for (const cacheName of cacheNames) {\n if (!cacheName.toLowerCase().includes('transformers')) {\n continue; // Skip non-transformers caches\n }\n\n const cache = await caches.open(cacheName);\n const requests = await cache.keys();\n\n for (const request of requests) {\n scannedEntries++;\n const url = request.url;\n\n const validation = await validateCachedResponse(cacheName, url);\n\n if (validation.exists && !validation.valid) {\n invalidEntries.push({\n cacheName,\n url,\n reason: validation.reason || 'Unknown',\n });\n }\n }\n }\n\n logger.info('Cache scan complete', {\n totalCaches: cacheNames.length,\n scannedEntries,\n invalidCount: invalidEntries.length,\n });\n\n return {\n totalCaches: cacheNames.length,\n scannedEntries,\n invalidEntries,\n };\n } catch (error) {\n logger.error('Error scanning caches', { error });\n throw error;\n }\n}\n\n/**\n * Clear all caches and optionally prevent re-creation (development mode)\n *\n * WARNING: This is aggressive and should only be used in development.\n * It clears ALL browser caches, not just transformers.js.\n *\n * @param preventRecreation If true, sets env.useBrowserCache = false\n * @returns Promise resolving to number of deleted caches\n */\nexport async function nukeBrowserCaches(preventRecreation = false): Promise<number> {\n if (!('caches' in window)) {\n logger.warn('Cache API not available in this environment');\n return 0;\n }\n\n try {\n const cacheNames = await caches.keys();\n let deletedCount = 0;\n\n for (const cacheName of cacheNames) {\n const deleted = await caches.delete(cacheName);\n if (deleted) {\n deletedCount++;\n }\n }\n\n logger.info('All browser caches cleared', {\n totalDeleted: deletedCount,\n });\n\n if (preventRecreation) {\n // Import dynamically to avoid circular dependencies\n const { env } = await import('@huggingface/transformers');\n env.useBrowserCache = false;\n logger.warn('Browser cache creation disabled (env.useBrowserCache = false)');\n }\n\n return deletedCount;\n } catch (error) {\n logger.error('Error nuking caches', { error });\n throw error;\n }\n}\n","/**\n * Animation Graph Types\n *\n * Renderer-agnostic animation state machine with emotion and audio-driven blending.\n *\n * @module animation\n */\n\n/**\n * Emotion labels for animation blending\n * Note: These are the 8 emotion categories used for animation, separate from the\n * internal 
EmotionName type used by EmotionController.\n */\nexport type EmotionLabel =\n | 'angry'\n | 'calm'\n | 'disgust'\n | 'fearful'\n | 'happy'\n | 'neutral'\n | 'sad'\n | 'surprised';\n\n/**\n * High-level animation states\n */\nexport type AnimationStateName = 'idle' | 'listening' | 'thinking' | 'speaking';\n\n/**\n * Events that trigger state transitions\n */\nexport type AnimationTrigger =\n | 'user_speech_start'\n | 'user_speech_end'\n | 'transcript_ready'\n | 'ai_response_start'\n | 'ai_audio_start'\n | 'ai_response_end'\n | 'timeout'\n | 'interrupt';\n\n/**\n * Animation layer types for blending\n */\nexport type AnimationLayer = 'base' | 'emotion' | 'gesture' | 'additive';\n\n/**\n * A single animation clip reference\n */\nexport interface AnimationClip {\n /** Unique identifier for the clip */\n name: string;\n /** Animation layer this clip belongs to */\n layer: AnimationLayer;\n /** Whether this clip loops */\n loop: boolean;\n /** Default duration in seconds (can be overridden by actual clip) */\n duration?: number;\n}\n\n/**\n * Blend weight for an animation clip\n */\nexport interface BlendWeight {\n /** Clip name */\n clip: string;\n /** Weight 0-1 */\n weight: number;\n /** Playback speed multiplier */\n speed: number;\n /** Current time in the animation (0-1 normalized) */\n time: number;\n}\n\n/**\n * Animation state definition\n */\nexport interface AnimationState {\n /** State name */\n name: AnimationStateName;\n /** Base animation clips for this state */\n baseClips: string[];\n /** Blend weights for base clips */\n baseWeights: number[];\n /** Whether emotion overlay is enabled in this state */\n emotionBlendEnabled: boolean;\n /** Whether gesture layer is enabled in this state */\n gestureBlendEnabled: boolean;\n /** Timeout in ms to auto-transition (0 = no timeout) */\n timeout: number;\n /** State to transition to on timeout */\n timeoutTarget?: AnimationStateName;\n}\n\n/**\n * Transition between states\n */\nexport interface Transition {\n /** Source state */\n from: AnimationStateName;\n /** Target state */\n to: AnimationStateName;\n /** Event that triggers this transition */\n trigger: AnimationTrigger;\n /** Blend duration in ms */\n duration: number;\n /** Optional condition function */\n condition?: () => boolean;\n}\n\n/**\n * Emotion to animation mapping\n */\nexport interface EmotionAnimationMap {\n /** Emotion label */\n emotion: EmotionLabel;\n /** Animation clip to blend */\n clip: string;\n /** Maximum blend weight for this emotion */\n maxWeight: number;\n /** Blend speed (weight change per second) */\n blendSpeed: number;\n}\n\n/**\n * Configuration for AnimationGraph\n */\nexport interface AnimationGraphConfig {\n /** Available animation states */\n states: AnimationState[];\n /** Transitions between states */\n transitions: Transition[];\n /** Emotion to animation mappings */\n emotionMappings: EmotionAnimationMap[];\n /** Gesture clips for audio-driven animation */\n gestureClips: string[];\n /** Initial state */\n initialState: AnimationStateName;\n /** Global blend speed for state transitions (weight/sec) */\n transitionBlendSpeed: number;\n /** Minimum audio energy to trigger gestures (0-1) */\n gestureThreshold: number;\n /** Gesture intensity multiplier */\n gestureIntensity: number;\n}\n\n/**\n * Current output of the animation graph\n */\nexport interface AnimationOutput {\n /** Current state name */\n state: AnimationStateName;\n /** All blend weights to apply */\n blendWeights: BlendWeight[];\n /** Active emotion (if any) */\n 
activeEmotion: EmotionLabel | null;\n /** Current gesture intensity (0-1) */\n gestureIntensity: number;\n /** Whether currently transitioning between states */\n isTransitioning: boolean;\n /** Transition progress (0-1) if transitioning */\n transitionProgress: number;\n}\n\n/**\n * Events emitted by AnimationGraph\n */\nexport type AnimationGraphEvents = {\n /** State changed */\n 'state.change': {\n from: AnimationStateName;\n to: AnimationStateName;\n trigger: AnimationTrigger;\n };\n /** Transition started */\n 'transition.start': {\n from: AnimationStateName;\n to: AnimationStateName;\n duration: number;\n };\n /** Transition completed */\n 'transition.end': {\n state: AnimationStateName;\n };\n /** Emotion changed */\n 'emotion.change': {\n emotion: EmotionLabel | null;\n confidence: number;\n };\n /** Animation output updated (every frame) */\n 'output.update': AnimationOutput;\n /** Index signature for EventEmitter compatibility */\n [key: string]: unknown;\n};\n\n/**\n * Default animation graph configuration\n */\nexport const DEFAULT_ANIMATION_CONFIG: AnimationGraphConfig = {\n initialState: 'idle',\n transitionBlendSpeed: 4.0, // Full blend in 250ms\n gestureThreshold: 0.1,\n gestureIntensity: 1.0,\n\n states: [\n {\n name: 'idle',\n baseClips: ['idle_breathe'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: false,\n timeout: 0,\n },\n {\n name: 'listening',\n baseClips: ['idle_attentive'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: false,\n timeout: 10000, // 10s timeout back to idle\n timeoutTarget: 'idle',\n },\n {\n name: 'thinking',\n baseClips: ['thinking_look_up', 'thinking_hand_chin'],\n baseWeights: [0.6, 0.4],\n emotionBlendEnabled: false,\n gestureBlendEnabled: false,\n timeout: 5000, // 5s max thinking\n timeoutTarget: 'idle',\n },\n {\n name: 'speaking',\n baseClips: ['talking_idle'],\n baseWeights: [1.0],\n emotionBlendEnabled: true,\n gestureBlendEnabled: true,\n timeout: 0,\n },\n ],\n\n transitions: [\n // User starts speaking\n { from: 'idle', to: 'listening', trigger: 'user_speech_start', duration: 300 },\n { from: 'speaking', to: 'listening', trigger: 'user_speech_start', duration: 200 }, // Interrupt\n\n // User stops speaking, processing\n { from: 'listening', to: 'thinking', trigger: 'transcript_ready', duration: 400 },\n\n // AI starts responding\n { from: 'thinking', to: 'speaking', trigger: 'ai_audio_start', duration: 300 },\n { from: 'idle', to: 'speaking', trigger: 'ai_audio_start', duration: 400 },\n\n // AI done\n { from: 'speaking', to: 'idle', trigger: 'ai_response_end', duration: 500 },\n\n // Timeouts\n { from: 'listening', to: 'idle', trigger: 'timeout', duration: 600 },\n { from: 'thinking', to: 'idle', trigger: 'timeout', duration: 400 },\n\n // Interrupts\n { from: 'speaking', to: 'listening', trigger: 'interrupt', duration: 150 },\n ],\n\n emotionMappings: [\n { emotion: 'happy', clip: 'emotion_happy', maxWeight: 0.7, blendSpeed: 2.0 },\n { emotion: 'sad', clip: 'emotion_sad', maxWeight: 0.6, blendSpeed: 1.5 },\n { emotion: 'angry', clip: 'emotion_angry', maxWeight: 0.5, blendSpeed: 2.5 },\n { emotion: 'fearful', clip: 'emotion_fear', maxWeight: 0.5, blendSpeed: 2.0 },\n { emotion: 'surprised', clip: 'emotion_surprised', maxWeight: 0.6, blendSpeed: 2.5 },\n { emotion: 'calm', clip: 'emotion_calm', maxWeight: 0.3, blendSpeed: 1.0 },\n { emotion: 'disgust', clip: 'emotion_disgust', maxWeight: 0.4, blendSpeed: 2.0 },\n { emotion: 'neutral', clip: 'emotion_neutral', maxWeight: 0.0, 
blendSpeed: 1.0 },\n ],\n\n gestureClips: ['gesture_hand_small', 'gesture_hand_medium', 'gesture_hand_large'],\n};\n","/**\n * Animation Graph\n *\n * State machine for character animation with emotion and audio-driven blending.\n * Renderer-agnostic - outputs blend weights that any 3D engine can consume.\n *\n * @example\n * ```typescript\n * import { AnimationGraph, DEFAULT_ANIMATION_CONFIG } from '@omote/core';\n *\n * const graph = new AnimationGraph(DEFAULT_ANIMATION_CONFIG);\n *\n * // Connect to voice pipeline\n * graph.on('output.update', (output) => {\n * // Apply blend weights to your 3D character\n * for (const { clip, weight } of output.blendWeights) {\n * mixer.getAction(clip).setEffectiveWeight(weight);\n * }\n * });\n *\n * // Drive from voice state\n * voiceState.on('listening', () => graph.trigger('user_speech_start'));\n * voiceState.on('thinking', () => graph.trigger('transcript_ready'));\n * voiceState.on('speaking', () => graph.trigger('ai_audio_start'));\n *\n * // Drive from emotion detection\n * emotion.on('result', ({ emotion, confidence }) => {\n * graph.setEmotion(emotion, confidence);\n * });\n *\n * // Update every frame\n * function animate(deltaTime: number) {\n * graph.update(deltaTime);\n * }\n * ```\n *\n * @module animation\n */\n\nimport { EventEmitter } from '../events';\nimport type {\n EmotionLabel,\n AnimationGraphConfig,\n AnimationGraphEvents,\n AnimationTrigger,\n AnimationOutput,\n AnimationState,\n AnimationStateName,\n BlendWeight,\n Transition,\n} from './types';\nimport { DEFAULT_ANIMATION_CONFIG } from './types';\n\n/**\n * Animation state machine with smooth blending\n */\nexport class AnimationGraph extends EventEmitter<AnimationGraphEvents> {\n private config: AnimationGraphConfig;\n private currentState: AnimationState;\n private previousState: AnimationState | null = null;\n\n // Transition state\n private isTransitioning: boolean = false;\n private transitionProgress: number = 0;\n private transitionDuration: number = 0;\n private transitionStartTime: number = 0;\n\n // Emotion state\n private currentEmotion: EmotionLabel | null = null;\n private emotionConfidence: number = 0;\n private emotionBlendWeight: number = 0;\n private targetEmotionWeight: number = 0;\n\n // Gesture state (audio-driven)\n private audioEnergy: number = 0;\n private gestureWeight: number = 0;\n private currentGestureClip: number = 0;\n\n // Timing\n private stateEnterTime: number = 0;\n private lastUpdateTime: number = 0;\n\n // Blend weights cache\n private cachedOutput: AnimationOutput;\n\n constructor(config: Partial<AnimationGraphConfig> = {}) {\n super();\n this.config = { ...DEFAULT_ANIMATION_CONFIG, ...config };\n\n // Find initial state\n const initialState = this.config.states.find(\n (s) => s.name === this.config.initialState\n );\n if (!initialState) {\n throw new Error(`Initial state '${this.config.initialState}' not found`);\n }\n this.currentState = initialState;\n this.stateEnterTime = Date.now();\n this.lastUpdateTime = Date.now();\n\n // Initialize cached output\n this.cachedOutput = this.computeOutput();\n }\n\n /**\n * Get current state name\n */\n get state(): AnimationStateName {\n return this.currentState.name;\n }\n\n /**\n * Get current animation output\n */\n get output(): AnimationOutput {\n return this.cachedOutput;\n }\n\n /**\n * Trigger an animation event (may cause state transition)\n */\n trigger(event: AnimationTrigger): boolean {\n // Find matching transition\n const transition = this.config.transitions.find(\n (t) =>\n t.from 
=== this.currentState.name &&\n t.trigger === event &&\n (!t.condition || t.condition())\n );\n\n if (!transition) {\n return false;\n }\n\n this.startTransition(transition, event);\n return true;\n }\n\n /**\n * Set current emotion (from DistilHuBERT or manual)\n */\n setEmotion(emotion: EmotionLabel, confidence: number): void {\n const prevEmotion = this.currentEmotion;\n\n this.currentEmotion = emotion;\n this.emotionConfidence = Math.max(0, Math.min(1, confidence));\n\n // Find emotion mapping\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === emotion\n );\n if (mapping && this.currentState.emotionBlendEnabled) {\n this.targetEmotionWeight = mapping.maxWeight * this.emotionConfidence;\n } else {\n this.targetEmotionWeight = 0;\n }\n\n if (prevEmotion !== emotion) {\n this.emit('emotion.change', { emotion, confidence });\n }\n }\n\n /**\n * Clear current emotion\n */\n clearEmotion(): void {\n this.currentEmotion = null;\n this.emotionConfidence = 0;\n this.targetEmotionWeight = 0;\n this.emit('emotion.change', { emotion: null, confidence: 0 });\n }\n\n /**\n * Set audio energy for gesture animation (0-1)\n */\n setAudioEnergy(energy: number): void {\n this.audioEnergy = Math.max(0, Math.min(1, energy));\n }\n\n /**\n * Force transition to a specific state\n */\n setState(stateName: AnimationStateName, blendDuration: number = 300): void {\n const targetState = this.config.states.find((s) => s.name === stateName);\n if (!targetState) {\n console.warn(`[AnimationGraph] State '${stateName}' not found`);\n return;\n }\n\n if (targetState.name === this.currentState.name && !this.isTransitioning) {\n return;\n }\n\n // Create a manual transition\n const manualTransition: Transition = {\n from: this.currentState.name,\n to: stateName,\n trigger: 'timeout', // Arbitrary, not used for manual\n duration: blendDuration,\n };\n\n this.startTransition(manualTransition, 'timeout');\n }\n\n /**\n * Update animation graph (call every frame)\n * @param deltaMs Time since last update in milliseconds\n */\n update(deltaMs?: number): AnimationOutput {\n const now = Date.now();\n const dt = deltaMs ?? 
now - this.lastUpdateTime;\n this.lastUpdateTime = now;\n\n const dtSeconds = dt / 1000;\n\n // Update transition\n if (this.isTransitioning) {\n this.updateTransition(dtSeconds);\n }\n\n // Check timeout\n this.checkTimeout(now);\n\n // Update emotion blend\n this.updateEmotionBlend(dtSeconds);\n\n // Update gesture\n this.updateGesture(dtSeconds);\n\n // Compute and cache output\n this.cachedOutput = this.computeOutput();\n this.emit('output.update', this.cachedOutput);\n\n return this.cachedOutput;\n }\n\n /**\n * Reset to initial state\n */\n reset(): void {\n const initialState = this.config.states.find(\n (s) => s.name === this.config.initialState\n );\n if (initialState) {\n this.currentState = initialState;\n this.previousState = null;\n this.isTransitioning = false;\n this.transitionProgress = 0;\n this.stateEnterTime = Date.now();\n this.emotionBlendWeight = 0;\n this.gestureWeight = 0;\n this.cachedOutput = this.computeOutput();\n }\n }\n\n /**\n * Get all clip names used by this graph\n */\n getRequiredClips(): string[] {\n const clips = new Set<string>();\n\n // Base clips from all states\n for (const state of this.config.states) {\n for (const clip of state.baseClips) {\n clips.add(clip);\n }\n }\n\n // Emotion clips\n for (const mapping of this.config.emotionMappings) {\n clips.add(mapping.clip);\n }\n\n // Gesture clips\n for (const clip of this.config.gestureClips) {\n clips.add(clip);\n }\n\n return Array.from(clips);\n }\n\n // ─────────────────────────────────────────────────────────────────\n // Private methods\n // ─────────────────────────────────────────────────────────────────\n\n private startTransition(transition: Transition, event: AnimationTrigger): void {\n const targetState = this.config.states.find(\n (s) => s.name === transition.to\n );\n if (!targetState) {\n console.warn(`[AnimationGraph] Target state '${transition.to}' not found`);\n return;\n }\n\n const fromState = this.currentState.name;\n\n this.previousState = this.currentState;\n this.currentState = targetState;\n this.isTransitioning = true;\n this.transitionProgress = 0;\n this.transitionDuration = transition.duration;\n this.transitionStartTime = Date.now();\n this.stateEnterTime = Date.now();\n\n // Update emotion target based on new state\n if (!this.currentState.emotionBlendEnabled) {\n this.targetEmotionWeight = 0;\n }\n\n this.emit('state.change', {\n from: fromState,\n to: targetState.name,\n trigger: event,\n });\n\n this.emit('transition.start', {\n from: fromState,\n to: targetState.name,\n duration: transition.duration,\n });\n }\n\n private updateTransition(dtSeconds: number): void {\n if (!this.isTransitioning || this.transitionDuration <= 0) {\n this.isTransitioning = false;\n this.transitionProgress = 1;\n return;\n }\n\n // Linear progress based on time\n const elapsed = Date.now() - this.transitionStartTime;\n this.transitionProgress = Math.min(1, elapsed / this.transitionDuration);\n\n if (this.transitionProgress >= 1) {\n this.isTransitioning = false;\n this.transitionProgress = 1;\n this.previousState = null;\n this.emit('transition.end', { state: this.currentState.name });\n }\n }\n\n private checkTimeout(now: number): void {\n if (this.isTransitioning) return;\n if (this.currentState.timeout <= 0) return;\n\n const elapsed = now - this.stateEnterTime;\n if (elapsed >= this.currentState.timeout) {\n this.trigger('timeout');\n }\n }\n\n private updateEmotionBlend(dtSeconds: number): void {\n if (!this.currentEmotion) {\n // Decay emotion weight\n this.emotionBlendWeight = 
Math.max(\n 0,\n this.emotionBlendWeight - dtSeconds * 2.0\n );\n return;\n }\n\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === this.currentEmotion\n );\n const blendSpeed = mapping?.blendSpeed ?? 2.0;\n\n // Smoothly interpolate to target\n const diff = this.targetEmotionWeight - this.emotionBlendWeight;\n const maxChange = blendSpeed * dtSeconds;\n\n if (Math.abs(diff) <= maxChange) {\n this.emotionBlendWeight = this.targetEmotionWeight;\n } else {\n this.emotionBlendWeight += Math.sign(diff) * maxChange;\n }\n }\n\n private updateGesture(dtSeconds: number): void {\n if (!this.currentState.gestureBlendEnabled) {\n this.gestureWeight = Math.max(0, this.gestureWeight - dtSeconds * 4.0);\n return;\n }\n\n // Map audio energy to gesture weight\n const targetGesture =\n this.audioEnergy > this.config.gestureThreshold\n ? this.audioEnergy * this.config.gestureIntensity\n : 0;\n\n // Smooth the gesture weight\n const diff = targetGesture - this.gestureWeight;\n const blendSpeed = 8.0; // Fast response\n const maxChange = blendSpeed * dtSeconds;\n\n if (Math.abs(diff) <= maxChange) {\n this.gestureWeight = targetGesture;\n } else {\n this.gestureWeight += Math.sign(diff) * maxChange;\n }\n\n // Select gesture clip based on intensity\n const clipCount = this.config.gestureClips.length;\n if (clipCount > 0) {\n this.currentGestureClip = Math.min(\n clipCount - 1,\n Math.floor(this.gestureWeight * clipCount)\n );\n }\n }\n\n private computeOutput(): AnimationOutput {\n const blendWeights: BlendWeight[] = [];\n\n // Smooth transition weight (ease in-out)\n const t = this.transitionProgress;\n const transitionWeight = t * t * (3 - 2 * t); // smoothstep\n\n // Previous state clips (fading out)\n if (this.previousState && this.isTransitioning) {\n const fadeOut = 1 - transitionWeight;\n for (let i = 0; i < this.previousState.baseClips.length; i++) {\n const clip = this.previousState.baseClips[i];\n const baseWeight = this.previousState.baseWeights[i] ?? 1.0;\n blendWeights.push({\n clip,\n weight: baseWeight * fadeOut,\n speed: 1.0,\n time: 0,\n });\n }\n }\n\n // Current state clips (fading in or full)\n const fadeIn = this.isTransitioning ? transitionWeight : 1.0;\n for (let i = 0; i < this.currentState.baseClips.length; i++) {\n const clip = this.currentState.baseClips[i];\n const baseWeight = this.currentState.baseWeights[i] ?? 1.0;\n blendWeights.push({\n clip,\n weight: baseWeight * fadeIn,\n speed: 1.0,\n time: 0,\n });\n }\n\n // Emotion overlay\n if (this.currentEmotion && this.emotionBlendWeight > 0.01) {\n const mapping = this.config.emotionMappings.find(\n (m) => m.emotion === this.currentEmotion\n );\n if (mapping) {\n blendWeights.push({\n clip: mapping.clip,\n weight: this.emotionBlendWeight,\n speed: 1.0,\n time: 0,\n });\n }\n }\n\n // Gesture layer\n if (this.gestureWeight > 0.01 && this.config.gestureClips.length > 0) {\n const gestureClip = this.config.gestureClips[this.currentGestureClip];\n blendWeights.push({\n clip: gestureClip,\n weight: this.gestureWeight,\n speed: 1.0 + this.audioEnergy * 0.5, // Faster with more energy\n time: 0,\n });\n }\n\n return {\n state: this.currentState.name,\n blendWeights,\n activeEmotion: this.emotionBlendWeight > 0.01 ? 
this.currentEmotion : null,\n gestureIntensity: this.gestureWeight,\n isTransitioning: this.isTransitioning,\n transitionProgress: this.transitionProgress,\n };\n }\n}\n","/**\n * Audio Energy Analysis\n *\n * Utilities for extracting energy/loudness from audio for gesture animation.\n *\n * @module animation\n */\n\n/**\n * Calculate RMS (Root Mean Square) energy from audio samples\n * @param samples Audio samples (Float32Array, normalized -1 to 1)\n * @returns RMS energy value (0 to 1)\n */\nexport function calculateRMS(samples: Float32Array): number {\n if (samples.length === 0) return 0;\n\n let sumSquares = 0;\n for (let i = 0; i < samples.length; i++) {\n sumSquares += samples[i] * samples[i];\n }\n\n return Math.sqrt(sumSquares / samples.length);\n}\n\n/**\n * Calculate peak amplitude from audio samples\n * @param samples Audio samples (Float32Array, normalized -1 to 1)\n * @returns Peak amplitude (0 to 1)\n */\nexport function calculatePeak(samples: Float32Array): number {\n let peak = 0;\n for (let i = 0; i < samples.length; i++) {\n const abs = Math.abs(samples[i]);\n if (abs > peak) peak = abs;\n }\n return peak;\n}\n\n/**\n * Smoothed energy analyzer for gesture animation\n */\nexport class AudioEnergyAnalyzer {\n private smoothedRMS: number = 0;\n private smoothedPeak: number = 0;\n private readonly smoothingFactor: number;\n private readonly noiseFloor: number;\n\n /**\n * @param smoothingFactor How much to smooth (0 = no smoothing, 1 = infinite smoothing). Default 0.85\n * @param noiseFloor Minimum energy threshold to consider as signal. Default 0.01\n */\n constructor(smoothingFactor: number = 0.85, noiseFloor: number = 0.01) {\n this.smoothingFactor = Math.max(0, Math.min(0.99, smoothingFactor));\n this.noiseFloor = noiseFloor;\n }\n\n /**\n * Process audio samples and return smoothed energy values\n * @param samples Audio samples (Float32Array)\n * @returns Object with rms and peak values\n */\n process(samples: Float32Array): { rms: number; peak: number; energy: number } {\n const instantRMS = calculateRMS(samples);\n const instantPeak = calculatePeak(samples);\n\n // Apply noise gate\n const gatedRMS = instantRMS > this.noiseFloor ? instantRMS : 0;\n const gatedPeak = instantPeak > this.noiseFloor ? 
instantPeak : 0;\n\n // Smooth the values (exponential moving average)\n // Attack fast (when getting louder), release slower\n if (gatedRMS > this.smoothedRMS) {\n // Fast attack\n this.smoothedRMS =\n this.smoothedRMS * 0.5 + gatedRMS * 0.5;\n } else {\n // Slow release\n this.smoothedRMS =\n this.smoothedRMS * this.smoothingFactor +\n gatedRMS * (1 - this.smoothingFactor);\n }\n\n if (gatedPeak > this.smoothedPeak) {\n this.smoothedPeak = this.smoothedPeak * 0.3 + gatedPeak * 0.7;\n } else {\n this.smoothedPeak =\n this.smoothedPeak * this.smoothingFactor +\n gatedPeak * (1 - this.smoothingFactor);\n }\n\n // Combined energy (weighted average of RMS and peak)\n // RMS is more stable, peak catches transients\n const energy = this.smoothedRMS * 0.7 + this.smoothedPeak * 0.3;\n\n return {\n rms: this.smoothedRMS,\n peak: this.smoothedPeak,\n energy: Math.min(1, energy * 2), // Scale up and clamp\n };\n }\n\n /**\n * Reset analyzer state\n */\n reset(): void {\n this.smoothedRMS = 0;\n this.smoothedPeak = 0;\n }\n\n /**\n * Get current smoothed RMS value\n */\n get rms(): number {\n return this.smoothedRMS;\n }\n\n /**\n * Get current smoothed peak value\n */\n get peak(): number {\n return this.smoothedPeak;\n }\n}\n\n/**\n * Extract emphasis points from audio (for gesture timing)\n *\n * Detects sudden increases in energy that correspond to speech emphasis.\n */\nexport class EmphasisDetector {\n private energyHistory: number[] = [];\n private readonly historySize: number;\n private readonly emphasisThreshold: number;\n\n /**\n * @param historySize Number of frames to track. Default 10\n * @param emphasisThreshold Minimum energy increase to count as emphasis. Default 0.15\n */\n constructor(historySize: number = 10, emphasisThreshold: number = 0.15) {\n this.historySize = historySize;\n this.emphasisThreshold = emphasisThreshold;\n }\n\n /**\n * Process energy value and detect emphasis\n * @param energy Current energy value (0-1)\n * @returns Object with isEmphasis flag and emphasisStrength\n */\n process(energy: number): { isEmphasis: boolean; emphasisStrength: number } {\n this.energyHistory.push(energy);\n if (this.energyHistory.length > this.historySize) {\n this.energyHistory.shift();\n }\n\n if (this.energyHistory.length < 3) {\n return { isEmphasis: false, emphasisStrength: 0 };\n }\n\n // Calculate average of previous frames (excluding current)\n const prevFrames = this.energyHistory.slice(0, -1);\n const avgPrev = prevFrames.reduce((a, b) => a + b, 0) / prevFrames.length;\n\n // Compare current to average\n const increase = energy - avgPrev;\n const isEmphasis = increase > this.emphasisThreshold;\n\n return {\n isEmphasis,\n emphasisStrength: isEmphasis ? 
Math.min(1, increase / 0.3) : 0,\n };\n }\n\n /**\n * Reset detector state\n */\n reset(): void {\n this.energyHistory = [];\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAkBO,IAAM,oBAAN,MAAwB;AAAA,EAS7B,YACU,QACR,SAAkC,CAAC,GACnC;AAFQ;AARV,SAAQ,SAA6B;AACrC,SAAQ,UAA+B;AACvC,SAAQ,YAAwC;AAChD,SAAQ,SAAuB,IAAI,aAAa,CAAC;AACjD,SAAQ,eAAe;AACvB,SAAQ,oBAAoB;AAM1B,SAAK,SAAS;AAAA,MACZ,YAAY,OAAO,cAAc;AAAA,MACjC,WAAW,OAAO,aAAa;AAAA,IACjC;AAAA,EACF;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,OAAO,cAAc,eAAe,CAAC,CAAC,UAAU,cAAc;AAAA,EACvE;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,aAAa;AACrB,WAAK,OAAO,KAAK,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AACD;AAAA,IACF;AAEA,QAAI,KAAK,aAAc;AAEvB,QAAI;AACF,WAAK,SAAS,MAAM,UAAU,aAAa,aAAa;AAAA,QACtD,OAAO;AAAA,UACL,YAAY,EAAE,OAAO,KAAK,OAAO,WAAW;AAAA,UAC5C,cAAc;AAAA,UACd,kBAAkB;AAAA,UAClB,kBAAkB;AAAA,UAClB,iBAAiB;AAAA,QACnB;AAAA,MACF,CAAC;AAED,WAAK,UAAU,IAAI,aAAa,EAAE,YAAY,KAAK,OAAO,WAAW,CAAC;AAGtE,UAAI,KAAK,QAAQ,UAAU,aAAa;AACtC,cAAM,KAAK,QAAQ,OAAO;AAAA,MAC5B;AAEA,YAAM,SAAS,KAAK,QAAQ,wBAAwB,KAAK,MAAM;AAG/D,WAAK,YAAY,KAAK,QAAQ,sBAAsB,MAAM,GAAG,CAAC;AAE9D,WAAK,UAAU,iBAAiB,CAAC,MAAM;AACrC,cAAM,QAAQ,EAAE,YAAY,eAAe,CAAC;AAG5C,YAAI,MAAM;AACV,YAAI,OAAO;AACX,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAM,MAAM,KAAK,IAAI,MAAM,CAAC,CAAC;AAC7B,iBAAO,MAAM,CAAC,IAAI,MAAM,CAAC;AACzB,cAAI,MAAM,KAAM,QAAO;AAAA,QACzB;AACA,cAAM,KAAK,KAAK,MAAM,MAAM,MAAM;AAElC,aAAK,OAAO,KAAK,eAAe,EAAE,KAAK,KAAK,CAAC;AAG7C,cAAM,YAAY,IAAI,aAAa,KAAK,OAAO,SAAS,MAAM,MAAM;AACpE,kBAAU,IAAI,KAAK,MAAM;AACzB,kBAAU,IAAI,OAAO,KAAK,OAAO,MAAM;AACvC,aAAK,SAAS;AAGd,YAAI,aAAa;AACjB,eAAO,KAAK,OAAO,UAAU,KAAK,OAAO,WAAW;AAClD,gBAAM,QAAQ,KAAK,OAAO,MAAM,GAAG,KAAK,OAAO,SAAS;AACxD,eAAK,SAAS,KAAK,OAAO,MAAM,KAAK,OAAO,SAAS;AAErD,gBAAM,MAAM,KAAK,aAAa,KAAK;AACnC,eAAK,OAAO,KAAK,eAAe;AAAA,YAC9B;AAAA,YACA,WAAW,YAAY,IAAI;AAAA,UAC7B,CAAC;AACD;AAAA,QACF;AAEA,YAAI,aAAa,KAAK,CAAC,KAAK,mBAAmB;AAC7C,kBAAQ,IAAI,8CAA8C,UAAU;AACpE,eAAK,oBAAoB;AAAA,QAC3B;AAAA,MACF;AAEA,aAAO,QAAQ,KAAK,SAAS;AAC7B,WAAK,UAAU,QAAQ,KAAK,QAAQ,WAAW;AAE/C,WAAK,eAAe;AACpB,cAAQ,IAAI,yDAAyD,KAAK,QAAQ,KAAK;AAAA,IACzF,SAAS,KAAK;AACZ,WAAK,OAAO,KAAK,SAAS;AAAA,QACxB,MAAM;AAAA,QACN,SAAU,IAAc;AAAA,QACxB,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,OAAa;AACX,QAAI,KAAK,WAAW;AAClB,WAAK,UAAU,WAAW;AAC1B,WAAK,YAAY;AAAA,IACnB;AAEA,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAM;AACnB,WAAK,UAAU;AAAA,IACjB;AAEA,QAAI,KAAK,QAAQ;AACf,WAAK,OAAO,UAAU,EAAE,QAAQ,CAAC,MAAM,EAAE,KAAK,CAAC;AAC/C,WAAK,SAAS;AAAA,IAChB;AAEA,SAAK,SAAS,IAAI,aAAa,CAAC;AAChC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEQ,aAAa,SAAmC;AACtD,UAAM,MAAM,IAAI,WAAW,QAAQ,MAAM;AACzC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC;AAC9C,UAAI,CAAC,IAAI,IAAI,IAAI,IAAI,QAAS,IAAI;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AACF;;;ACzJO,IAAM,aAAN,MAAiB;AAAA,EAKtB,YAA6B,MAAc;AAAd;AAH7B,SAAQ,aAAa;AACrB,SAAQ,SAAS;AAGf,SAAK,SAAS,IAAI,aAAa,IAAI;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAAuB;AAC3B,aAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,WAAK,OAAO,KAAK,UAAU,IAAI,IAAI,CAAC,IAAI;AACxC,WAAK,cAAc,KAAK,aAAa,KAAK,KAAK;AAE/C,UAAI,KAAK,eAAe,GAAG;AACzB,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAA6B;AACtC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,WAAK,OAAO,KAAK,UAAU,IAAI,QAAQ,CAAC;AACxC,WAAK,cAAc,KAAK,aAAa,KAAK,KAAK;AAE/C,UAAI,KAAK,eAAe,GAAG;AACzB,aAAK,SAAS;AAAA,MAChB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAA4B;AAC1B,QAAI,CAAC,KAAK,OAAQ,QAAO;AAEzB,UAAM,SAAS,IAAI,aAAa,KAAK,IAAI;AAGzC,UAAM,YAAY,KAAK,OAAO,SAAS,KAAK,UAAU;AACtD,WAAO,IAAI,WAAW,CAAC;AAGvB,UAAM,a
AAa,KAAK,OAAO,SAAS,GAAG,KAAK,UAAU;AAC1D,WAAO,IAAI,YAAY,UAAU,MAAM;AAEvC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAmB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,QAAI,KAAK,OAAQ,QAAO;AACxB,WAAO,KAAK,aAAa,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,OAAO,KAAK,CAAC;AAClB,SAAK,aAAa;AAClB,SAAK,SAAS;AAAA,EAChB;AACF;;;ACnEO,IAAM,iBAAN,MAAqB;AAAA,EAM1B,YAA6B,UAAiC,CAAC,GAAG;AAArC;AAL7B,SAAQ,UAA+B;AACvC,SAAQ,eAAe;AACvB,SAAQ,mBAAiF,CAAC;AAC1F,SAAQ,YAAY;AAAA,EAE+C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQnE,MAAM,aAA4B;AAEhC,YAAQ,IAAI,gDAAgD;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,SAAwB;AAC5B,UAAM,KAAK,cAAc;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBAAuC;AACnD,QAAI,KAAK,WAAW,KAAK,QAAQ,UAAU,UAAU;AACnD,aAAO,KAAK;AAAA,IACd;AAEA,UAAM,aAAa,KAAK,QAAQ,cAAc;AAC9C,SAAK,UAAU,IAAI,aAAa,EAAE,WAAW,CAAC;AAG9C,QAAI,KAAK,QAAQ,UAAU,aAAa;AACtC,YAAM,KAAK,QAAQ,OAAO;AAAA,IAC5B;AAEA,YAAQ,IAAI,gDAAgD,UAAU,IAAI;AAC1E,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,SAAS,WAA0C;AAEvD,UAAM,MAAM,MAAM,KAAK,cAAc;AACrC,UAAM,WAAW,KAAK,QAAQ,YAAY;AAK1C,QAAI,CAAC,KAAK,WAAW;AACnB,WAAK,eAAe,IAAI,cAAc;AACtC,WAAK,YAAY;AAAA,IACnB;AAGA,UAAM,cAAc,IAAI,aAAa,UAAU,UAAU,QAAQ,IAAI,UAAU;AAC/E,gBAAY,eAAe,CAAC,EAAE,IAAI,SAAS;AAG3C,UAAM,WAAW,IAAI,WAAW;AAChC,aAAS,KAAK,QAAQ;AACtB,aAAS,QAAQ,IAAI,WAAW;AAGhC,UAAM,SAAS,IAAI,mBAAmB;AACtC,WAAO,SAAS;AAChB,WAAO,QAAQ,QAAQ;AAGvB,UAAM,eAAe,KAAK;AAC1B,WAAO,MAAM,YAAY;AAGzB,SAAK,iBAAiB,KAAK,EAAE,QAAQ,SAAS,CAAC;AAG/C,UAAM,WAAW,UAAU,SAAS,IAAI;AACxC,SAAK,eAAe,eAAe;AAEnC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,iBAAyB;AACvB,QAAI,CAAC,KAAK,QAAS,QAAO;AAC1B,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,QAAI,CAAC,KAAK,WAAW,CAAC,KAAK,UAAW,QAAO;AAC7C,WAAO,KAAK,QAAQ,eAAe,KAAK;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,UAAU,YAAoB,IAAmB;AACrD,QAAI,CAAC,KAAK,WAAW,KAAK,iBAAiB,WAAW,GAAG;AACvD;AAAA,IACF;AAEA,UAAM,MAAM,KAAK;AACjB,UAAM,cAAc,IAAI;AACxB,UAAM,aAAa,YAAY;AAG/B,eAAW,EAAE,QAAQ,SAAS,KAAK,KAAK,kBAAkB;AACxD,UAAI;AAEF,iBAAS,KAAK,eAAe,SAAS,KAAK,OAAO,WAAW;AAC7D,iBAAS,KAAK,wBAAwB,GAAK,cAAc,UAAU;AAGnE,eAAO,KAAK,cAAc,UAAU;AAAA,MACtC,SAAS,KAAK;AAAA,MAEd;AAAA,IACF;AAGA,SAAK,mBAAmB,CAAC;AACzB,SAAK,YAAY;AACjB,SAAK,eAAe;AAGpB,UAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,SAAS,CAAC;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AAEZ,QAAI,KAAK,SAAS;AAChB,YAAM,MAAM,KAAK,QAAQ;AACzB,iBAAW,EAAE,QAAQ,SAAS,KAAK,KAAK,kBAAkB;AACxD,YAAI;AACF,mBAAS,KAAK,eAAe,GAAG,GAAG;AACnC,iBAAO,KAAK,GAAG;AAAA,QACjB,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AACA,SAAK,eAAe;AACpB,SAAK,YAAY;AACjB,SAAK,mBAAmB,CAAC;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAM;AACnB,WAAK,UAAU;AAAA,IACjB;AACA,SAAK,mBAAmB,CAAC;AACzB,SAAK,YAAY;AAAA,EACnB;AACF;;;ACjMO,IAAM,sBAAN,MAA0B;AAAA,EAI/B,YAA6B,UAAsC,CAAC,GAAG;AAA1C;AAH7B,SAAQ,aAA2B,CAAC;AAIlC,UAAM,WAAW,QAAQ,oBAAoB;AAC7C,UAAM,aAAa,QAAQ,cAAc;AAGzC,SAAK,cAAe,WAAW,MAAQ,aAAa;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,IAAI,OAAuC;AAEzC,SAAK,WAAW,KAAK,KAAK;AAG1B,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AAGvE,QAAI,cAAc,KAAK,aAAa;AAClC,aAAO,KAAK,MAAM;AAAA,IACpB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,QAA4B;AAC1B,QAAI,KAAK,WAAW,WAAW,GAAG;AAChC,aAAO;AAAA,IACT;AAGA,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AAGvE,UAAM,WAAW,IAAI,WAAW,UAAU;AAC1C,QAAI,SAAS;AACb,eAAW,SAAS,KAAK,YAAY;AACnC,eAAS,IAAI,OAAO,MAAM;AAC1B,gBAAU,MAAM;AAAA,IA
ClB;AAGA,SAAK,aAAa,CAAC;AAEnB,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACvE,WAAO,KAAK,IAAI,GAAG,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,wBAAgC;AAC9B,UAAM,aAAa,KAAK,QAAQ,cAAc;AAC9C,UAAM,aAAa,KAAK,WAAW,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACvE,UAAM,UAAU,aAAa;AAC7B,WAAQ,UAAU,aAAc;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,aAAqB;AACvB,WAAO,KAAK,WAAW;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,aAAa,CAAC;AAAA,EACrB;AACF;;;AClFO,IAAM,cAAN,MAAkB;AAAA,EAcvB,YAA6B,UAA8B,CAAC,GAAG;AAAlC;AAb7B,SAAiB,mBAAmB;AACpC;AAAA,SAAiB,aAAa;AAE9B;AAAA,SAAQ,SAAuB,IAAI,aAAa,CAAC;AACjD,SAAQ,kBAAkB;AAC1B,SAAQ,aAAyB,CAAC;AAMlC;AAAA;AAAA;AAAA;AAAA,SAAQ,YAAiC;AAAA,EAEuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYhE,MAAM,KAAK,SAAuB,WAAmB,KAAoC;AAEvF,QAAI,KAAK,OAAO,WAAW,GAAG;AAC5B,WAAK,kBAAkB;AAAA,IACzB;AAGA,UAAM,YAAY,IAAI,aAAa,KAAK,OAAO,SAAS,QAAQ,MAAM;AACtE,cAAU,IAAI,KAAK,QAAQ,CAAC;AAC5B,cAAU,IAAI,SAAS,KAAK,OAAO,MAAM;AACzC,SAAK,SAAS;AAKd,WAAO,KAAK,OAAO,UAAU,KAAK,kBAAkB;AAClD,YAAM,KAAK,cAAc,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cAAc,KAAoC;AAC9D,QAAI;AAEF,YAAM,YAAY,KAAK,OAAO,MAAM,GAAG,KAAK,gBAAgB;AAC5D,YAAM,qBAAqB,KAAK;AAGhC,WAAK,SAAS,KAAK,OAAO,MAAM,KAAK,gBAAgB;AAGrD,YAAM,oBAAoB,KAAK,oBAAoB,KAAK,QAAQ,cAAc;AAC9E,WAAK,kBAAkB,qBAAqB;AAG5C,YAAM,SAAS,MAAM,IAAI,MAAM,SAAS;AAGxC,YAAM,gBAAgB,IAAI,KAAK;AAC/B,eAAS,IAAI,GAAG,IAAI,OAAO,YAAY,QAAQ,KAAK;AAClD,cAAM,QAAQ,OAAO,YAAY,CAAC;AAClC,cAAM,YAAY,qBAAsB,IAAI;AAC5C,aAAK,WAAW,KAAK,EAAE,OAAO,UAAU,CAAC;AAAA,MAC3C;AAGA,WAAK,QAAQ,cAAc,OAAO,YAAY,MAAM;AAAA,IACtD,SAAS,OAAO;AACd,WAAK,QAAQ,UAAU,KAAc;AAGrC,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,gBAAgB,aAAqB,KAAkE;AAErG,UAAM,gBAAgB,KAAK,YAAY,SAAS,IAAM;AAGtD,QAAI,iBAAiB;AACrB,WAAO,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,CAAC,EAAE,YAAY,cAAc,eAAe;AAC/F,YAAM,YAAY,KAAK,WAAW,MAAM;AACxC;AAGA,UAAI,mBAAmB,GAAG;AACxB,cAAM,UAAU,cAAc,UAAU,aAAa,KAAM,QAAQ,CAAC;AACpE,gBAAQ,KAAK,uCAAuC;AAAA,UAClD;AAAA,UACA,iBAAiB,gBAAgB;AAAA,UACjC,aAAa,KAAK,WAAW;AAAA,UAC7B,SAAS,KAAK,WAAW;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,CAAC,EAAE,aAAa,aAAa;AAC7E,YAAM,EAAE,MAAM,IAAI,KAAK,WAAW,MAAM;AACxC,WAAK,YAAY;AACjB,aAAO;AAAA,IACT;AAIA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA8B;AAC5B,WAAO,CAAC,GAAG,KAAK,UAAU;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAoB;AACtB,WAAO,KAAK,IAAI,GAAG,KAAK,OAAO,SAAS,KAAK,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,mBAA2B;AAC7B,WAAO,KAAK,WAAW;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,mBAA2B;AAC7B,WAAO,KAAK,OAAO,UAAU,KAAK,QAAQ,cAAc;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,MAAM,KAAoC;AAC9C,QAAI,KAAK,OAAO,WAAW,GAAG;AAC5B;AAAA,IACF;AAGA,UAAM,SAAS,IAAI,aAAa,KAAK,gBAAgB;AACrD,WAAO,IAAI,KAAK,QAAQ,CAAC;AAIzB,UAAM,qBAAqB,KAAK;AAEhC,QAAI;AAEF,YAAM,SAAS,MAAM,IAAI,MAAM,MAAM;AAIrC,YAAM,iBAAiB,KAAK,OAAO,UAAU,KAAK,QAAQ,cAAc;AACxE,YAAM,gBAAgB,IAAI,KAAK;AAC/B,YAAM,mBAAmB,KAAK,KAAK,iBAAiB,KAAK,UAAU;AAEnE,eAAS,IAAI,GAAG,IAAI,KAAK,IAAI,kBAAkB,OAAO,YAAY,MAAM,GAAG,KAAK;AAC9E,cAAM,QAAQ,OAAO,YAAY,CAAC;AAClC,cAAM,YAAY,qBAAsB,IAAI;AAC5C,aAAK,WAAW,KAAK,EAAE,OAAO,UAAU,CAAC;AAAA,MAC3C;AAGA,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAGvB,WAAK,QAAQ,cAAc,KAAK,IAAI,kBAAkB,OAAO,YAAY,MAAM,CAAC;AAAA,IAClF,SAAS,OAAO;AACd,WAAK,QAAQ,UAAU,KAAc;AAGrC,WAAK,SAAS,IAAI,aAAa,CAAC;AAChC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,iBAAiB,QAAsB;AAC
rC,eAAW,SAAS,KAAK,YAAY;AACnC,YAAM,aAAa;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,SAAS,IAAI,aAAa,CAAC;AAChC,SAAK,kBAAkB;AACvB,SAAK,aAAa,CAAC;AACnB,SAAK,YAAY;AAAA,EACnB;AACF;;;AChQA,SAAS,eAAe,QAAmC;AAEzD,QAAM,UAAU,OAAO,aAAa,CAAC;AACrC,QAAM,QAAQ,YAAY,OAAO,aAC7B,IAAI,WAAW,MAAM,IACrB,IAAI,WAAW,QAAQ,GAAG,UAAU,CAAC;AACzC,QAAM,UAAU,IAAI,aAAa,MAAM,MAAM;AAC7C,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAQ,CAAC,IAAI,MAAM,CAAC,IAAI;AAAA,EAC1B;AACA,SAAO;AACT;AAwBO,IAAM,sBAAN,cAAkC,aAAwC;AAAA,EAS/E,YAA6B,SAAqC;AAChE,UAAM;AADqB;AAJ7B,SAAQ,kBAAkB;AAC1B,SAAQ,kBAAiC;AACzC,SAAQ,mBAAkC;AAKxC,UAAM,aAAa,QAAQ,cAAc;AAEzC,SAAK,YAAY,IAAI,eAAe,EAAE,WAAW,CAAC;AAClD,SAAK,YAAY,IAAI,oBAAoB;AAAA,MACvC;AAAA,MACA,kBAAkB,QAAQ,iBAAiB;AAAA,IAC7C,CAAC;AACD,SAAK,cAAc,IAAI,YAAY;AAAA,MACjC;AAAA,MACA,SAAS,CAAC,UAAU;AAClB,aAAK,KAAK,SAAS,KAAK;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,UAAM,KAAK,UAAU,WAAW;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,QAAc;AAEZ,SAAK,eAAe;AAEpB,SAAK,UAAU,MAAM;AACrB,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AACvB,SAAK,kBAAkB;AAMvB,SAAK,UAAU,OAAO;AAGtB,SAAK,eAAe;AAGpB,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,aAAa,OAAkC;AAEnD,UAAM,WAAW,KAAK,UAAU,IAAI,KAAK;AACzC,QAAI,CAAC,UAAU;AACb;AAAA,IACF;AAGA,UAAM,UAAU,eAAe,QAAQ;AAGvC,UAAM,eAAe,MAAM,KAAK,UAAU,SAAS,OAAO;AAG1D,QAAI,CAAC,KAAK,iBAAiB;AACzB,WAAK,kBAAkB;AACvB,WAAK,KAAK,kBAAkB,YAAY;AAAA,IAC1C;AAOA,SAAK,YAAY,KAAK,SAAS,cAAc,KAAK,QAAQ,GAAG,EAAE,MAAM,SAAO;AAC1E,WAAK,KAAK,SAAS,GAAG;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,MAAqB;AAEzB,UAAM,YAAY,KAAK,UAAU,MAAM;AACvC,QAAI,WAAW;AACb,YAAM,QAAQ,IAAI,WAAW,SAAS;AACtC,YAAM,KAAK,aAAa,KAAK;AAAA,IAC/B;AAIA,UAAM,KAAK,YAAY,MAAM,KAAK,QAAQ,GAAG;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,KAAK,YAAoB,IAAmB;AAEhD,SAAK,eAAe;AAGpB,UAAM,KAAK,UAAU,UAAU,SAAS;AAGxC,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AACvB,SAAK,kBAAkB;AAGvB,SAAK,KAAK,qBAAqB,MAAgB;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaQ,iBAAuB;AAC7B,UAAM,cAAc,MAAM;AACxB,YAAM,cAAc,KAAK,UAAU,eAAe;AAClD,YAAM,QAAQ,KAAK,YAAY,gBAAgB,aAAa,KAAK,QAAQ,GAAG;AAE5E,UAAI,OAAO;AACT,aAAK,KAAK,eAAe,KAAK;AAAA,MAChC;AAEA,WAAK,mBAAmB,sBAAsB,WAAW;AAAA,IAC3D;AAEA,SAAK,mBAAmB,sBAAsB,WAAW;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAwB;AAC9B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAAA,IACpC;AAEA,SAAK,kBAAkB,OAAO,YAAY,MAAM;AAC9C,UAAI,KAAK,UAAU,WAAW,KAAK,KAAK,YAAY,qBAAqB,GAAG;AAC1E,aAAK,KAAK,qBAAqB,MAAgB;AAC/C,aAAK,eAAe;AAAA,MACtB;AAAA,IACF,GAAG,GAAG;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAuB;AAC7B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAEA,QAAI,KAAK,kBAAkB;AACzB,2BAAqB,KAAK,gBAAgB;AAC1C,WAAK,mBAAmB;AAAA,IAC1B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW;AACT,WAAO;AAAA,MACL,iBAAiB,KAAK;AAAA,MACtB,eAAe,KAAK,UAAU;AAAA,MAC9B,SAAS,KAAK,YAAY;AAAA,MAC1B,cAAc,KAAK,YAAY;AAAA,MAC/B,aAAa,KAAK,UAAU,eAAe;AAAA,MAC3C,iBAAiB,KAAK,UAAU,mBAAmB;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,eAAe;AACpB,SAAK,UAAU,QAAQ;AACvB,SAAK,UAAU,MAAM;AACrB,SAAK,YAAY,MAAM;AAAA,EACzB;AACF;;;AChOO,IAAM,yBAAyB;AAAA;AAAA,EAEpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AACF;;;AC/BO,IAAM,kBAAN,MAA4D;AAAA,EAIjE,YAAY,UAAkD,CAAC,GAAG;AAChE,SAAK,UAAU,QAAQ,WAAW;AAClC,SAAK,SAAS,QAAQ,UAAU;AAAA,EAClC;AAAA,EAEA,WAAW,MAAsB;AAC/B,QAAI,CAAC,KAAK,QAAS;AAEnB,UAAM,aAAa,KAAK,WAAW,OAAO,WAAM;AAChD,UAAM,cAAc,KAAK,WAAW,OAAO,iBAAiB;AAE5D,YAAQ;AAAA,MACN,KAAK,KAAK,MAAM,MAA
M,UAAU,IAAI,KAAK,IAAI,OAAO,KAAK,WAAW,QAAQ,CAAC,CAAC;AAAA,MAC9E;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,YAAQ,IAAI,aAAa,KAAK,OAAO;AACrC,YAAQ,IAAI,YAAY,KAAK,MAAM;AACnC,QAAI,KAAK,cAAc;AACrB,cAAQ,IAAI,mBAAmB,KAAK,YAAY;AAAA,IAClD;AACA,YAAQ,IAAI,aAAa,GAAG,KAAK,WAAW,QAAQ,CAAC,CAAC,IAAI;AAC1D,YAAQ,IAAI,WAAW,KAAK,MAAM;AAElC,QAAI,OAAO,KAAK,KAAK,UAAU,EAAE,SAAS,GAAG;AAC3C,cAAQ,IAAI,eAAe,KAAK,UAAU;AAAA,IAC5C;AAEA,QAAI,KAAK,OAAO;AACd,cAAQ,MAAM,UAAU,KAAK,KAAK;AAAA,IACpC;AAEA,YAAQ,SAAS;AAAA,EACnB;AAAA,EAEA,aAAa,QAA0B;AACrC,QAAI,CAAC,KAAK,QAAS;AAEnB,UAAM,WAAW,OAAO,SAAS,YAAY,WAAM;AAEnD,YAAQ;AAAA,MACN,KAAK,KAAK,MAAM,MAAM,QAAQ,IAAI,OAAO,IAAI,OAAO,OAAO,KAAK;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAAA,EAE7B;AAAA,EAEA,MAAM,WAA0B;AAC9B,SAAK,UAAU;AAAA,EACjB;AACF;;;ACvGA,IAAM,aAAa;AAAA,EACjB,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,OAAO;AACT;AAKA,SAAS,WAAW,MAAgB,aAAqB,gBAAwB;AAC/E,QAAM,aAAa,OAAO,QAAQ,KAAK,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,EACjC,IAAI,CAAC,CAAC,KAAK,KAAK,OAAO;AAAA,IACtB;AAAA,IACA,OAAO,OAAO,UAAU,WACpB,EAAE,aAAa,MAAM,IACrB,OAAO,UAAU,WACf,OAAO,UAAU,KAAK,IACpB,EAAE,UAAU,MAAM,IAClB,EAAE,aAAa,MAAM,IACvB,EAAE,WAAW,MAAM;AAAA,EAC3B,EAAE;AAEJ,SAAO;AAAA,IACL,eAAe,CAAC;AAAA,MACd,UAAU;AAAA,QACR,YAAY;AAAA,UACV,EAAE,KAAK,gBAAgB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UAC3D,EAAE,KAAK,mBAAmB,OAAO,EAAE,aAAa,eAAe,EAAE;AAAA,UACjE,EAAE,KAAK,sBAAsB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UACjE,EAAE,KAAK,0BAA0B,OAAO,EAAE,aAAa,aAAa,EAAE;AAAA,QACxE;AAAA,MACF;AAAA,MACA,YAAY,CAAC;AAAA,QACX,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,OAAO,CAAC;AAAA,UACN,SAAS,KAAK;AAAA,UACd,QAAQ,KAAK;AAAA,UACb,cAAc,KAAK,gBAAgB;AAAA,UACnC,MAAM,KAAK;AAAA,UACX,MAAM;AAAA;AAAA,UACN,mBAAmB,OAAO,KAAK,YAAY,GAAS;AAAA,UACpD,iBAAiB,OAAO,KAAK,UAAU,GAAS;AAAA,UAChD;AAAA,UACA,QAAQ;AAAA,YACN,MAAM,KAAK,WAAW,OAAO,WAAW,KAAK,WAAW;AAAA,YACxD,SAAS,KAAK,OAAO,WAAW;AAAA,UAClC;AAAA,QACF,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAKA,SAAS,aAAa,QAAoB,aAAqB,gBAAwB;AACrF,QAAM,aAAa,OAAO,QAAQ,OAAO,UAAU,EAChD,OAAO,CAAC,CAAC,EAAE,CAAC,MAAM,MAAM,MAAS,EACjC,IAAI,CAAC,CAAC,KAAK,KAAK,OAAO;AAAA,IACtB;AAAA,IACA,OAAO,OAAO,UAAU,WACpB,EAAE,aAAa,MAAM,IACrB,OAAO,UAAU,WACf,OAAO,UAAU,KAAK,IACpB,EAAE,UAAU,MAAM,IAClB,EAAE,aAAa,MAAM,IACvB,EAAE,WAAW,MAAM;AAAA,EAC3B,EAAE;AAEJ,QAAM,YAAY;AAAA,IAChB;AAAA,IACA,cAAc,OAAO,OAAO,YAAY,GAAS;AAAA,IACjD,GAAI,OAAO,SAAS,YAChB,EAAE,OAAO,OAAO,MAAM,IACtB,EAAE,UAAU,OAAO,MAAM;AAAA,EAC/B;AAEA,SAAO;AAAA,IACL,iBAAiB,CAAC;AAAA,MAChB,UAAU;AAAA,QACR,YAAY;AAAA,UACV,EAAE,KAAK,gBAAgB,OAAO,EAAE,aAAa,YAAY,EAAE;AAAA,UAC3D,EAAE,KAAK,mBAAmB,OAAO,EAAE,aAAa,eAAe,EAAE;AAAA,QACnE;AAAA,MACF;AAAA,MACA,cAAc,CAAC;AAAA,QACb,OAAO;AAAA,UACL,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,SAAS,CAAC;AAAA,UACR,MAAM,OAAO;AAAA,UACb,GAAI,OAAO,SAAS,YAChB;AAAA,YACA,KAAK;AAAA,cACH,YAAY,CAAC,SAAS;AAAA,cACtB,wBAAwB;AAAA;AAAA,cACxB,aAAa;AAAA,YACf;AAAA,UACF,IACE;AAAA,YACA,OAAO;AAAA,cACL,YAAY,CAAC,SAAS;AAAA,YACxB;AAAA,UACF;AAAA,QACJ,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAYO,IAAM,eAAN,MAAyD;AAAA,EAW9D,YACE,QACA,cAAsB,aACtB,iBAAyB,SACzB;AAXF,SAAQ,aAAyB,CAAC;AAClC,SAAQ,eAA6B,CAAC;AACtC,SAAQ,kBAAyD;AACjE,SAAiB,cAAc;AAC/B,SAAiB,oBAAoB;AACrC,SAAQ,aAAa;AAOnB,SAAK,SAAS;AAAA,MACZ,WAAW;AAAA,MACX,SAAS,CAAC;AAAA,MACV,GAAG;AAAA,IACL;AACA,SAAK,cAAc;AACnB,SAAK,iBAAiB;AAGtB,SAAK,kBAAkB,YAAY,MAAM;AACvC,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC,GAAG,KAAK,iBAAiB;AAAA,EAC3B;AAAA,EAEA,WAAW,MAAsB;AAC/B,QAAI,KAAK,WAAY;AAErB,SAAK,WAAW,KAAK,IAAI;AAEzB,QAAI,KAAK,WAAW,UAAU,KAAK,aAAa;AAC9C,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,aAAa,QAA0B;AACrC,QAAI,KAAK,WAAY;
AAErB,SAAK,aAAa,KAAK,MAAM;AAE7B,QAAI,KAAK,aAAa,UAAU,KAAK,aAAa;AAChD,WAAK,MAAM,EAAE,MAAM,QAAQ,KAAK;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,WAAY;AAErB,UAAM,QAAQ,KAAK,WAAW,OAAO,CAAC;AACtC,UAAM,UAAU,KAAK,aAAa,OAAO,CAAC;AAE1C,UAAM,WAA4B,CAAC;AAGnC,QAAI,MAAM,SAAS,GAAG;AACpB,eAAS,KAAK,KAAK,YAAY,KAAK,CAAC;AAAA,IACvC;AAGA,QAAI,QAAQ,SAAS,GAAG;AACtB,eAAS,KAAK,KAAK,cAAc,OAAO,CAAC;AAAA,IAC3C;AAEA,UAAM,QAAQ,IAAI,QAAQ;AAAA,EAC5B;AAAA,EAEA,MAAM,WAA0B;AAC9B,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAGA,UAAM,KAAK,MAAM;AAEjB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,MAAc,YAAY,OAAkC;AAE1D,UAAM,gBAAgB,MAAM;AAAA,MAAI,UAC9B,WAAW,MAAM,KAAK,aAAa,KAAK,cAAc,EAAE,cAAc,CAAC;AAAA,IACzE;AAEA,UAAM,OAAO,EAAE,cAAc;AAC7B,UAAM,WAAW,KAAK,OAAO,SAAS,QAAQ,OAAO,EAAE,IAAI;AAE3D,UAAM,KAAK,YAAY,UAAU,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,cAAc,SAAsC;AAEhE,UAAM,kBAAkB,QAAQ;AAAA,MAAI,YAClC,aAAa,QAAQ,KAAK,aAAa,KAAK,cAAc,EAAE,gBAAgB,CAAC;AAAA,IAC/E;AAEA,UAAM,OAAO,EAAE,gBAAgB;AAC/B,UAAM,WAAW,KAAK,OAAO,SAAS,QAAQ,OAAO,EAAE,IAAI;AAE3D,UAAM,KAAK,YAAY,UAAU,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,YAAY,UAAkB,MAA8B;AACxE,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO,SAAS;AAE5E,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,UAAU;AAAA,QACrC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,GAAG,KAAK,OAAO;AAAA,QACjB;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,gBAAQ,KAAK,yBAAyB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,MAChF;AAAA,IACF,SAAS,OAAO;AACd,UAAK,MAAgB,SAAS,cAAc;AAC1C,gBAAQ,KAAK,yBAAyB;AAAA,MACxC,OAAO;AACL,gBAAQ,KAAK,wBAAwB,KAAK;AAAA,MAC5C;AAAA,IACF,UAAE;AACA,mBAAa,SAAS;AAAA,IACxB;AAAA,EACF;AACF;;;ACpQA,SAAS,WAAW,SAAiB,IAAY;AAC/C,QAAM,QAAQ,IAAI,WAAW,MAAM;AACnC,SAAO,gBAAgB,KAAK;AAC5B,SAAO,MAAM,KAAK,KAAK,EACpB,IAAI,OAAK,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EACxC,KAAK,EAAE;AACZ;AA4BA,IAAI,kBAAyC;AA0BtC,SAAS,mBAAmB,QAAyC;AAC1E,MAAI,iBAAiB;AACnB,oBAAgB,SAAS;AAAA,EAC3B;AACA,oBAAkB,IAAI,eAAe,MAAM;AAC3C,SAAO;AACT;AAKO,SAAS,eAAsC;AACpD,SAAO;AACT;AAOO,IAAM,iBAAN,MAAqB;AAAA,EAU1B,YAAY,QAAyB;AARrC,SAAQ,WAA8C;AACtD,SAAQ,gBAA+B;AACvC,SAAQ,oBAA2D;AAGnE;AAAA,SAAQ,WAAkG,oBAAI,IAAI;AAClH,SAAQ,aAAuG,oBAAI,IAAI;AAGrH,SAAK,SAAS;AAAA,MACZ,SAAS,OAAO,WAAW;AAAA,MAC3B,aAAa,OAAO,eAAe;AAAA,MACnC,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,UAAU,OAAO,YAAY;AAAA,MAC7B,gBAAgB,OAAO;AAAA,MACvB,UAAU,OAAO,YAAY,EAAE,OAAO,GAAK,oBAAoB,KAAK;AAAA,MACpE,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,mBAAmB,OAAO,qBAAqB;AAAA,IACjD;AAEA,QAAI,KAAK,OAAO,SAAS;AACvB,WAAK,aAAa;AAClB,WAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAqB;AAC3B,YAAQ,KAAK,OAAO,UAAU;AAAA,MAC5B,KAAK;AACH,aAAK,WAAW,IAAI,gBAAgB,EAAE,SAAS,KAAK,CAAC;AACrD;AAAA,MACF,KAAK;AACH,YAAI,CAAC,KAAK,OAAO,gBAAgB;AAC/B,kBAAQ,KAAK,iEAAiE;AAC9E;AAAA,QACF;AACA,aAAK,WAAW,IAAI;AAAA,UAClB,KAAK,OAAO;AAAA,UACZ,KAAK,OAAO;AAAA,UACZ,KAAK,OAAO;AAAA,QACd;AACA;AAAA,MACF,KAAK;AAAA,MACL;AACE,aAAK,WAAW;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAA+B;AACrC,QAAI,CAAC,KAAK,OAAO,kBAAkB,CAAC,KAAK,SAAU;AAEnD,SAAK,oBAAoB,YAAY,MAAM;AACzC,WAAK,aAAa;AAAA,IACpB,GAAG,KAAK,OAAO,iBAAiB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,UAAmB,OAAgB;AACtD,QAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAEjC,UAAM,WAAW,KAAK,OAAO;AAC7B,QAAI,WAAW,SAAS,mBAAoB,QAAO;AAEnD,UAAM,QAAQ,SAAS,SAAS;AAChC,WAAO,KAAK,OAAO,IAAI;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,UAAU,MAAc,aAAsC,CAAC,GAAG,eAAyC;AACzG,UAAM,UAAU,eAAe,WAAW,KAAK,iBAAiB,WAAW,EAAE;AAC7E,UAAM,SAAS,WAAW,CAAC;AAC3B,UAAM,eAAe,eAAe;AACpC,UAAM,YAAY,YAAY,IAAI;AAGlC,QAAI,CAAC,iBAAiB,CAAC,KAAK,eAAe;AACzC,WAAK,gBAAgB;AA
AA,IACvB;AAEA,QAAI,iBAAiB,EAAE,GAAG,WAAW;AACrC,QAAI,QAAQ;AACZ,QAAI,UAAU,KAAK,aAAa;AAEhC,UAAM,UAAuB,EAAE,SAAS,QAAQ,aAAa;AAE7D,UAAM,UAAU,CAAC,QAAwB,UAAwB;AAC/D,UAAI,MAAO;AACX,cAAQ;AAER,YAAM,UAAU,YAAY,IAAI;AAChC,YAAM,aAAa,UAAU;AAG7B,UAAI,WAAW,WAAW,CAAC,SAAS;AAClC,kBAAU,KAAK,aAAa,IAAI;AAAA,MAClC;AAEA,UAAI,CAAC,WAAW,CAAC,KAAK,SAAU;AAEhC,YAAM,WAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAEA,WAAK,SAAS,WAAW,QAAQ;AAGjC,UAAI,CAAC,gBAAgB,KAAK,kBAAkB,SAAS;AACnD,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,KAAK,MAAM,QAAQ,IAAI;AAAA,MACvB,cAAc,CAAC,UAAiB,QAAQ,SAAS,KAAK;AAAA,MACtD,eAAe,CAAC,UAAmC;AACjD,yBAAiB,EAAE,GAAG,gBAAgB,GAAG,MAAM;AAAA,MACjD;AAAA,MACA,YAAY,MAAM;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,MAAM,SACJ,MACA,IACA,aAAsC,CAAC,GACvC,eACY;AACZ,UAAM,OAAO,KAAK,UAAU,MAAM,YAAY,aAAa;AAE3D,QAAI;AACF,YAAM,SAAS,MAAM,GAAG,IAAI;AAC5B,WAAK,IAAI;AACT,aAAO;AAAA,IACT,SAAS,OAAO;AACd,WAAK,aAAa,KAAc;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,iBACE,MACA,QAAgB,GAChB,aAAwD,CAAC,GACnD;AACN,QAAI,CAAC,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO,eAAgB;AAEzD,UAAM,MAAM,KAAK,aAAa,MAAM,UAAU;AAC9C,UAAM,WAAW,KAAK,SAAS,IAAI,GAAG;AAEtC,QAAI,UAAU;AACZ,eAAS,SAAS;AAAA,IACpB,OAAO;AACL,WAAK,SAAS,IAAI,KAAK,EAAE,OAAO,WAAW,CAAC;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,gBACE,MACA,OACA,aAAwD,CAAC,GACnD;AACN,QAAI,CAAC,KAAK,OAAO,WAAW,CAAC,KAAK,OAAO,eAAgB;AAEzD,UAAM,MAAM,KAAK,aAAa,MAAM,UAAU;AAC9C,UAAM,WAAW,KAAK,WAAW,IAAI,GAAG;AAExC,QAAI,UAAU;AACZ,eAAS,OAAO,KAAK,KAAK;AAAA,IAC5B,OAAO;AACL,WAAK,WAAW,IAAI,KAAK,EAAE,QAAQ,CAAC,KAAK,GAAG,WAAW,CAAC;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAc,YAA+D;AAChG,UAAM,cAAc,OAAO,QAAQ,UAAU,EAC1C,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EACrC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE,EAC3B,KAAK,GAAG;AACX,WAAO,GAAG,IAAI,IAAI,WAAW;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAqB;AAC3B,QAAI,CAAC,KAAK,SAAU;AAEpB,UAAM,YAAY,YAAY,IAAI;AAGlC,eAAW,CAAC,KAAK,IAAI,KAAK,KAAK,UAAU;AACvC,YAAM,OAAO,IAAI,MAAM,GAAG,EAAE,CAAC;AAC7B,YAAM,SAAqB;AAAA,QACzB;AAAA,QACA,MAAM;AAAA,QACN,OAAO,KAAK;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB;AAAA,MACF;AACA,WAAK,SAAS,aAAa,MAAM;AAAA,IACnC;AAGA,eAAW,CAAC,KAAK,IAAI,KAAK,KAAK,YAAY;AACzC,YAAM,OAAO,IAAI,MAAM,GAAG,EAAE,CAAC;AAC7B,UAAI,KAAK,OAAO,WAAW,EAAG;AAG9B,YAAM,MAAM,KAAK,OAAO,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AACjD,YAAM,MAAM,MAAM,KAAK,OAAO;AAE9B,YAAM,SAAqB;AAAA,QACzB;AAAA,QACA,MAAM;AAAA,QACN,OAAO;AAAA,QACP,YAAY;AAAA,UACV,GAAG,KAAK;AAAA,UACR,OAAO,KAAK,OAAO;AAAA,UACnB;AAAA,UACA,KAAK,KAAK,IAAI,GAAG,KAAK,MAAM;AAAA,UAC5B,KAAK,KAAK,IAAI,GAAG,KAAK,MAAM;AAAA,QAC9B;AAAA,QACA;AAAA,MACF;AACA,WAAK,SAAS,aAAa,MAAM;AAGjC,WAAK,SAAS,CAAC;AAAA,IACjB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,SAAK,aAAa;AAClB,UAAM,KAAK,UAAU,MAAM;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAA0B;AAC9B,QAAI,KAAK,mBAAmB;AAC1B,oBAAc,KAAK,iBAAiB;AACpC,WAAK,oBAAoB;AAAA,IAC3B;AAEA,UAAM,KAAK,MAAM;AACjB,UAAM,KAAK,UAAU,SAAS;AAC9B,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,YAAqB;AACnB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,YAA6B;AAC3B,WAAO,EAAE,GAAG,KAAK,OAAO;AAAA,EAC1B;AACF;;;AChUO,IAAM,cAAc;AAAA;AAAA,EAEzB,mBAAmB;AAAA;AAAA,EAEnB,iBAAiB;AAAA;AAAA,EAEjB,iBAAiB;AAAA;AAAA,EAEjB,cAAc;AAAA;AAAA,EAEd,YAAY;AAAA;AAAA,EAEZ,cAAc;AAChB;AAKO,IAAM,4BAA4B,CAAC,GAAG,GAAG,IAAI,IAAI,IAAI,KAAK,KAAK,KAAK,KAAM,MAAM,GAAI;AAKpF,IAAM,0BAA0B,CAAC,KAAK,KAAK,KAAM,MAAM,KAAM,KAAO,KAAO,GAAK;;;ACjIvF,IAA
M,UAAU;AAChB,IAAM,aAAa;AACnB,IAAM,aAAa;AAGnB,IAAM,yBAAyB,OAAO,OAAO;AA6B7C,IAAI,oBAAiC;AAAA,EACnC,cAAc;AAChB;AAqBO,SAAS,oBAAoB,QAA2B;AAC7D,sBAAoB;AAAA,IAClB,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAGA,QAAM,QAAQ,cAAc;AAC5B,QAAM,aAAa,EAAE,MAAM,CAAC,QAAQ;AAClC,YAAQ,KAAK,6DAA6D,GAAG;AAAA,EAC/E,CAAC;AACH;AAKO,SAAS,iBAA8B;AAC5C,SAAO,EAAE,GAAG,kBAAkB;AAChC;AAuCO,SAAS,YAAY,KAAa,SAA0B;AACjE,MAAI,SAAS;AACX,WAAO,GAAG,GAAG,KAAK,OAAO;AAAA,EAC3B;AACA,SAAO;AACT;AAWO,IAAM,aAAN,MAAiB;AAAA,EAAjB;AACL,SAAQ,KAAyB;AACjC,SAAQ,YAAyC;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjD,MAAc,QAA8B;AAC1C,QAAI,KAAK,GAAI,QAAO,KAAK;AACzB,QAAI,KAAK,UAAW,QAAO,KAAK;AAIhC,QAAI,UAAU,WAAW,UAAU,QAAQ,SAAS;AAClD,UAAI;AACF,cAAM,cAAc,MAAM,UAAU,QAAQ,QAAQ;AACpD,YAAI,aAAa;AACf,kBAAQ,IAAI,qEAAqE;AAAA,QACnF,OAAO;AACL,kBAAQ,IAAI,8DAA8D;AAAA,QAC5E;AAGA,YAAI,UAAU,QAAQ,UAAU;AAC9B,gBAAM,WAAW,MAAM,UAAU,QAAQ,SAAS;AAClD,gBAAM,WAAW,SAAS,SAAS,KAAK,OAAO,MAAM,QAAQ,CAAC;AAC9D,gBAAM,YAAY,SAAS,SAAS,KAAK,OAAO,MAAM,QAAQ,CAAC;AAC/D,kBAAQ,IAAI,yBAAyB,MAAM,QAAQ,OAAO,UAAU;AAAA,QACtE;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ,KAAK,sDAAsD,GAAG;AAAA,MACxE;AAAA,IACF;AAEA,SAAK,YAAY,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChD,YAAM,UAAU,UAAU,KAAK,SAAS,UAAU;AAElD,cAAQ,UAAU,MAAM;AACtB,gBAAQ,MAAM,0CAA0C,QAAQ,KAAK;AACrE,eAAO,QAAQ,KAAK;AAAA,MACtB;AAEA,cAAQ,YAAY,MAAM;AACxB,aAAK,KAAK,QAAQ;AAClB,gBAAQ,KAAK,EAAE;AAAA,MACjB;AAEA,cAAQ,kBAAkB,CAAC,UAAU;AACnC,cAAM,KAAM,MAAM,OAA4B;AAC9C,cAAM,aAAc,MAAgC;AACpD,cAAM,KAAM,MAAM,OAA4B;AAE9C,YAAI,aAAa,GAAG;AAElB,gBAAM,QAAQ,GAAG,kBAAkB,YAAY,EAAE,SAAS,MAAM,CAAC;AACjE,gBAAM,YAAY,kBAAkB,kBAAkB,EAAE,QAAQ,MAAM,CAAC;AAAA,QACzE,WAAW,aAAa,KAAK,IAAI;AAE/B,gBAAM,QAAQ,GAAG,YAAY,UAAU;AAGvC,cAAI,CAAC,MAAM,WAAW,SAAS,gBAAgB,GAAG;AAChD,kBAAM,YAAY,kBAAkB,kBAAkB,EAAE,QAAQ,MAAM,CAAC;AAAA,UACzE;AAGA,gBAAM,gBAAgB,MAAM,WAAW;AACvC,wBAAc,YAAY,CAAC,gBAAgB;AACzC,kBAAM,SAAU,YAAY,OAA0C;AACtE,gBAAI,QAAQ;AACV,oBAAM,QAAQ,OAAO;AACrB,kBAAI,MAAM,mBAAmB,QAAW;AACtC,sBAAM,iBAAiB,MAAM,YAAY,KAAK,IAAI;AAClD,uBAAO,OAAO,KAAK;AAAA,cACrB;AACA,qBAAO,SAAS;AAAA,YAClB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,IAAI,KAA+B;AACvC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,MAAM,GAAG;AAC/B,gBAAQ,YAAY,MAAM,QAAQ,QAAQ,SAAS,CAAC;AACpD,gBAAQ,UAAU,MAAM,QAAQ,KAAK;AAAA,MACvC,CAAC;AAAA,IACH,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,IAAI,KAA0C;AAClD,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB,EAAE,aAAa,IAAI,CAAC;AACxE,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAE9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,IAAI,GAAG;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,SAAS,QAAQ;AACvB,gBAAM,MAAM,QAAQ,QAAQ;AAC5B,gBAAM,cAAc,EAAE,aAAa,IAAI,CAAC;AACxC,cAAI,QAAQ;AACV,kBAAM,cAAc,EAAE,oBAAoB,OAAO,KAAK,CAAC;AAEvD,mBAAO,iBAAiB,KAAK,IAAI;AACjC,kBAAM,IAAI,MAAM;AAAA,UAClB;AACA,gBAAM,IAAI;AACV,cAAI,KAAK;AACP,uBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AAAA,UACvD,OAAO;AACL,uBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AAAA,UACzD;AACA,kBAAQ,QAAQ,QAAQ,IAAI;AAAA,QAC9B;AACA,gBAAQ,UAAU,MAAM;AACtB,gBAAM,cAAc,EAAE,aAAa,MAAM,CAAC;AAC1C,gBAAM,IAAI;AACV,qBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AACvD,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF,CAAC;AAAA,IACH,QAAQ;AACN,YAAM,aAAa,IAAI,MAAM,kBAAkB,CAAC;AAChD,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsBA,MAAM,kBAAkB,KAAa,aAAiD;AACpF,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gCAAgC,EAAE,aAAa,IAAI,CAAC;AAEtF,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,
MAAM;AAC5B,YAAM,SAAS,MAAM,IAAI,QAAiC,CAAC,YAAY;AACrE,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,IAAI,GAAG;AAC7B,gBAAQ,YAAY,MAAM,QAAQ,QAAQ,MAAiC;AAC3E,gBAAQ,UAAU,MAAM,QAAQ,MAAS;AAAA,MAC3C,CAAC;AAGD,UAAI,CAAC,QAAQ,MAAM;AACjB,cAAM,cAAc,EAAE,aAAa,MAAM,CAAC;AAC1C,cAAM,IAAI;AACV,mBAAW,iBAAiB,sBAAsB,GAAG,CAAC,CAAC;AACvD,eAAO,EAAE,MAAM,MAAM,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,cAAc,EAAE,aAAa,MAAM,oBAAoB,OAAO,KAAK,CAAC;AAG1E,UAAI,CAAC,OAAO,MAAM;AAChB,cAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,cAAM,IAAI;AACV,mBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,MAC3C;AAGA,YAAM,WAAW,eAAe;AAChC,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,UAAU,EAAE,QAAQ,OAAO,CAAC;AACzD,YAAI,CAAC,SAAS,IAAI;AAEhB,gBAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,gBAAM,IAAI;AACV,qBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,iBAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,QAC3C;AAEA,cAAM,aAAa,SAAS,QAAQ,IAAI,MAAM;AAC9C,cAAM,UAAU,eAAe,QAAQ,eAAe,OAAO;AAE7D,cAAM,cAAc;AAAA,UAClB,mBAAmB;AAAA,UACnB,eAAe;AAAA,UACf,qBAAqB,cAAc;AAAA,UACnC,qBAAqB,OAAO;AAAA,QAC9B,CAAC;AACD,cAAM,IAAI;AAEV,YAAI,SAAS;AACX,qBAAW,iBAAiB,qBAAqB,GAAG,CAAC,CAAC;AACtD,kBAAQ,IAAI,yCAAyC,GAAG,EAAE;AAAA,QAC5D,OAAO;AACL,qBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AAAA,QACvD;AAEA,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,QAAQ;AAAA,MAC7C,SAAS,YAAY;AAGnB,gBAAQ,KAAK,2DAA2D,UAAU;AAClF,cAAM,cAAc,EAAE,mBAAmB,OAAO,eAAe,MAAM,CAAC;AACtE,cAAM,IAAI;AACV,mBAAW,iBAAiB,oBAAoB,GAAG,CAAC,CAAC;AACrD,eAAO,EAAE,MAAM,OAAO,MAAM,OAAO,MAAM;AAAA,MAC3C;AAAA,IACF,QAAQ;AACN,YAAM,aAAa,IAAI,MAAM,gCAAgC,CAAC;AAC9D,aAAO,EAAE,MAAM,MAAM,OAAO,MAAM;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,IAAI,KAAa,MAAmB,MAAe,SAAiC;AACxF,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,MAClD,aAAa;AAAA,MACb,oBAAoB,KAAK;AAAA,MACzB,GAAI,WAAW,EAAE,iBAAiB,QAAQ;AAAA,IAC5C,CAAC;AACD,QAAI;AAEF,WAAK,WAAW,EAAE,MAAM,CAAC,QAAQ;AAC/B,gBAAQ,KAAK,oCAAoC,GAAG;AAAA,MACtD,CAAC;AAED,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,YAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,MAAM,KAAK,IAAI;AACrB,cAAM,SAAsB;AAAA,UAC1B;AAAA,UACA;AAAA,UACA,MAAM,KAAK;AAAA,UACX,UAAU;AAAA,UACV,gBAAgB;AAAA,UAChB;AAAA,UACA;AAAA,QACF;AACA,cAAM,UAAU,MAAM,IAAI,MAAM;AAChC,gBAAQ,YAAY,MAAM;AACxB,gBAAM,IAAI;AACV,kBAAQ;AAAA,QACV;AACA,gBAAQ,UAAU,MAAM;AACtB,gBAAM,aAAa,QAAQ,SAAS,IAAI,MAAM,kBAAkB,CAAC;AACjE,iBAAO,QAAQ,KAAK;AAAA,QACtB;AAAA,MACF,CAAC;AAGD,WAAK,aAAa,EAAE,MAAM,CAAC,QAAQ;AACjC,gBAAQ,KAAK,mDAAmD,GAAG;AAAA,MACrE,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,cAAQ,KAAK,uCAAuC,GAAG;AACvD,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IACxE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,aAA4B;AACxC,UAAM,QAAQ,MAAM,KAAK,aAAa;AACtC,QAAI,CAAC,OAAO;AACV;AAAA,IACF;AAEA,UAAM,SAAS;AACf,UAAM,YAAY,aAAa;AAE/B,QAAI,MAAM,cAAc,IAAI;AAC1B,cAAQ,KAAK,8BAA8B,MAAM,YAAY,QAAQ,CAAC,CAAC,WAAW,YAAY,MAAM,SAAS,CAAC,MAAM,YAAY,MAAM,UAAU,CAAC,GAAG;AAGpJ,iBAAW,iBAAiB,6BAA6B,GAAG;AAAA,QAC1D,cAAc,OAAO,KAAK,MAAM,MAAM,WAAW,CAAC;AAAA,MACpD,CAAC;AAGD,UAAI,OAAO,gBAAgB;AACzB,YAAI;AACF,iBAAO,eAAe,KAAK;AAAA,QAC7B,SAAS,KAAK;AACZ,kBAAQ,KAAK,+CAA+C,GAAG;AAAA,QACjE;AAAA,MACF;AAAA,IACF;AAEA,QAAI,MAAM,cAAc,IAAI;AAC1B,cAAQ,KAAK,oEAAoE;AAEjF,YAAM,cAAc,KAAK,IAAI,MAAM,aAAa,KAAK,KAAK,OAAO,IAAI;AACrE,YAAM,KAAK,YAAY,WAAW;AAAA,IACpC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,KAA4B;AACvC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,OAAO,GAAG;AAChB,WAAG,aAAa,MAAM,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,QAAQ;AAAA
,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,WAAW;AACjD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,MAAM;AACZ,WAAG,aAAa,MAAM,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,QAAQ;AAAA,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAgC;AACpC,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAC5B,aAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,OAAO;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,SAAU,QAAQ,UAA4B,CAAC;AACrD,kBAAQ;AAAA,YACN,WAAW,OAAO,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,MAAM,CAAC;AAAA,YACpD,YAAY,OAAO;AAAA,YACnB,QAAQ,OAAO,IAAI,CAAC,OAAO;AAAA,cACzB,KAAK,EAAE;AAAA,cACP,MAAM,EAAE;AAAA,cACR,UAAU,IAAI,KAAK,EAAE,QAAQ;AAAA,YAC/B,EAAE;AAAA,UACJ,CAAC;AAAA,QACH;AACA,gBAAQ,UAAU,MAAM,QAAQ,EAAE,WAAW,GAAG,YAAY,GAAG,QAAQ,CAAC,EAAE,CAAC;AAAA,MAC7E,CAAC;AAAA,IACH,QAAQ;AACN,aAAO,EAAE,WAAW,GAAG,YAAY,GAAG,QAAQ,CAAC,EAAE;AAAA,IACnD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,eAA8B;AAClC,UAAM,SAAS;AACf,UAAM,UAAU,OAAO,gBAAgB;AAEvC,UAAM,QAAQ,MAAM,KAAK,SAAS;AAClC,QAAI,MAAM,aAAa,SAAS;AAC9B;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,YAAY;AACtC,UAAM,cAAc,MAAM,KAAK,YAAY,WAAW;AAEtD,QAAI,YAAY,SAAS,GAAG;AAC1B,cAAQ,IAAI,sCAAsC,YAAY,MAAM,mBAAmB,YAAY,WAAW,CAAC,EAAE;AAAA,IACnH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,YAAY,aAAwC;AACxD,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,0BAA0B;AAAA,MAC1D,4BAA4B;AAAA,IAC9B,CAAC;AAED,QAAI;AACF,YAAM,KAAK,MAAM,KAAK,MAAM;AAG5B,YAAM,SAAS,MAAM,IAAI,QAAuB,CAAC,YAAY;AAC3D,cAAM,KAAK,GAAG,YAAY,YAAY,UAAU;AAChD,cAAM,QAAQ,GAAG,YAAY,UAAU;AACvC,cAAM,UAAU,MAAM,OAAO;AAC7B,gBAAQ,YAAY,MAAM;AACxB,gBAAM,MAAO,QAAQ,UAA4B,CAAC;AAElD,cAAI,KAAK,CAAC,GAAG,OAAO,EAAE,kBAAkB,EAAE,YAAY,MAAM,EAAE,kBAAkB,EAAE,YAAY,EAAE;AAChG,kBAAQ,GAAG;AAAA,QACb;AACA,gBAAQ,UAAU,MAAM,QAAQ,CAAC,CAAC;AAAA,MACpC,CAAC;AAED,YAAM,cAAwB,CAAC;AAC/B,UAAI,aAAa;AAGjB,iBAAW,SAAS,QAAQ;AAC1B,YAAI,cAAc,aAAa;AAC7B;AAAA,QACF;AAEA,cAAM,KAAK,OAAO,MAAM,GAAG;AAC3B,oBAAY,KAAK,MAAM,GAAG;AAC1B,sBAAc,MAAM;AAEpB,gBAAQ,IAAI,yBAAyB,MAAM,GAAG,KAAK,YAAY,MAAM,IAAI,CAAC,GAAG;AAAA,MAC/E;AAEA,YAAM,cAAc;AAAA,QAClB,wBAAwB;AAAA,QACxB,2BAA2B,YAAY;AAAA,MACzC,CAAC;AACD,YAAM,IAAI;AAGV,UAAI,aAAa,GAAG;AAClB,mBAAW,iBAAiB,wBAAwB,YAAY,QAAQ;AAAA,UACtE,aAAa,OAAO,UAAU;AAAA,QAChC,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT,SAAS,KAAK;AACZ,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,cAAQ,KAAK,iCAAiC,GAAG;AACjD,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MAAM,eAA0C;AAC9C,QAAI,CAAC,WAAW,SAAS,UAAU;AACjC,aAAO;AAAA,IACT;AAEA,QAAI;AACF,YAAM,WAAW,MAAM,UAAU,QAAQ,SAAS;AAClD,YAAM,YAAY,SAAS,SAAS;AACpC,YAAM,aAAa,SAAS,SAAS;AACrC,YAAM,cAAc,aAAa,IAAK,YAAY,aAAc,MAAM;AAEtE,YAAM,QAAQ,MAAM,KAAK,SAAS;AAElC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA,YAAY,MAAM;AAAA,MACpB;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAGA,IAAI,gBAAmC;AAKhC,SAAS,gBAA4B;AAC1C,MAAI,CAAC,eAAe;AAClB,oBAAgB,IAAI,WAAW;AAAA,EACjC;AACA,SAAO;AACT;AAQA,IAAM,uBAAuB,MAAM,OAAO;AA0C1C,eAAsB,eACpB,KACA,mBACsB;AAEtB,MAAI,UAAiC,CAAC;AACtC,MAAI,OAAO,sBAAsB,YAAY;AAE3C,cAAU,EAAE,YAAY,kBAAkB;AAAA,EAC5C,WAAW,mBAAmB;AAE5B,cAAU;AAAA,EACZ;AAEA,QAAM,EAAE,SAAS,gBAAgB,OAAO,WAAW,IAAI;AAEvD,QAAM,QAAQ,cAAc;AAC5B,QAAM,WAAW,UAAU,YAAY,KAAK,OAAO,IAAI;AACvD,QAAM,YAAY,aAAa;AAC/B,QAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,IAClD,aAAa;AAAA,IACb,GAAI,WAAW,EAAE,iBAAiB,QAAQ;AAAA,IAC1C,wBAAwB;AAAA,EAC1B,CAAC;AAGD,MAAI,eAAe;AACjB,UAAM,aAAa,MAAM,MAAM,kBAAkB,UAAU,GAAG;AAE9D,QAAI,WAAW,QAAQ,CAAC,WAAW,OAAO;AACxC,cAAQ,IAAI,uCAAu
C,GAAG,MAAM,WAAW,KAAK,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AACrH,mBAAa,WAAW,KAAK,YAAY,WAAW,KAAK,UAAU;AACnE,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,yBAAyB;AAAA,QACzB,qBAAqB;AAAA,QACrB,oBAAoB,WAAW,KAAK;AAAA,MACtC,CAAC;AACD,YAAM,IAAI;AACV,aAAO,WAAW;AAAA,IACpB;AAEA,QAAI,WAAW,OAAO;AACpB,cAAQ,IAAI,yCAAyC,GAAG,EAAE;AAC1D,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,yBAAyB;AAAA,QACzB,qBAAqB;AAAA,MACvB,CAAC;AAAA,IAEH;AAAA,EAEF,OAAO;AAEL,UAAM,SAAS,MAAM,MAAM,IAAI,QAAQ;AACvC,QAAI,QAAQ;AACV,cAAQ,IAAI,2BAA2B,GAAG,MAAM,OAAO,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AAChG,mBAAa,OAAO,YAAY,OAAO,UAAU;AACjD,YAAM,cAAc;AAAA,QAClB,mBAAmB;AAAA,QACnB,oBAAoB,OAAO;AAAA,MAC7B,CAAC;AACD,YAAM,IAAI;AACV,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,cAAc,EAAE,mBAAmB,MAAM,CAAC;AAChD,UAAQ,IAAI,sCAAsC,GAAG,EAAE;AAEvD,MAAI;AAEF,UAAM,WAAW,MAAM,MAAM,GAAG;AAChC,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,mBAAmB,GAAG,KAAK,SAAS,MAAM,EAAE;AAAA,IAC9D;AAEA,UAAM,gBAAgB,SAAS,QAAQ,IAAI,gBAAgB;AAC3D,UAAM,QAAQ,gBAAgB,SAAS,eAAe,EAAE,IAAI;AAC5D,UAAM,OAAO,SAAS,QAAQ,IAAI,MAAM,KAAK;AAG7C,UAAM,mBAAmB,QAAQ;AACjC,QAAI,kBAAkB;AACpB,cAAQ,IAAI,+CAA+C,QAAQ,OAAO,MAAM,QAAQ,CAAC,CAAC,oCAAoC;AAAA,IAChI;AAEA,QAAI,CAAC,SAAS,MAAM;AAClB,YAAMA,QAAO,MAAM,SAAS,YAAY;AACxC,UAAI,CAAC,kBAAkB;AACrB,cAAM,MAAM,IAAI,UAAUA,OAAM,MAAM,OAAO;AAAA,MAC/C;AACA,YAAM,cAAc;AAAA,QAClB,oBAAoBA,MAAK;AAAA,QACzB,6BAA6B,CAAC;AAAA,MAChC,CAAC;AACD,YAAM,IAAI;AACV,aAAOA;AAAA,IACT;AAGA,UAAM,SAAS,SAAS,KAAK,UAAU;AACvC,UAAM,SAAuB,CAAC;AAC9B,QAAI,SAAS;AAEb,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AACV,aAAO,KAAK,KAAK;AACjB,gBAAU,MAAM;AAChB,mBAAa,QAAQ,SAAS,MAAM;AAAA,IACtC;AAGA,UAAM,OAAO,IAAI,WAAW,MAAM;AAClC,QAAI,SAAS;AACb,eAAW,SAAS,QAAQ;AAC1B,WAAK,IAAI,OAAO,MAAM;AACtB,gBAAU,MAAM;AAAA,IAClB;AAEA,UAAM,SAAS,KAAK;AAGpB,QAAI,CAAC,kBAAkB;AACrB,YAAM,MAAM,IAAI,UAAU,QAAQ,MAAM,OAAO;AAC/C,cAAQ,IAAI,wBAAwB,GAAG,MAAM,OAAO,aAAa,OAAO,MAAM,QAAQ,CAAC,CAAC,KAAK;AAAA,IAC/F;AAEA,UAAM,cAAc;AAAA,MAClB,oBAAoB,OAAO;AAAA,MAC3B,6BAA6B,CAAC;AAAA,IAChC,CAAC;AACD,UAAM,IAAI;AAEV,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,UAAM;AAAA,EACR;AACF;AAKA,eAAsB,cACpB,MACA,YACe;AACf,QAAM,QAAQ,cAAc;AAE5B,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,MAAM,KAAK,CAAC;AAClB,iBAAa,GAAG,KAAK,QAAQ,GAAG;AAEhC,QAAI,MAAM,MAAM,IAAI,GAAG,GAAG;AACxB,cAAQ,IAAI,gCAAgC,GAAG,EAAE;AACjD;AAAA,IACF;AAEA,UAAM,eAAe,GAAG;AAAA,EAC1B;AAEA,eAAa,KAAK,QAAQ,KAAK,QAAQ,MAAM;AAC/C;AAKO,SAAS,YAAY,OAAuB;AACjD,MAAI,QAAQ,KAAM,QAAO,GAAG,KAAK;AACjC,MAAI,QAAQ,OAAO,KAAM,QAAO,IAAI,QAAQ,MAAM,QAAQ,CAAC,CAAC;AAC5D,MAAI,QAAQ,OAAO,OAAO,KAAM,QAAO,IAAI,QAAQ,OAAO,MAAM,QAAQ,CAAC,CAAC;AAC1E,SAAO,IAAI,QAAQ,OAAO,OAAO,MAAM,QAAQ,CAAC,CAAC;AACnD;;;ACr3BO,SAAS,cAAuB;AACrC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAC3C,SACE,mBAAmB,KAAK,EAAE;AAAA;AAAA,EAGzB,SAAS,KAAK,EAAE,KAAK,SAAS,KAAK,EAAE,KAAK,CAAC,qBAAqB,KAAK,EAAE;AAE5E;AAUO,SAAS,QAAiB;AAC/B,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAC3C,SAAO,mBAAmB,KAAK,EAAE;AACnC;AASO,SAAS,YAAqB;AACnC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,SAAO,WAAW,KAAK,UAAU,SAAS;AAC5C;AAYO,SAAS,WAAoB;AAClC,SAAO,MAAM,KAAK,UAAU;AAC9B;AAUO,SAAS,eAAwB;AACtC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,SAAO,SAAS,aAAa,UAAU,QAAQ;AACjD;AAYO,SAAS,wBAAwC;AAItD,MAAI,SAAS,KAAK,MAAM,GAAG;AACzB,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AASO,SAAS,eACd,YACA,iBACgB;AAChB,UAAQ,YAAY;AAAA,IAClB,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,UAAI,CAAC,iBAAiB;AACpB,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,aAAO;AAAA,IAET,KAAK;AACH,aAAO;AAAA,IAET,KAAK;AACH,aAAO,kBAAkB,WAAW;AAAA,IAEtC,KAAK;AAAA,IACL;AAEE,YAAM,cAAc,sBAAsB;AAC1C,UAAI,gBAAgB,YAAY,CAAC,iBAAiB;AA
ChD,eAAO;AAAA,MACT;AACA,aAAO;AAAA,EACX;AACF;AAOO,SAAS,wBAAgC;AAC9C,MAAI,MAAM,GAAG;AAEX,WAAO;AAAA,EACT;AAEA,MAAI,UAAU,GAAG;AAEf,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAWO,SAAS,wBAAiC;AAE/C,MAAI,SAAS,GAAG;AACd,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAUO,SAAS,WAAoB;AAClC,MAAI,OAAO,cAAc,YAAa,QAAO;AAC7C,QAAM,KAAK,UAAU,UAAU,YAAY;AAE3C,SAAO,SAAS,KAAK,EAAE,KAAK,CAAC,kCAAkC,KAAK,EAAE;AACxE;AAaO,SAAS,sBAA+B;AAC7C,SAAO,SAAS,KAAK,MAAM;AAC7B;AAUO,SAAS,+BAAwC;AACtD,MAAI,OAAO,WAAW,YAAa,QAAO;AAC1C,SAAO,uBAAuB,UAAU,6BAA6B;AACvE;AAaO,SAAS,qBAA8B;AAC5C,SAAO,MAAM,KAAK,6BAA6B;AACjD;AAaO,SAAS,yBAAkC;AAChD,SAAO,MAAM;AACf;;;ACpOA,IAAM,SAAS,aAAa,YAAY;AAGxC,IAAI,cAAgC;AACpC,IAAI,gBAAuC;AAG3C,IAAM,gBAAgB;AAUtB,eAAsB,oBAAsC;AAE1D,MAAI,MAAM,GAAG;AACX,WAAO,MAAM,6CAA6C;AAC1D,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,aAAa,GAAG;AACnB,WAAO,MAAM,2CAA2C;AACxD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAM,UAAU,IAAI,eAAe;AACnD,QAAI,CAAC,SAAS;AACZ,aAAO,MAAM,oCAAoC;AACjD,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,MAAM,QAAQ,cAAc;AAC3C,QAAI,CAAC,QAAQ;AACX,aAAO,MAAM,uCAAuC;AACpD,aAAO;AAAA,IACT;AAGA,WAAO,QAAQ;AAEf,WAAO,MAAM,qCAAqC;AAClD,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,MAAM,iDAAiD,EAAE,OAAO,IAAI,CAAC;AAC5E,WAAO;AAAA,EACT;AACF;AAOA,SAAS,cAAc,KAAsB;AAE3C,MAAI,IAAI,KAAK,YAAY;AAGzB,QAAM,aAAa,sBAAsB;AACzC,QAAM,cAAc,sBAAsB;AAE1C,MAAI,IAAI,KAAK,aAAa;AAC1B,MAAI,IAAI,KAAK,OAAO;AACpB,MAAI,IAAI,KAAK,QAAQ;AAErB,SAAO,KAAK,mBAAmB;AAAA,IAC7B;AAAA,IACA,MAAM;AAAA,IACN,OAAO;AAAA,IACP,UAAU,MAAM,IAAI,QAAQ,SAAS,IAAI,YAAY;AAAA,EACvD,CAAC;AACH;AAeA,eAAsB,eACpB,SACoB;AAEpB,MAAI,eAAe,kBAAkB,SAAS;AAC5C,WAAO;AAAA,EACT;AAGA,MAAI,eAAe,kBAAkB,SAAS;AAC5C,WAAO;AAAA,MACL,oCAAoC,aAAa,8BAC3B,OAAO;AAAA,IAC/B;AACA,WAAO;AAAA,EACT;AAEA,SAAO,KAAK,6BAA6B,OAAO,aAAa;AAE7D,MAAI;AACF,QAAI,YAAY,QAAQ;AAEtB,YAAM,SAAS,MAAM,OAAO,iBAAiB;AAC7C,oBAAc,OAAO,WAAW;AAAA,IAClC,OAAO;AAEL,YAAM,SAAS,MAAM,OAAO,wBAAwB;AACpD,oBAAc,OAAO,WAAW;AAAA,IAClC;AAEA,oBAAgB;AAGhB,kBAAc,WAAW;AAEzB,WAAO,KAAK,oCAAoC,EAAE,QAAQ,CAAC;AAE3D,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,MAAM,oCAAoC,OAAO,YAAY;AAAA,MAClE,OAAO;AAAA,IACT,CAAC;AACD,UAAM,IAAI;AAAA,MACR,gCAAgC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,IAClF;AAAA,EACF;AACF;AAWA,eAAsB,4BACpB,aAAgC,QACsB;AAEtD,QAAM,kBAAkB,MAAM,kBAAkB;AAGhD,QAAM,UAAU,eAAe,YAAY,eAAe;AAE1D,SAAO,KAAK,+BAA+B;AAAA,IACzC;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,EACnB,CAAC;AAGD,QAAM,MAAM,MAAM,eAAe,OAAO;AAExC,SAAO,EAAE,KAAK,QAAQ;AACxB;AAUO,SAAS,kBACd,SACgB;AAChB,MAAI,YAAY,UAAU;AACxB,WAAO;AAAA,MACL,oBAAoB;AAAA,QAClB;AAAA,UACE,MAAM;AAAA,UACN,iBAAiB;AAAA;AAAA,QACnB;AAAA,MACF;AAAA,MACA,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAGA,SAAO;AAAA,IACL,oBAAoB,CAAC,MAAM;AAAA,IAC3B,wBAAwB;AAAA,EAC1B;AACF;AAWA,eAAsB,0BACpB,aACA,kBAIC;AACD,QAAM,MAAM,MAAM,eAAe,gBAAgB;AAGjD,QAAM,YAAY,IAAI,WAAW,WAAW;AAE5C,MAAI,qBAAqB,UAAU;AACjC,QAAI;AACF,YAAMC,WAAU,kBAAkB,QAAQ;AAC1C,YAAMC,WAAU,MAAM,IAAI,iBAAiB,OAAO,WAAWD,QAAO;AAEpE,aAAO,KAAK,qCAAqC;AACjD,aAAO,EAAE,SAAAC,UAAS,SAAS,SAAS;AAAA,IACtC,SAAS,KAAK;AACZ,aAAO,KAAK,wDAAwD;AAAA,QAClE,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IAEH;AAAA,EACF;AAGA,QAAM,UAAU,kBAAkB,MAAM;AACxC,QAAM,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,OAAO;AAEpE,SAAO,KAAK,mCAAmC;AAC/C,SAAO,EAAE,SAAS,SAAS,OAAO;AACpC;AAKO,SAAS,mBAA0C;AACxD,SAAO;AACT;AAKO,SAAS,sBAA+B;AAC7C,SAAO,gBAAgB;AACzB;;;AC/RO,IAAM,kBAAkB;AAAA,EAC7B;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAe;AAAA,EAAmB;AAAA,EACnE;AAAA,EAAa;AAAA,EAAmB;AAAA,EAChC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAmB;AAAA,EACpD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAkB;AAAA,EACrD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAiB;AAAA,EACpD;AAAA,EAAe;AAAA,EACf;AAAA,EAAc;AAAA,EAAW;AAAA,EAAW;AAAA,EACpC;AAAA,EAAc;AAAA,EAAmB;AAAA,EAAoB;AAAA,EAAkB;AAAA,EACvE;AAAA,EAAe;AAAA,EAAa;AAAA
,EAAsB;AAAA,EAClD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAkB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EACvD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAoB;AAAA,EACzD;AAAA,EAAoB;AAAA,EACpB;AAAA,EAAiB;AAAA,EAAkB;AACrC;AAGO,IAAM,oBAAoB;AAMjC,IAAM,wBAA4C;AAAA,EAChD,CAAC,WAAW,UAAU;AAAA,EACtB,CAAC,aAAa,YAAY;AAAA,EAC1B,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,oBAAoB,mBAAmB;AAAA,EACxC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,oBAAoB,mBAAmB;AAAA,EACxC,CAAC,sBAAsB,qBAAqB;AAAA,EAC5C,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,gBAAgB,eAAe;AAAA,EAChC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,gBAAgB,eAAe;AAAA,EAChC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,mBAAmB,kBAAkB;AAAA,EACtC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,kBAAkB,iBAAiB;AAAA,EACpC,CAAC,iBAAiB,gBAAgB;AAAA,EAClC,CAAC,eAAe,cAAc;AAChC;AAGA,IAAM,wBAA4C,sBAAsB,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM;AAAA,EACtF,gBAAgB,QAAQ,CAAmC;AAAA,EAC3D,gBAAgB,QAAQ,CAAmC;AAC7D,CAAC,EAAE,OAAO,CAAC,CAAC,GAAG,CAAC,MAAM,MAAM,MAAM,MAAM,EAAE;AAOnC,SAAS,sBAAsB,OAAmC;AACvE,QAAM,SAAS,IAAI,aAAa,KAAK;AACrC,aAAW,CAAC,MAAM,IAAI,KAAK,uBAAuB;AAChD,UAAM,OAAO,MAAM,IAAI,IAAI,MAAM,IAAI,KAAK;AAC1C,WAAO,IAAI,IAAI;AACf,WAAO,IAAI,IAAI;AAAA,EACjB;AACA,SAAO;AACT;AASO,IAAM,wBAAwB;AAAA,EACnC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAe;AAAA,EAAmB;AAAA,EACnE;AAAA,EAAa;AAAA,EAAmB;AAAA,EAChC;AAAA,EAAgB;AAAA,EAAiB;AAAA,EAAmB;AAAA,EACpD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAkB;AAAA,EACrD;AAAA,EAAiB;AAAA,EAAkB;AAAA,EAAiB;AAAA,EACpD;AAAA,EAAe;AAAA,EACf;AAAA,EAAc;AAAA,EAAW;AAAA,EACzB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAsB;AAAA,EACtB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAe;AAAA,EACpD;AAAA,EAAkB;AAAA,EAAkB;AAAA,EAAmB;AAAA,EACvD;AAAA,EAAkB;AAAA,EAAmB;AAAA,EAAoB;AAAA,EACzD;AAAA,EAAoB;AAAA,EACpB;AAAA,EAAiB;AAAA,EAAkB;AAAA,EACnC;AAAA,EAAc;AAAA,EAAmB;AAAA,EAAoB;AACvD;AAQO,IAAM,yBAAmC,sBAAsB;AAAA,EACpE,CAAC,SAAS,gBAAgB,QAAQ,IAAsC;AAC1E;AAQO,SAAS,oBAAoB,OAAmC;AACrE,QAAM,SAAS,IAAI,aAAa,EAAE;AAClC,WAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AAC3B,WAAO,uBAAuB,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,EAC7C;AACA,SAAO;AACT;;;ACnFA,IAAMC,UAAS,aAAa,UAAU;AAyB/B,IAAM,YAAY;AAAA,EACvB;AAAA,EAAS;AAAA,EAAO;AAAA,EAAQ;AAAA,EAAS;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC1D;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAC7C;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AAAA,EAAK;AACzD;AAmBO,IAAM,oBAAN,MAAwB;AAAA,EAW7B,YAAY,QAAiC;AAV7C,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAIpB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AACd,SAAK,qBAAqB,OAAO,sBAAsB;AAAA,EACzD;AAAA,EAQA,IAAI,UAAoC;AACtC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAA2B;AAC/B,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,iBAAiB;AAAA,MACjD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO,WAAW;AAAA,IACpD,CAAC;AAED,QAAI;AAIF,MAAAA,QAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,WAAW,OAAO,CAAC;AAEpF,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,WAAW,MAAM;AACxF,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAGvC,YAAI,CAAC,aAAa;AAChB,UAAAA,QAAO,KAAK,oDAAoD,EAAE,SAAS,CAAC;AAC5E,gBAAM,MAAM,OAAO,QAAQ;AAC3B,UAAAA,QAAO,KA
AK,uDAAuD,EAAE,SAAS,CAAC;AAC/E,wBAAc,MAAM,eAAe,QAAQ;AAAA,QAC7C;AAAA,MACF,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAGA,UAAI,CAAC,aAAa;AAChB,cAAM,WAAW,yBAAyB,QAAQ;AAClD,QAAAA,QAAO,MAAM,UAAU,EAAE,UAAU,SAAS,CAAC;AAC7C,cAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAGD,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,MAAAA,QAAO,KAAK,4CAA4C;AAAA,QACtD,mBAAmB,KAAK;AAAA,MAC1B,CAAC;AAGD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,KAAK,IAAK,iBAAiB,OAAO,WAAW,cAAc;AAEhF,MAAAA,QAAO,KAAK,qCAAqC;AAAA,QAC/C,mBAAmB,KAAK;AAAA,QACxB,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,QAAQ,KAAK,QAAQ;AAAA,QACrB,SAAS,KAAK,QAAQ;AAAA,MACxB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAID,MAAAA,QAAO,MAAM,oDAAoD;AACjE,YAAM,cAAc,YAAY,IAAI;AACpC,YAAM,cAAc,IAAI,aAAa,IAAK;AAC1C,YAAM,KAAK,MAAM,aAAa,CAAC;AAC/B,YAAM,eAAe,YAAY,IAAI,IAAI;AACzC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,cAAc,KAAK,MAAM,YAAY;AAAA,QACrC,SAAS,KAAK;AAAA,MAChB,CAAC;AACD,iBAAW,gBAAgB,2BAA2B,cAAc;AAAA,QAClE,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,MACJ,cACA,gBAAwB,GACC;AACzB,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAKA,UAAM,mBAAmB,IAAI,aAAa,YAAY;AAGtD,QAAI;AACJ,QAAI,iBAAiB,WAAW,MAAO;AACrC,cAAQ;AAAA,IACV,WAAW,iBAAiB,SAAS,MAAO;AAE1C,cAAQ,IAAI,aAAa,IAAK;AAC9B,YAAM,IAAI,kBAAkB,CAAC;AAAA,IAC/B,OAAO;AAEL,cAAQ,iBAAiB,MAAM,GAAG,IAAK;AAAA,IACzC;AAGA,UAAM,WAAW,IAAI,aAAa,KAAK,kBAAkB;AACzD,aAAS,KAAK,IAAI,eAAe,KAAK,qBAAqB,CAAC,CAAC,IAAI;AAIjE,UAAM,YAAY,IAAI,aAAa,KAAK;AACxC,UAAM,eAAe,IAAI,aAAa,QAAQ;AAE9C,UAAM,QAAQ;AAAA,MACZ,SAAS,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,IAAK,CAAC;AAAA,MAC9D,YAAY,IAAI,KAAK,IAAK,OAAO,WAAW,cAAc,CAAC,GAAG,KAAK,kBAAkB,CAAC;AAAA,IACxF;AAGA,WAAO,KAAK,eAAe,KAAK;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,QAAgC;AAChD,UAAM,SAAmB,CAAC;AAC1B,QAAI,YAAY;AAEhB,eAAW,SAAS,QAAQ;AAE1B,UAAI,SAAS;AACb,UAAI,SAAS,MAAM,CAAC;AACpB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAI,MAAM,CAAC,IAAI,QAAQ;AACrB,mBAAS,MAAM,CAAC;AAChB,mBAAS;AAAA,QACX;AAAA,MACF;AAGA,UAAI,WAAW,aAAa,WAAW,GAAG;AACxC,eAAO,KAAK,MAAM;AAAA,MACpB;AACA,kBAAY;AAAA,IACd;AAGA,WAAO,OAAO,IAAI,OAAK,UAAU,CAAC,MAAM,MAAM,MAAM,UAAU,CAAC,CAAC,EAAE,KAAK,EAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKQ,eACN,OACyB;AACzB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,UAClD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B;AAAA,QAC7B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAClC,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAC7C,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAE5C,gBAAM,YAAY,QAAQ,YAAY;AACtC,gBAAM,mBAAmB,QAAQ,aAAa;AAE9C,cAAI,CAAC,aAAa,CAAC,kBAAkB;AACnC,kBAAM,IAAI,MAAM,4BAA4B;AAAA,UAC9C;AAEA,gBAAM,UAAU,UAAU;AAC1B,gBAAM,iBAAiB,iBAAiB;AAGxC,gBAAM,eAAe,UAAU,KAAK,CAAC;AACrC,gBAAM,eAAe,iBAAiB,KAAK,CAAC;AAC5C,gBAAM,eAAe,UAAU,KAAK,CAAC;AACrC,gBAAM,iBAAiB,iBAAiB,KAAK,CAAC;AAG9C,gBAAM,YAA4B,CAAC;AACnC,gBAAM,cAA8B,CAAC;AAErC
,mBAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,sBAAU,KAAK,QAAQ,MAAM,IAAI,eAAe,IAAI,KAAK,YAAY,CAAC;AAAA,UACxE;AAEA,mBAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,kBAAM,WAAW,eAAe,MAAM,IAAI,iBAAiB,IAAI,KAAK,cAAc;AAElF,wBAAY,KAAK,sBAAsB,QAAQ,CAAC;AAAA,UAClD;AAGA,gBAAM,OAAO,KAAK,UAAU,SAAS;AAErC,UAAAA,QAAO,MAAM,uBAAuB;AAAA,YAClC,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD;AAAA,YACA;AAAA,YACA,YAAY,KAAK;AAAA,UACnB,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,wBAAwB;AAAA,YACxB,wBAAwB;AAAA,UAC1B,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA,WAAW;AAAA,YACX;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,aAA2B,MAA8C;AACrF,UAAM,QAAQ,gBAAgB,QAAQ,IAAI;AAC1C,QAAI,UAAU,IAAI;AAChB,YAAM,IAAI,MAAM,uBAAuB,IAAI,EAAE;AAAA,IAC/C;AACA,WAAO,YAAY,KAAK;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAAA;AA1Wa,kBAoBJ,oBAAoB;;;AC9D7B,IAAMC,UAAS,aAAa,kBAAkB;AAK9C,IAAM,uBAAuB,oBAAI,IAAoB;AACrD,gBAAgB,QAAQ,CAAC,MAAM,UAAU;AACvC,uBAAqB,IAAI,MAAM,KAAK;AACtC,CAAC;AAKD,IAAM,iBAAiB,IAAI,IAAY,sBAAsB;;;AC1D7D,IAAMC,UAAS,aAAa,SAAS;AA8C9B,IAAM,mBAAN,MAAM,kBAAiB;AAAA,EAO5B,YAAY,SAAwB,CAAC,GAAG;AALxC,SAAQ,WAAsD;AAC9D,SAAQ,eAA8B;AACtC,SAAQ,YAAY;AACpB,SAAQ,gBAAwB;AAG9B,SAAK,SAAS;AAAA,MACZ,OAAO,OAAO,SAAS;AAAA,MACvB,cAAc,OAAO,gBAAgB;AAAA,MACrC,UAAU,OAAO,YAAY;AAAA,MAC7B,MAAM,OAAO,QAAQ;AAAA,MACrB,OAAO,OAAO,SAAS;AAAA,MACvB,QAAQ,OAAO,UAAU;AAAA,MACzB,gBAAgB,OAAO;AAAA,MACvB,OAAO,OAAO;AAAA,MACd,mBAAmB,OAAO,sBAAsB;AAAA;AAAA,IAClD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,oBAAsC;AACjD,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,YAAsG;AAC/G,QAAI,KAAK,WAAW;AAClB,MAAAA,QAAO,MAAM,mCAAmC;AAChD,aAAO,KAAK,WAAW;AACrB,cAAM,IAAI,QAAQ,aAAW,WAAW,SAAS,GAAG,CAAC;AAAA,MACvD;AACA;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,aAAa;AAGpC,QAAI,KAAK,aAAa,QAAQ,KAAK,iBAAiB,WAAW;AAC7D,MAAAA,QAAO,MAAM,wBAAwB,EAAE,OAAO,UAAU,CAAC;AACzD;AAAA,IACF;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gBAAgB;AAAA,MAChD,iBAAiB;AAAA,MACjB,iBAAiB,KAAK,OAAO;AAAA,MAC7B,kBAAkB,KAAK,OAAO;AAAA,IAChC,CAAC;AAED,QAAI;AACF,YAAM,YAAY,YAAY,IAAI;AAElC,MAAAA,QAAO,KAAK,iBAAiB;AAAA,QAC3B,OAAO;AAAA,QACP,OAAO,KAAK,OAAO;AAAA,QACnB,QAAQ,KAAK,OAAO;AAAA,QACpB,cAAc,KAAK,OAAO;AAAA,MAC5B,CAAC;AAGD,UAAI,KAAK,aAAa,QAAQ,KAAK,iBAAiB,WAAW;AAC7D,QAAAA,QAAO,MAAM,uBAAuB,EAAE,UAAU,KAAK,aAAa,CAAC;AACnE,cAAM,KAAK,SAAS,QAAQ;AAC5B,aAAK,WAAW;AAAA,MAClB;AAIA,YAAM,YAAY,MAAM,kBAAiB,kBAAkB;AAC3D,YAAM,SAAS,KAAK,OAAO,WAAW,SACjC,YAAY,WAAW,SACxB,KAAK,OAAO;AAEhB,MAAAA,QAAO,KAAK,qBAAqB,EAAE,QAAQ,UAAU,CAAC;AAItD,UAAI,mBAAmB;AACvB,UAAI,oBAAoB;AACxB,UAAI,kBAAkB;AACtB,UAAI,iBAAiB;AACrB,UAAI,eAAe;AACnB,UAAI,IAAI,SAAS,KAAK,MAAM;AAC1B,YAAI,SAAS,KAAK,KAAK,QAAQ;AAC/B,YAAI,SAAS,KAAK,KAAK,aAAa;AAAA,MACtC;AAEA,MAAAA,QAAO,KAAK,kCAAkC;AAAA,QAC5C,kBAAkB,IAAI;AAAA,QACtB,iBAAiB,IAAI;AAAA,QACrB,cAAc,IAAI;AAAA,MACpB,CAAC;AAGD,YAAM,kBAAuB;AAAA,QAC3B,OAAO,KAAK,OAAO;AAAA,QACnB;AAAA,QACA,mBAAmB;AAAA;AAAA,QAEnB,UAAU,UAAU,SAAS,gBAAgB,IAAI,kBAAkB;AAAA;AAAA,QAEnE,GAAI,KAAK,OAAO,SAAS,EAAE,OAAO,KAAK,OAAO,MAAM;AAAA,MACtD;AAGA,UAAI,WAAW,UAAU;AACvB,wBAAgB,kBAAkB;AAAA,UAChC,oBAAoB,CAAC,QAAQ;AAAA,QAC/B;AACA,QA
AAA,QAAO,KAAK,oCAAoC;AAAA,MAClD;AAEA,WAAK,WAAW,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAGA,WAAK,gBAAgB;AAErB,WAAK,eAAe;AACpB,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,OAAO;AAAA,QACP,YAAY,KAAK,MAAM,UAAU;AAAA,MACnC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,wBAAwB;AAAA,MAC1B,CAAC;AACD,YAAM,IAAI;AAAA,IACZ,SAAS,OAAO;AACd,YAAM,eAAe;AAAA,QACnB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,MAAM,QAAQ;AAAA,QAC9C,MAAM,iBAAiB,QAAQ,MAAM,OAAO;AAAA,QAC5C;AAAA,MACF;AACA,MAAAA,QAAO,MAAM,wBAAwB,YAAY;AACjD,YAAM,aAAa,KAAc;AACjC,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,WACJ,OACA,SAY8B;AAC9B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAKA,UAAM,YAAY,IAAI,aAAa,KAAK;AAExC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,MACtD,iBAAiB,UAAU;AAAA,MAC3B,oBAAoB,UAAU,SAAS;AAAA,MACvC,iBAAiB,KAAK;AAAA,IACxB,CAAC;AAED,QAAI;AACF,YAAM,aAAa,YAAY,IAAI;AAEnC,YAAM,mBAAmB,UAAU,SAAS;AAC5C,YAAM,eAAe,mBAAmB;AAExC,MAAAA,QAAO,MAAM,0BAA0B;AAAA,QACrC,cAAc,UAAU;AAAA,QACxB,iBAAiB,iBAAiB,QAAQ,CAAC;AAAA,QAC3C;AAAA,MACF,CAAC;AAMD,YAAM,oBAAyB;AAAA;AAAA,QAE7B,OAAO;AAAA,QACP,WAAW;AAAA;AAAA,QAGX,gBAAgB,SAAS,iBAAiB,eAAe,mBAAmB;AAAA,QAC5E,iBAAiB,SAAS,kBAAkB,eAAe,IAAI;AAAA;AAAA,QAG/D,mBAAmB,SAAS,oBAAoB;AAAA,QAChD,sBAAsB;AAAA,MACxB;AAGA,UAAI,KAAK,OAAO,cAAc;AAC5B,0BAAkB,WAAW,SAAS,YAAY,KAAK,OAAO;AAC9D,0BAAkB,OAAO,SAAS,QAAQ,KAAK,OAAO;AAAA,MACxD;AAGA,YAAM,YAAY,MAAM,KAAK,SAAS,WAAW,iBAAiB;AAGlE,YAAM,SAAS,MAAM,QAAQ,SAAS,IAAI,UAAU,CAAC,IAAI;AAEzD,YAAM,kBAAkB,YAAY,IAAI,IAAI;AAG5C,UAAI,cAAc,OAAO;AACzB,UAAI,KAAK,OAAO,mBAAmB;AACjC,sBAAc,KAAK,sBAAsB,WAAW;AAAA,MACtD;AAEA,YAAM,gBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,UAAU,KAAK,OAAO;AAAA,QACtB;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB;AAEA,MAAAA,QAAO,MAAM,0BAA0B;AAAA,QACrC,MAAM,cAAc;AAAA,QACpB,iBAAiB,KAAK,MAAM,eAAe;AAAA,QAC3C,aAAa,OAAO,QAAQ,UAAU;AAAA,MACxC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,6BAA6B;AAAA,QAC7B,uBAAuB,cAAc,KAAK;AAAA,MAC5C,CAAC;AACD,YAAM,IAAI;AAEV,aAAO;AAAA,IACT,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,oBAAoB,EAAE,MAAM,CAAC;AAC1C,YAAM,aAAa,KAAc;AACjC,YAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,IAC1D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,oBACJ,OACA,SACA,UACA,SAM8B;AAC9B,QAAI,CAAC,KAAK,UAAU;AAClB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,gCAAgC;AAAA,MAChE,iBAAiB,MAAM;AAAA,MACvB,oBAAoB,MAAM,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AACF,YAAM,aAAa,YAAY,IAAI;AAEnC,MAAAA,QAAO,MAAM,oCAAoC;AAAA,QAC/C,cAAc,MAAM;AAAA,QACpB,kBAAkB,MAAM,SAAS,MAAO,QAAQ,CAAC;AAAA,MACnD,CAAC;AAOD,YAAM,oBAAyB;AAAA,QAC7B,OAAO;AAAA,QACP,WAAW;AAAA,QACX,gBAAgB,SAAS,gBAAgB;AAAA,QACzC,iBAAiB,SAAS,iBAAiB;AAAA,QAC3C,mBAAmB;AAAA,QACnB,sBAAsB;AAAA,MACxB;AAGA,UAAI,KAAK,OAAO,cAAc;AAC5B,0BAAkB,WAAW,SAAS,YAAY,KAAK,OAAO;AAC9D,0BAAkB,OAAO,SAAS,QAAQ,KAAK,OAAO;AAAA,MACxD;AAGA,YAAM,YAAY,MAAM,KAAK,SAAS,OAAO,iBAAiB;AAG9D,YAAM,SAAS,MAAM,QAAQ,SAAS,IAAI,UAAU,CAAC,IAAI;AAEzD,YAAM,kBAAkB,YAAY,IAAI,IAAI;AAG5C,UAAI,OAAO,UAAU,SAAS;AAC5B,mBAAW,SAAS,OAAO,QAAQ;AACjC,kBAAQ;AAAA,YACN,MAAM,MAAM;AAAA,YACZ,WAAW,MAAM;AAAA,UACnB,CAAC;AAAA,QACH;AAAA,MACF;AAGA,UAAI,UAAU;AACZ,iBAAS,OAAO,IAAI;AAAA,MACtB;AAEA,MAAAA,QAAO,MAAM,oCAAoC;AAAA,QAC/C,MAAM,OAAO;AAAA,QACb,iBAAiB,KAAK,MAAM,eAAe;AAAA,QAC3C,aAAa,OAAO,QAAQ,UAAU;AAAA,MACxC,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,6BAA6B;AAAA,QAC7B,wBAAwB,OAAO,QAAQ,UAAU;AAAA,MACnD,CAAC;AACD,YAAM,IAAI;AAEV,aAAO;AAAA,QACL,MAAM,OAAO;AAAA,QACb,UAAU,KAAK,OAAO;AAAA,QACtB;AAAA,QACA,QAAQ,OAAO;AAAA,MACjB;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,8BAA8B,EAAE,MAAM,CAAC;AACpD,YAAM,aAAa,KAAc;AACjC,YAAM,IAAI,MAAM,2CAA2C,KAAK,EAAE;AAAA,IACpE;AAAA,
EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,UAAU;AACjB,MAAAA,QAAO,MAAM,mBAAmB,EAAE,OAAO,KAAK,aAAa,CAAC;AAC5D,YAAM,KAAK,SAAS,QAAQ;AAC5B,WAAK,WAAW;AAChB,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAoB;AACtB,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAkB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAuB;AAE7B,QAAI,KAAK,OAAO,gBAAgB;AAC9B,aAAO,KAAK,OAAO;AAAA,IACrB;AAGA,QAAI,YAAY,0BAA0B,KAAK,OAAO,KAAK;AAG3D,QAAI,CAAC,KAAK,OAAO,cAAc;AAC7B,mBAAa;AAAA,IACf;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,sBAAsB,MAAsB;AAGlD,UAAM,UAAU,KAAK,QAAQ,iBAAiB,EAAE;AAGhD,WAAO,QAAQ,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAAA,EAC3C;AACF;;;ACxbA,IAAMC,UAAS,aAAa,cAAc;AASnC,IAAM,wBAAN,MAAsD;AAAA,EAU3D,YAAY,QAA4B;AATxC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAGpB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAkC;AACtC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,MACrD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO,WAAW;AAAA,IACpD,CAAC;AAED,QAAI;AAEF,YAAM,aAAa,KAAK,OAAO,WAAW;AAC1C,MAAAA,QAAO,KAAK,2BAA2B,EAAE,WAAW,CAAC;AAErD,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,UAAU;AACrE,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAEvC,YAAI,CAAC,aAAa;AAChB,UAAAA,QAAO,KAAK,oDAAoD,EAAE,SAAS,CAAC;AAC5E,gBAAM,MAAM,OAAO,QAAQ;AAC3B,wBAAc,MAAM,eAAe,QAAQ;AAAA,QAC7C;AAAA,MACF,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,MACrD;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,KAAK,IAAK,iBAAiB,OAAO,WAAW,cAAc;AAEhF,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,QAAQ,KAAK,QAAQ;AAAA,QACrB,SAAS,KAAK,QAAQ;AAAA,MACxB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAGD,MAAAA,QAAO,MAAM,0BAA0B;AACvC,YAAM,cAAc,YAAY,IAAI;AACpC,YAAM,cAAc,IAAI,aAAa,IAAK;AAC1C,YAAM,KAAK,MAAM,WAAW;AAC5B,YAAM,eAAe,YAAY,IAAI,IAAI;AACzC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,cAAc,KAAK,MAAM,YAAY;AAAA,QACrC,SAAS,KAAK;AAAA,MAChB,CAAC;AACD,iBAAW,gBAAgB,2BAA2B,cAAc;AAAA,QAClE,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,MACJ,cACA,gBACwB;AACxB,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAGA,UAAM,YAAY,IAAI,aAAa,YAAY;AAE/C,UAAM,QAAQ;AAAA,MACZ,kBAAkB,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,UAAU,MAAM,CAAC;AAAA,IACpF;AAEA,WAAO
,KAAK,eAAe,OAAO,UAAU,MAAM;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKQ,eACN,OACA,cACwB;AACxB,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,UACtD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B;AAAA,QAC7B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAClC,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAC7C,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAE5C,gBAAM,mBAAmB,QAAQ,aAAa;AAE9C,cAAI,CAAC,kBAAkB;AACrB,kBAAM,IAAI,MAAM,uCAAuC;AAAA,UACzD;AAEA,gBAAM,iBAAiB,iBAAiB;AACxC,gBAAM,YAAY,iBAAiB,KAAK,CAAC;AACzC,gBAAM,iBAAiB,iBAAiB,KAAK,CAAC;AAG9C,gBAAM,cAA8B,CAAC;AACrC,mBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,kBAAM,WAAW,eAAe,MAAM,IAAI,iBAAiB,IAAI,KAAK,cAAc;AAClF,kBAAM,WAAW,oBAAoB,QAAQ;AAC7C,wBAAY,KAAK,sBAAsB,QAAQ,CAAC;AAAA,UAClD;AAEA,UAAAA,QAAO,MAAM,uBAAuB;AAAA,YAClC,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD;AAAA,YACA;AAAA,UACF,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,oBAAoB;AAAA,UACtB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;;;AC9QA,IAAMC,UAAS,aAAa,eAAe;AAkCpC,SAAS,cAAc,QAA6C;AACzE,QAAM,OAAO,OAAO,QAAQ;AAC5B,QAAM,kBAAkB,OAAO,mBAAmB;AAGlD,MAAI;AAEJ,MAAI,SAAS,OAAO;AAClB,aAAS;AACT,IAAAA,QAAO,KAAK,4CAA4C;AAAA,EAC1D,WAAW,SAAS,OAAO;AACzB,aAAS;AACT,IAAAA,QAAO,KAAK,uCAAuC;AAAA,EACrD,OAAO;AAEL,aAAS,oBAAoB;AAC7B,IAAAA,QAAO,KAAK,gCAAgC;AAAA,MAC1C;AAAA,MACA,UAAU,SAAS;AAAA,IACrB,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ;AACV,IAAAA,QAAO,KAAK,8CAA8C;AAC1D,WAAO,IAAI,sBAAsB;AAAA,MAC/B,UAAU,OAAO;AAAA,IACnB,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,IAAI,kBAAkB;AAAA,IACxC,UAAU,OAAO;AAAA,IACjB,SAAS,OAAO,cAAc;AAAA,IAC9B,oBAAoB,OAAO;AAAA,EAC7B,CAAC;AAED,MAAI,iBAAiB;AACnB,IAAAA,QAAO,KAAK,8CAA8C;AAC1D,WAAO,IAAI,oBAAoB,aAAa,MAAM;AAAA,EACpD;AAEA,EAAAA,QAAO,KAAK,0CAA0C;AACtD,SAAO;AACT;AAQA,IAAM,sBAAN,MAAoD;AAAA,EAKlD,YAAY,aAAgC,QAA6B;AAFzE,SAAQ,gBAAgB;AAGtB,SAAK,iBAAiB;AACtB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAkC;AACtC,QAAI;AACF,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC,SAAS,OAAO;AACd,MAAAA,QAAO,KAAK,oDAAoD;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,CAAC;AAGD,UAAI;AACF,cAAM,KAAK,eAAe,QAAQ;AAAA,MACpC,QAAQ;AAAA,MAER;AAGA,WAAK,iBAAiB,IAAI,sBAAsB;AAAA,QAC9C,UAAU,KAAK,OAAO;AAAA,MACxB,CAAC;AACD,WAAK,gBAAgB;AAErB,MAAAA,QAAO,KAAK,8CAA8C;AAC1D,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,cAA4B,eAAgD;AACtF,WAAO,KAAK,eAAe,MAAM,cAAc,aAAa;AAAA,EAC9D;AAAA,EAEA,MAAM,UAAyB;AAC7B,WAAO,KAAK,eAAe,QAAQ;AAAA,EACrC;AACF;;;AClHA,IAAMC,UAAS,aAAa,WAAW;AAkFhC,IAAM,qBAAN,MAAyB;AAAA,EA2B9B,YAAY,QAAyB;AA1BrC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AAGpB;AAAA,SAAQ,QAAuB;AAU/B;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGxD;AAAA,SAAQ,kBAAkC,CAAC;AAC3C,SAAQ,cAAc;AAGtB;AAAA,SAAQ,WAA0B;AAGhC,UAAM,aAAa,OAAO,cAAc;AAExC,QAAI,eAAe,OAAQ,eAAe,MAAO;AAC/C,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO,WAAW;AAAA,MAC3B;AAAA,MACA,WAAW,OAAO,aAAa;AAAA,MAC/B,uBAAuB,OAAO,yBAAyB;AAAA,IACzD;AAGA,SAAK,YAAY,eAAe,
OAAQ,MAAM;AAC9C,SAAK,cAAc,eAAe,OAAQ,KAAK;AAC/C,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAQ,KAAK,YAAY,KAAK,OAAO,aAAc;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAA8B;AAClC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,kBAAkB;AAAA,MAClD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO;AAAA,MACvC,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AAIF,MAAAA,QAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,QAAQ,CAAC;AAE1E,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,OAAO;AAC9E,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,QAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AAEzC,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,QAAO,MAAM,4BAA4B,EAAE,SAAS,CAAC;AACrD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AAAA,MACzC,OAAO;AACL,QAAAA,QAAO,MAAM,8BAA8B,EAAE,SAAS,CAAC;AACvD,sBAAc,MAAM,eAAe,QAAQ;AAAA,MAC7C;AAEA,MAAAA,QAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAID,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,cAAc;AAG1E,WAAK,MAAM;AAEX,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,QAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,OAAO;AAAA,MACzB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,QACzC,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,QAAQ,IAAI,KAAK,IAAI,OAAO,WAAW,IAAI,aAAa,IAAI,IAAI,GAAG,GAAG,CAAC,GAAG,GAAG,GAAG,CAAC;AAEtF,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAEhD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AAGnB,QAAI,CAAC,KAAK,UAAU;AAClB,UAAI;AACF,aAAK,WAAW,IAAI,KAAK,IAAI;AAAA,UAC3B;AAAA,UACA,IAAI,cAAc,CAAC,OAAO,KAAK,OAAO,UAAU,CAAC,CAAC;AAAA,UAClD,CAAC;AAAA,QACH;AAAA,MACF,SAAS,GAAG;AAGV,QAAAA,QAAO,KAAK,4DAA4D;AAAA,UACtE,OAAO,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AAAA,QAClD,CAAC;AACD,aAAK,WAAW,IAAI,KAAK,IAAI;AAAA,UAC3B;AAAA,UACA,CAAC,OAAO,KAAK,OAAO,UAAU,CAAC;AAAA,UAC/B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAQ,YAA8C;AAC1D,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,QAAI,WAAW,WAAW,KAAK,WAAW;AACxC,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,SAAS,iBAAiB,WAAW,MAAM;AAAA,MAEjF;AAAA,IACF;AAEA,WAAO,KAAK,eAAe,UAAU;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aACJ,OACA,UAOI,CAAC,GACqB;AAC1B,UAAM;AAAA,MACJ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,MACvB,cAAc;AAAA,IAChB,IAAI;AAEJ,SAAK,MAAM;AAEX,UAAM,WAA4B,CAAC;AACnC,UAAM,kBAAkB,KAAK,mBAAmB;AAChD,UAAM,kBAAkB,KAAK,KAAK,sBAAsB,eAAe;AACvE,UAAM,mBAAmB,KAAK,KAAK,uBAAuB,eAAe;AACzE,UAAM,YAAY,KAAK,K
AAK,cAAc,eAAe;AAEzD,QAAI,WAAW;AACf,QAAI,cAAc;AAClB,QAAI,eAAe;AACnB,QAAI,eAAe;AACnB,QAAI,YAAY;AAGhB,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,MAAM,QAAQ,KAAK,KAAK,WAAW;AACvE,YAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,KAAK,SAAS;AAC/C,YAAM,SAAS,MAAM,KAAK,QAAQ,KAAK;AACvC,YAAM,aAAa,IAAI,KAAK;AAC5B,YAAM,SAAS,aAAa;AAE5B,UAAI,OAAO,UAAU;AACnB,YAAI,CAAC,UAAU;AAEb,qBAAW;AACX,wBAAc,KAAK,IAAI,GAAG,SAAS,WAAW;AAC9C,yBAAe;AACf,yBAAe;AACf,sBAAY;AAAA,QACd;AACA,uBAAe;AACf;AACA,qBAAa,OAAO;AAAA,MACtB,WAAW,UAAU;AACnB;AACA,YAAI,gBAAgB,kBAAkB;AAEpC,cAAI,gBAAgB,iBAAiB;AACnC,qBAAS,KAAK;AAAA,cACZ,OAAO,cAAc;AAAA,cACrB,MAAM,SAAS,eAAe;AAAA,cAC9B,gBAAgB,YAAY;AAAA,YAC9B,CAAC;AAAA,UACH;AACA,qBAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,YAAY,gBAAgB,iBAAiB;AAC/C,YAAM,QAAS,MAAM,SAAS,KAAK,OAAO,aAAc;AACxD,eAAS,KAAK;AAAA,QACZ,OAAO,cAAc;AAAA,QACrB,KAAK,QAAQ;AAAA,QACb,gBAAgB,YAAY;AAAA,MAC9B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,SAA+B;AAClD,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,aAAO,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,IAC/B;AACA,WAAO,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,YAA8C;AAInE,UAAM,iBAAiB,IAAI,aAAa,UAAU;AAIlD,UAAM,uBAAuB;AAC7B,UAAM,MAAM,KAAK,aAAa,cAAc;AAC5C,QAAI,MAAM,sBAAsB;AAE9B,UAAI,CAAC,KAAK,aAAa;AACrB,aAAK,gBAAgB,KAAK,IAAI,aAAa,cAAc,CAAC;AAC1D,YAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,eAAK,gBAAgB,MAAM;AAAA,QAC7B;AAAA,MACF;AAEA,MAAAA,QAAO,MAAM,4CAA4C;AAAA,QACvD,KAAK,KAAK,MAAM,MAAM,GAAK,IAAI;AAAA,QAC/B,WAAW;AAAA,MACb,CAAC;AAED,aAAO,QAAQ,QAAQ;AAAA,QACrB,aAAa;AAAA,QACb,UAAU;AAAA,QACV,iBAAiB;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,UACrD,qBAAqB,KAAK;AAAA,UAC1B,wBAAwB,KAAK;AAAA,QAC/B,CAAC;AACD,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAGlC,gBAAM,YAAY,KAAK,cAAc,KAAK;AAC1C,gBAAM,cAAc,IAAI,aAAa,SAAS;AAC9C,sBAAY,IAAI,KAAK,SAAS,CAAC;AAC/B,sBAAY,IAAI,gBAAgB,KAAK,WAAW;AAKhD,gBAAM,kBAAkB,IAAI,aAAa,WAAW;AACpD,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,iBAAiB,CAAC,GAAG,SAAS,CAAC;AAEnF,gBAAM,WAAW,KAAK;AAItB,gBAAM,YAAY,IAAI,aAAa,KAAK,MAAO,IAAoB;AACnE,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,KAAK,MAAO,IAAgB;AAE3F,gBAAM,QAAQ;AAAA,YACZ,SAAS;AAAA,YACT,SAAS;AAAA,YACT,MAAM;AAAA,UACR;AAGA,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,KAAK;AAG7C,gBAAM,eAAe,QAAQ,QAAQ;AACrC,gBAAM,iBAAiB,QAAQ,QAAQ,KAAK,QAAQ,OAAO;AAE3D,cAAI,CAAC,cAAc;AACjB,kBAAM,IAAI,MAAM,sCAAsC;AAAA,UACxD;AAEA,gBAAM,cAAe,aAAa,KAAsB,CAAC;AAGzD,cAAI,gBAAgB;AAClB,iBAAK,QAAQ,IAAI,KAAK,IAAK;AAAA,cACzB;AAAA,cACA,IAAI,aAAa,eAAe,IAAoB;AAAA,cACpD,CAAC,GAAG,GAAG,GAAG;AAAA,YACZ;AAAA,UACF;AAGA,eAAK,UAAU,WAAW,MAAM,CAAC,KAAK,WAAW;AAEjD,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAC5C,gBAAM,WAAW,cAAc,KAAK,OAAO;AAG3C,cAAI;AAEJ,cAAI,YAAY,CAAC,KAAK,aAAa;AAEjC,8BAAkB,CAAC,GAAG,KAAK,eAAe;AAC1C,iBAAK,kBAAkB,CAAC;AACxB,YAAAA,QAAO,MAAM,yCAAyC;AAAA,cACpD,iBAAiB,gBAAgB;AAAA,cACjC,YAAY,KAAK,MAAM,gBAAgB,SAAS,KAAK,mBAAmB,CAAC;AAAA,YAC3E,CAAC;AAAA,UACH,WAAW,CAAC,YAAY,CAAC,KAAK,aAAa;AAEzC,iBAAK,gBAAgB,KAAK,IAAI,aAAa,UAAU,CAAC;AACtD,gBAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,mBAAK,gBAAgB,MAAM;AAAA,YAC7B;AAAA,UACF,WAAW,CAAC,YAAY,KAAK,aAAa;AAExC,iBAAK,kBAAkB,CAAC;AAAA,UAC1B;AAEA,eAAK,cAAc;AAEnB,UAAAA,QAAO,MAAM,2BAA2B;AAAA,YACtC,aAAa,KAAK,MAAM,cAAc,GAAI,IAAI;AAAA,YAC9C;AAAA,YACA,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,UACvD,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,yBAAyB;AAAA,YACzB,uBAAuB;AAAA,UACzB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CA
AC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AACA,SAAK,QAAQ;AACb,SAAK,WAAW;AAAA,EAClB;AACF;AAAA;AAAA;AAAA;AAAA;AAngBa,mBAkFJ,oBAAoB;;;AChM7B,IAAMC,UAAS,aAAa,iBAAiB;AAG7C,IAAMC,iBAAgB;AAGtB,IAAM,kBAAkB;AACxB,IAAM,uBAAuB;AAmF7B,IAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgNf,IAAM,kBAAN,MAAsB;AAAA,EA2B3B,YAAY,QAAyB;AA1BrC,SAAQ,SAAwB;AAEhC,SAAQ,YAAY;AACpB,SAAQ,YAAY;AAapB;AAAA,SAAQ,iBAAgC,QAAQ,QAAQ;AAGxD;AAAA,SAAQ,kBAAkC,CAAC;AAC3C,SAAQ,cAAc;AAGtB;AAAA,SAAQ,mBAAuG,oBAAI,IAAI;AACvH,SAAQ,YAAY;AAGlB,UAAM,aAAa,OAAO,cAAc;AAExC,QAAI,eAAe,OAAQ,eAAe,MAAO;AAC/C,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AAEA,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB;AAAA,MACA,WAAW,OAAO,aAAa;AAAA,MAC/B,uBAAuB,OAAO,yBAAyB;AAAA,IACzD;AAGA,SAAK,YAAY,eAAe,OAAQ,MAAM;AAC9C,SAAK,cAAc,eAAe,OAAQ,KAAK;AAG/C,SAAK,QAAQ,IAAI,aAAa,IAAI,IAAI,GAAG;AACzC,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAAA,EAClD;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAAyB;AAC3B,WAAO,KAAK,YAAY,SAAS;AAAA,EACnC;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,qBAA6B;AAC3B,WAAQ,KAAK,YAAY,KAAK,OAAO,aAAc;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAuB;AAC7B,UAAM,OAAO,IAAI,KAAK,CAAC,aAAa,GAAG,EAAE,MAAM,yBAAyB,CAAC;AACzE,UAAM,UAAU,IAAI,gBAAgB,IAAI;AACxC,UAAM,SAAS,IAAI,OAAO,OAAO;AAGjC,QAAI,gBAAgB,OAAO;AAG3B,WAAO,YAAY,CAAC,UAAyC;AAC3D,WAAK,oBAAoB,MAAM,IAAI;AAAA,IACrC;AAGA,WAAO,UAAU,CAAC,UAAU;AAC1B,MAAAD,QAAO,MAAM,gBAAgB,EAAE,OAAO,MAAM,QAAQ,CAAC;AAErD,iBAAW,CAAC,EAAE,QAAQ,KAAK,KAAK,kBAAkB;AAChD,iBAAS,OAAO,IAAI,MAAM,iBAAiB,MAAM,OAAO,EAAE,CAAC;AAAA,MAC7D;AACA,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,oBAAoB,QAA+B;AAEzD,UAAM,WAAW,KAAK,iBAAiB,IAAI,OAAO,IAAI;AACtD,QAAI,UAAU;AACZ,WAAK,iBAAiB,OAAO,OAAO,IAAI;AACxC,UAAI,OAAO,SAAS,SAAS;AAC3B,iBAAS,OAAO,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACzC,OAAO;AACL,iBAAS,QAAQ,MAAM;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAe,SAA2B,cAAsB,WAA+B;AACrG,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAI,CAAC,KAAK,QAAQ;AAChB,eAAO,IAAI,MAAM,wBAAwB,CAAC;AAC1C;AAAA,MACF;AAGA,YAAM,YAAY,WAAW,MAAM;AACjC,aAAK,iBAAiB,OAAO,YAAY;AACzC,eAAO,IAAI,MAAM,oCAAoC,SAAS,IAAI,CAAC
;AAAA,MACrE,GAAG,SAAS;AAGZ,WAAK,iBAAiB,IAAI,cAAc;AAAA,QACtC,SAAS,CAAC,UAAU;AAClB,uBAAa,SAAS;AACtB,kBAAQ,KAAU;AAAA,QACpB;AAAA,QACA,QAAQ,CAAC,UAAU;AACjB,uBAAa,SAAS;AACtB,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAGD,WAAK,iBAAiB,IAAI,SAAS;AAAA,QACjC,SAAS,MAAM;AAAA,QAAC;AAAA;AAAA,QAChB,QAAQ,CAAC,UAAU;AACjB,uBAAa,SAAS;AACtB,eAAK,iBAAiB,OAAO,YAAY;AACzC,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAGD,WAAK,OAAO,YAAY,OAAO;AAAA,IACjC,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAoC;AACxC,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,wBAAwB;AAAA,MACxD,aAAa,KAAK,OAAO;AAAA,MACzB,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AACF,MAAAA,QAAO,KAAK,wBAAwB;AAGpC,WAAK,SAAS,KAAK,aAAa;AAEhC,MAAAA,QAAO,KAAK,8BAA8B;AAAA,QACxC,UAAU,KAAK,OAAO;AAAA,QACtB,YAAY,KAAK,OAAO;AAAA,MAC1B,CAAC;AAGD,YAAM,SAAS,MAAM,KAAK;AAAA,QAMxB;AAAA,UACE,MAAM;AAAA,UACN,UAAU,KAAK,OAAO;AAAA,UACtB,YAAY,KAAK,OAAO;AAAA,UACxB,WAAWC;AAAA,QACb;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,WAAK,YAAY;AAEjB,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAD,QAAO,KAAK,kCAAkC;AAAA,QAC5C,SAAS;AAAA,QACT,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,kBAAkB,KAAK,MAAM,OAAO,UAAU;AAAA,QAC9C,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,OAAO;AAAA,MACzB,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB;AAAA,QACjB,sBAAsB;AAAA,QACtB,6BAA6B,OAAO;AAAA,MACtC,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS;AAAA,MACX,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,YAAY,OAAO;AAAA,QACnB,aAAa,OAAO;AAAA,QACpB,YAAY,KAAK,OAAO;AAAA,QACxB,WAAW,KAAK;AAAA,MAClB;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AAGD,UAAI,KAAK,QAAQ;AACf,aAAK,OAAO,UAAU;AACtB,aAAK,SAAS;AAAA,MAChB;AAEA,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK,QAAQ;AACnC,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAGA,UAAM,SAAS,MAAM,KAAK;AAAA,MACxB,EAAE,MAAM,QAAQ;AAAA,MAChB;AAAA,MACA;AAAA,IACF;AAGA,SAAK,QAAQ,OAAO;AACpB,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAChD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAQ,YAA8C;AAC1D,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK,QAAQ;AACnC,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAEA,QAAI,WAAW,WAAW,KAAK,WAAW;AACxC,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,SAAS,iBAAiB,WAAW,MAAM;AAAA,MAEjF;AAAA,IACF;AAEA,WAAO,KAAK,eAAe,UAAU;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,YAA8C;AAEnE,UAAM,iBAAiB,IAAI,aAAa,UAAU;AAElD,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,2BAA2B;AAAA,UAC3D,qBAAqB;AAAA,UACrB,wBAAwB,KAAK;AAAA,QAC/B,CAAC;AAED,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAGlC,gBAAM,SAAS,MAAM,KAAK;AAAA,YAMxB;AAAA,cACE,MAAM;AAAA,cACN,OAAO;AAAA,cACP,OAAO,KAAK;AAAA,cACZ,SAAS,KAAK;AAAA,YAChB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAGA,eAAK,QAAQ,OAAO;AAGpB,eAAK,UAAU,eAAe,MAAM,CAAC,KAAK,WAAW;AAErD,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAC5C,gBAAM,WAAW,OAAO,cAAc,KAAK,OAAO;AAGlD,cAAI;AAEJ,cAAI,YAAY,CAAC,KAAK,aAAa;AAEjC,8BAAkB,CAAC,GAAG,KAAK,eAAe;AAC1C,iBAAK,kBAAkB,CAAC;AACxB,YAAAA,QAAO,MAAM,yCAAyC;AAAA,cACpD,iBAAiB,gBAAgB;AAAA,cACjC,YAAY,KAAK,MAAM,gBAAgB,SAAS,KAAK,mBAAmB,CAAC;AAAA,YAC3E,CAAC;AAAA,UACH,WAAW,CAAC,YAAY,CAAC,KAAK,aAAa;AAEzC,iBAAK,gBAAgB,KAAK,IAAI,aAAa,cAAc,CAAC;AAC1D,gBAAI,KAAK,gBAAgB,SAAS,KAAK,OAAO,uBAAuB;AACnE,mBAAK,gBAAgB,MAAM;AAAA,YAC7B;AAAA,UACF,WAAW,CAAC,YAAY,KAAK,aAAa;AAExC,iBAAK,kBAAkB,CAAC;AAAA,UAC1B;AAEA,eAAK,cAAc;AAE
nB,UAAAA,QAAO,MAAM,kCAAkC;AAAA,YAC7C,aAAa,KAAK,MAAM,OAAO,cAAc,GAAI,IAAI;AAAA,YACrD;AAAA,YACA,iBAAiB,KAAK,MAAM,kBAAkB,GAAG,IAAI;AAAA,YACrD,cAAc,KAAK,MAAM,OAAO,kBAAkB,GAAG,IAAI;AAAA,UAC3D,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,gCAAgC,OAAO;AAAA,YACvC,yBAAyB,OAAO;AAAA,YAChC,uBAAuB;AAAA,UACzB,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS;AAAA,UACX,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS;AAAA,YACT,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN,aAAa,OAAO;AAAA,YACpB;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS;AAAA,YACT,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,QAAQ;AACf,UAAI;AAEF,cAAM,KAAK,YAAY,EAAE,MAAM,UAAU,GAAG,YAAY,oBAAoB;AAAA,MAC9E,QAAQ;AAAA,MAER;AAGA,WAAK,OAAO,UAAU;AACtB,WAAK,SAAS;AAAA,IAChB;AAEA,SAAK,YAAY;AACjB,SAAK,QAAQ,IAAI,aAAa,IAAI,IAAI,GAAG;AACzC,SAAK,UAAU,IAAI,aAAa,KAAK,WAAW;AAChD,SAAK,kBAAkB,CAAC;AACxB,SAAK,cAAc;AACnB,SAAK,iBAAiB,MAAM;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAuB;AAC5B,WAAO,OAAO,WAAW;AAAA,EAC3B;AACF;;;AChuBA,IAAME,UAAS,aAAa,iBAAiB;AAkGtC,SAAS,oBAA6B;AAE3C,MAAI,OAAO,WAAW,aAAa;AACjC,IAAAA,QAAO,MAAM,oDAAoD;AACjE,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,QAAQ,eAAe,OAAO,IAAI,oBAAoB,aAAa;AAC5E,IAAAA,QAAO,MAAM,uDAAuD;AACpE,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,SAAS,aAAa;AAC/B,IAAAA,QAAO,MAAM,oDAAoD;AACjE,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAgCO,SAAS,gBAAgB,QAAkD;AAChF,QAAM,kBAAkB,OAAO,mBAAmB;AAGlD,MAAI;AAEJ,MAAI,OAAO,cAAc,QAAW;AAElC,gBAAY,OAAO;AACnB,IAAAA,QAAO,MAAM,oCAAoC,EAAE,UAAU,CAAC;AAAA,EAChE,OAAO;AAEL,UAAM,kBAAkB,kBAAkB;AAC1C,UAAM,WAAW,SAAS;AAI1B,gBAAY,mBAAmB,CAAC;AAEhC,IAAAA,QAAO,MAAM,mCAAmC;AAAA,MAC9C;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAGA,MAAI,WAAW;AACb,IAAAA,QAAO,KAAK,4CAA4C;AACxD,UAAM,SAAS,IAAI,gBAAgB;AAAA,MACjC,UAAU,OAAO;AAAA,MACjB,YAAY,OAAO;AAAA,MACnB,WAAW,OAAO;AAAA,MAClB,uBAAuB,OAAO;AAAA,IAChC,CAAC;AAED,QAAI,iBAAiB;AAEnB,aAAO,IAAI,sBAAsB,QAAQ,MAAM;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAEA,EAAAA,QAAO,KAAK,2CAA2C;AACvD,SAAO,IAAI,mBAAmB,MAAM;AACtC;AAQA,IAAM,wBAAN,MAAwD;AAAA,EAKtD,YAAY,QAAyB,QAAgC;AAFrE,SAAQ,gBAAgB;AAGtB,SAAK,iBAAiB;AACtB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,UAAiC;AAEnC,QAAI,CAAC,KAAK,SAAU,QAAO;AAC3B,WAAO,KAAK,gBAAiB,KAAK,eAAsC,UAAU;AAAA,EACpF;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,IAAI,YAAoB;AACtB,WAAO,KAAK,eAAe;AAAA,EAC7B;AAAA,EAEA,MAAM,OAAmD;AACvD,QAAI;AACF,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC,SAAS,OAAO;AACd,MAAAA,QAAO,KAAK,mDAAmD;AAAA,QAC7D,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC9D,CAAC;AAGD,UAAI;AACF,cAAM,KAAK,eAAe,QAAQ;AAAA,MACpC,QAAQ;AAAA,MAER;AAGA,WAAK,iBAAiB,IAAI,mBAAmB,KAAK,MAAM;AACxD,WAAK,gBAAgB;AAErB,MAAAA,QAAO,KAAK,2CAA2C;AACvD,aAAO,MAAM,KAAK,eAAe,KAAK;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,YAA8C;AAC1D,WAAO,KAAK,eAAe,QAAQ,UAAU;AAAA,EAC/C;AAAA,EAEA,QAA8B;AAC5B,WAAO,KAAK,eAAe,MAAM;AAAA,EACnC;AAAA,EAEA,MAAM,UAAyB;AAC7B,WAAO,KAAK,eAAe,QAAQ;AAAA,EACrC;AAAA,EAEA,eAAuB;AACrB,WAAO,KAAK,eAAe,aAAa;AAAA,EAC1C;AAAA,EAEA,qBAA6B;AAC3B,WAAO,KAAK,eAAe,mBAAmB;AAAA,EAChD;AACF;;;ACtQA,IAAMC,WAAS,aAAa,aAAa;AAalC,IAAM,qBAAqB,CAAC,WAAW,SAAS,SAAS,KAAK;AAmE9D,IAAM,uBAAN,MAA2B;AAAA,EAQhC,YAAY,QAA2B;AAPvC,SAAQ,UAAmC;AAC3C,SAAQ,MAAwB;AAEhC,SAAQ,WAA2B;AACnC,SAAQ,YAAY;AACpB,SAAQ,iBAAgC,QAAQ,QAAQ;AAGtD,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO;AAAA,MACjB,SAAS,OAAO,WAAW;AAAA,MAC3B,YAAY,OAAO,cAAc;AAAA,IACnC;AAAA,EACF;AAAA,
EAEA,IAAI,UAAiC;AACnC,WAAO,KAAK,UAAU,KAAK,WAAW;AAAA,EACxC;AAAA,EAEA,IAAI,WAAoB;AACtB,WAAO,KAAK,YAAY;AAAA,EAC1B;AAAA,EAEA,IAAI,aAAqB;AACvB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,OAAsC;AAC1C,QAAI,KAAK,WAAW;AAClB,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AAEA,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,SAAK,YAAY;AACjB,UAAM,YAAY,YAAY,IAAI;AAClC,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,oBAAoB;AAAA,MACpD,aAAa,KAAK,OAAO;AAAA,MACzB,2BAA2B,KAAK,OAAO;AAAA,IACzC,CAAC;AAED,QAAI;AACF,MAAAA,SAAO,KAAK,2BAA2B,EAAE,YAAY,KAAK,OAAO,QAAQ,CAAC;AAE1E,YAAM,EAAE,KAAK,QAAQ,IAAI,MAAM,4BAA4B,KAAK,OAAO,OAAO;AAC9E,WAAK,MAAM;AACX,WAAK,WAAW;AAEhB,MAAAA,SAAO,KAAK,uBAAuB,EAAE,SAAS,KAAK,SAAS,CAAC;AAG7D,MAAAA,SAAO,KAAK,yBAAyB;AACrC,YAAM,QAAQ,cAAc;AAC5B,YAAM,WAAW,KAAK,OAAO;AAC7B,YAAM,WAAW,MAAM,MAAM,IAAI,QAAQ;AACzC,MAAAA,SAAO,KAAK,wBAAwB,EAAE,UAAU,SAAS,CAAC;AAE1D,UAAI;AACJ,UAAI,UAAU;AACZ,QAAAA,SAAO,KAAK,+BAA+B,EAAE,SAAS,CAAC;AACvD,sBAAe,MAAM,MAAM,IAAI,QAAQ;AACvC,QAAAA,SAAO,KAAK,2BAA2B,EAAE,MAAM,YAAY,YAAY,UAAU,EAAE,CAAC;AAAA,MACtF,OAAO;AACL,QAAAA,SAAO,KAAK,kCAAkC,EAAE,SAAS,CAAC;AAC1D,sBAAc,MAAM,eAAe,QAAQ;AAC3C,QAAAA,SAAO,KAAK,4BAA4B,EAAE,MAAM,YAAY,YAAY,UAAU,EAAE,CAAC;AAAA,MACvF;AAEA,MAAAA,SAAO,KAAK,mEAAmE;AAC/E,MAAAA,SAAO,MAAM,yBAAyB;AAAA,QACpC,MAAM,YAAY,YAAY,UAAU;AAAA,QACxC,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,YAAM,iBAAiB,kBAAkB,KAAK,QAAQ;AACtD,YAAM,YAAY,IAAI,WAAW,WAAW;AAC5C,WAAK,UAAU,MAAM,IAAI,iBAAiB,OAAO,WAAW,cAAc;AAE1E,YAAM,aAAa,YAAY,IAAI,IAAI;AAEvC,MAAAA,SAAO,KAAK,6BAA6B;AAAA,QACvC,SAAS,KAAK;AAAA,QACd,YAAY,KAAK,MAAM,UAAU;AAAA,QACjC,YAAY,KAAK,OAAO;AAAA,QACxB,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,MAC3C,CAAC;AAED,YAAM,cAAc;AAAA,QAClB,iBAAiB,KAAK;AAAA,QACtB,sBAAsB;AAAA,QACtB,gBAAgB;AAAA,MAClB,CAAC;AACD,YAAM,IAAI;AACV,iBAAW,gBAAgB,yBAAyB,YAAY;AAAA,QAC9D,OAAO;AAAA,QACP,SAAS,KAAK;AAAA,MAChB,CAAC;AAED,aAAO;AAAA,QACL,SAAS,KAAK;AAAA,QACd;AAAA,QACA,YAAY,CAAC,GAAG,KAAK,QAAQ,UAAU;AAAA,QACvC,aAAa,CAAC,GAAG,KAAK,QAAQ,WAAW;AAAA,QACzC,YAAY,KAAK,OAAO;AAAA,MAC1B;AAAA,IACF,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,iBAAW,iBAAiB,sBAAsB,GAAG;AAAA,QACnD,OAAO;AAAA,QACP,YAAY;AAAA,MACd,CAAC;AACD,YAAM;AAAA,IACR,UAAE;AACA,WAAK,YAAY;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,MAAM,OAAiD;AAC3D,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AAEA,WAAO,KAAK,eAAe,KAAK;AAAA,EAClC;AAAA,EAEQ,eAAe,OAAiD;AAItE,UAAM,YAAY,IAAI,aAAa,KAAK;AAExC,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,iBAAiB,KAAK,eAAe,KAAK,YAAY;AACzD,cAAM,YAAY,aAAa;AAC/B,cAAM,OAAO,WAAW,UAAU,qBAAqB;AAAA,UACrD,qBAAqB,KAAK;AAAA,UAC1B,2BAA2B,UAAU;AAAA,QACvC,CAAC;AAED,YAAI;AACF,gBAAM,YAAY,YAAY,IAAI;AAIlC,gBAAM,cAAc,IAAI,KAAK,IAAK,OAAO,WAAW,WAAW,CAAC,GAAG,UAAU,MAAM,CAAC;AAGpF,gBAAM,UAAU,MAAM,KAAK,QAAS,IAAI,EAAE,OAAO,YAAY,CAAC;AAK9D,gBAAM,eAAe,QAAQ,QAAQ;AACrC,gBAAM,mBAAmB,QAAQ,eAAe;AAEhD,cAAI,CAAC,cAAc;AACjB,kBAAM,IAAI;AAAA,cACR,yDAAyD,OAAO,KAAK,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,YAC1F;AAAA,UACF;AAGA,gBAAM,aAAa,aAAa;AAChC,gBAAM,SAAS,IAAI,aAAa,UAAU;AAG1C,gBAAM,QAAQ,KAAK,QAAQ,MAAM;AAGjC,gBAAM,gBAAkD;AAAA,YACtD,SAAS,MAAM,CAAC;AAAA,YAChB,OAAO,MAAM,CAAC;AAAA,YACd,OAAO,MAAM,CAAC;AAAA,YACd,KAAK,MAAM,CAAC;AAAA,UACd;AAGA,cAAI,SAAS;AACb,cAAI,UAAU,MAAM,CAAC;AACrB,mBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAI,MAAM,CAAC,IAAI,SAAS;AACtB,wBAAU,MAAM,CAAC;AACjB,uBAAS;AAAA,YACX;AAAA,UACF;AAEA,gBAAM,WAAyB;AAAA,YAC7B,SAAS,mBAAmB,MAAM;AAAA,YAClC,YAAY;AAAA,YACZ;AAAA,UACF;AAGA,cAAI,aAA6B,CAAC;AAClC,cAAI,YAAY;AAEhB,cAAI,kBAAkB;AACpB,kBAAM,gBAAgB,iBAAiB;AACvC,kBAAM,OAAO,iBAAiB;AAE9B,gBAAI,KAAK,WAAW,GAAG;AAErB,0
BAAY,KAAK,CAAC;AAClB,oBAAM,eAAe,KAAK,CAAC;AAE3B,uBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,sBAAM,QAAQ,IAAI;AAClB,2BAAW,KAAK,IAAI,aAAa,cAAc,MAAM,OAAO,QAAQ,YAAY,CAAC,CAAC;AAAA,cACpF;AAAA,YACF;AAAA,UACF;AAKA,gBAAM,SAAyB,CAAC;AAChC,mBAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,mBAAO,KAAK;AAAA,cACV,SAAS,SAAS;AAAA,cAClB,YAAY,SAAS;AAAA,cACrB,eAAe,EAAE,GAAG,cAAc;AAAA,YACpC,CAAC;AAAA,UACH;AAEA,gBAAM,kBAAkB,YAAY,IAAI,IAAI;AAE5C,UAAAA,SAAO,MAAM,+BAA+B;AAAA,YAC1C;AAAA,YACA,UAAU,SAAS;AAAA,YACnB,YAAY,KAAK,MAAM,SAAS,aAAa,GAAG;AAAA,YAChD,iBAAiB,KAAK,MAAM,eAAe;AAAA,UAC7C,CAAC;AAED,gBAAM,cAAc;AAAA,YAClB,yBAAyB;AAAA,YACzB,wBAAwB;AAAA,YACxB,8BAA8B,SAAS;AAAA,UACzC,CAAC;AACD,gBAAM,IAAI;AACV,qBAAW,gBAAgB,2BAA2B,iBAAiB;AAAA,YACrE,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,UAChB,CAAC;AACD,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AAED,kBAAQ;AAAA,YACN;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,SAAS,KAAK;AACZ,gBAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AACtE,qBAAW,iBAAiB,yBAAyB,GAAG;AAAA,YACtD,OAAO;AAAA,YACP,SAAS,KAAK;AAAA,YACd,QAAQ;AAAA,UACV,CAAC;AACD,iBAAO,GAAG;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,QAAQ,QAAoC;AAElD,QAAI,MAAM,OAAO,CAAC;AAClB,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,OAAO,CAAC,IAAI,IAAK,OAAM,OAAO,CAAC;AAAA,IACrC;AAGA,UAAM,MAAM,IAAI,aAAa,OAAO,MAAM;AAC1C,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,CAAC,IAAI,KAAK,IAAI,OAAO,CAAC,IAAI,GAAG;AACjC,aAAO,IAAI,CAAC;AAAA,IACd;AAGA,UAAM,QAAQ,IAAI,aAAa,OAAO,MAAM;AAC5C,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,CAAC,IAAI,IAAI,CAAC,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,YAAM,KAAK,QAAQ,QAAQ;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAAA;AArUa,qBAgCJ,oBAAoB;;;ACrI7B,IAAMC,WAAS,aAAa,cAAc;AAsGnC,IAAM,0BAAN,MAAM,yBAAwB;AAAA,EAenC,YAAY,SAA6B,CAAC,GAAG;AAb7C,SAAQ,cAAiD;AACzD,SAAQ,cAAc;AACtB,SAAQ,YAAY;AACpB,SAAQ,kBAAkB;AAG1B;AAAA,SAAQ,kBAA0C,CAAC;AACnD,SAAQ,iBAAwC,CAAC;AAGjD;AAAA,SAAQ,eAAmE;AAC3E,SAAQ,eAAgD;AAGtD,SAAK,SAAS;AAAA,MACZ,UAAU,OAAO,YAAY;AAAA,MAC7B,YAAY,OAAO,cAAc;AAAA,MACjC,gBAAgB,OAAO,kBAAkB;AAAA,MACzC,iBAAiB,OAAO,mBAAmB;AAAA,IAC7C;AAEA,IAAAA,SAAO,MAAM,mCAAmC;AAAA,MAC9C,UAAU,KAAK,OAAO;AAAA,MACtB,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,cAAuB;AAC5B,WAAO,6BAA6B;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,YAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAAsC;AAC7C,SAAK,gBAAgB,KAAK,QAAQ;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,UAAqC;AAC3C,SAAK,eAAe,KAAK,QAAQ;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,UAAsC;AAC9C,UAAM,QAAQ,KAAK,gBAAgB,QAAQ,QAAQ;AACnD,QAAI,UAAU,IAAI;AAChB,WAAK,gBAAgB,OAAO,OAAO,CAAC;AAAA,IACtC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAAqC;AAC5C,UAAM,QAAQ,KAAK,eAAe,QAAQ,QAAQ;AAClD,QAAI,UAAU,IAAI;AAChB,WAAK,eAAe,OAAO,OAAO,CAAC;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAAuB;AAC3B,QAAI,KAAK,aAAa;AACpB,MAAAA,SAAO,KAAK,mBAAmB;AAC/B;AAAA,IACF;AAEA,QAAI,CAAC,yBAAwB,YAAY,GAAG;AAC1C,YAAM,QAAQ,IAAI;AAAA,QAChB;AAAA,MAGF;AACA,WAAK,UAAU,KAAK;AACpB,YAAM;AAAA,IACR;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,sBAAsB;AAAA,MACtD,mBAAmB,KAAK,OAAO;AAAA,MAC/B,qBAAqB,KAAK,OAAO;AAAA,IACnC,CAAC;AAED,QAAI;AAEF,YAAM,yBAAyB,OAAO,qBAAqB,OAAO;AAClE,UAAI,CAAC,wBAAwB;AAC3B,cAAM,IAAI,MAAM,yCAAyC;AAAA,MAC3D;AAEA,WAAK,cAAc,IAAI,uBAAuB;AAC9C,WAAK,YAAY,aAAa,KAAK,OAAO;AAC1C,WAAK,YAAY,iBAAiB,KAAK,OAAO;AAC9C,WAAK,YAAY,OAAO,KAAK,OAAO;AACpC,WAAK,YAAY,kBAAkB,KAAK,OAAO;AAG/C,WA
AK,mBAAmB;AAGxB,WAAK,YAAY,MAAM;AACvB,WAAK,cAAc;AACnB,WAAK,YAAY,YAAY,IAAI;AACjC,WAAK,kBAAkB;AAEvB,MAAAA,SAAO,KAAK,8BAA8B;AAAA,QACxC,UAAU,KAAK,OAAO;AAAA,MACxB,CAAC;AAED,YAAM,IAAI;AAAA,IACZ,SAAS,OAAO;AACd,YAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,WAAK,UAAU,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AACxE,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAyC;AAC7C,QAAI,CAAC,KAAK,eAAe,CAAC,KAAK,aAAa;AAC1C,MAAAA,SAAO,KAAK,yBAAyB;AACrC,aAAO;AAAA,QACL,MAAM,KAAK;AAAA,QACX,UAAU,KAAK,OAAO;AAAA,QACtB,iBAAiB;AAAA,QACjB,SAAS;AAAA,MACX;AAAA,IACF;AAEA,UAAM,YAAY,aAAa;AAC/B,UAAM,OAAO,WAAW,UAAU,mBAAmB;AAErD,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,eAAe;AACpB,WAAK,eAAe;AAEpB,UAAI;AACF,aAAK,YAAa,KAAK;AAAA,MAEzB,SAAS,OAAO;AACd,cAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,aAAK,cAAc;AACnB,eAAO,KAAK;AAAA,MACd;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,KAAK,eAAe,KAAK,aAAa;AACxC,WAAK,YAAY,MAAM;AACvB,WAAK,cAAc;AACnB,MAAAA,SAAO,KAAK,4BAA4B;AAAA,IAC1C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WAAW,QAAwD;AACvE,UAAM,IAAI;AAAA,MACR;AAAA,IAGF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,QAAI,KAAK,aAAa;AACpB,UAAI,KAAK,aAAa;AACpB,aAAK,YAAY,MAAM;AAAA,MACzB;AACA,WAAK,cAAc;AAAA,IACrB;AACA,SAAK,cAAc;AACnB,SAAK,kBAAkB,CAAC;AACxB,SAAK,iBAAiB,CAAC;AACvB,IAAAA,SAAO,MAAM,kCAAkC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAA2B;AACjC,QAAI,CAAC,KAAK,YAAa;AAEvB,SAAK,YAAY,WAAW,CAAC,UAAkC;AAC7D,YAAM,YAAY,aAAa;AAC/B,YAAM,OAAO,WAAW,UAAU,uBAAuB;AAEzD,UAAI;AAEF,iBAAS,IAAI,MAAM,aAAa,IAAI,MAAM,QAAQ,QAAQ,KAAK;AAC7D,gBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,gBAAM,cAAc,OAAO,CAAC;AAE5B,cAAI,aAAa;AACf,kBAAM,OAAO,YAAY;AACzB,kBAAM,UAAU,OAAO;AAGvB,gBAAI,SAAS;AACX,mBAAK,mBAAmB,OAAO;AAAA,YACjC;AAEA,kBAAM,eAAwC;AAAA,cAC5C,MAAM,UAAU,KAAK,gBAAgB,KAAK,IAAI;AAAA,cAC9C,UAAU,KAAK,OAAO;AAAA,cACtB,iBAAiB,YAAY,IAAI,IAAI,KAAK;AAAA,cAC1C;AAAA,cACA,YAAY,YAAY;AAAA,YAC1B;AAGA,iBAAK,WAAW,YAAY;AAE5B,YAAAA,SAAO,MAAM,iBAAiB;AAAA,cAC5B,MAAM,KAAK,UAAU,GAAG,EAAE;AAAA,cAC1B;AAAA,cACA,YAAY,YAAY;AAAA,YAC1B,CAAC;AAAA,UACH;AAAA,QACF;AAEA,cAAM,IAAI;AAAA,MACZ,SAAS,OAAO;AACd,cAAM,aAAa,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAC5E,QAAAA,SAAO,MAAM,kCAAkC,EAAE,MAAM,CAAC;AAAA,MAC1D;AAAA,IACF;AAEA,SAAK,YAAY,UAAU,CAAC,UAAuC;AACjE,YAAM,QAAQ,IAAI,MAAM,6BAA6B,MAAM,KAAK,MAAM,MAAM,OAAO,EAAE;AACrF,MAAAA,SAAO,MAAM,4BAA4B,EAAE,OAAO,MAAM,OAAO,SAAS,MAAM,QAAQ,CAAC;AACvF,WAAK,UAAU,KAAK;AAEpB,UAAI,KAAK,cAAc;AACrB,aAAK,aAAa,KAAK;AACvB,aAAK,eAAe;AACpB,aAAK,eAAe;AAAA,MACtB;AAAA,IACF;AAEA,SAAK,YAAY,QAAQ,MAAM;AAC7B,WAAK,cAAc;AACnB,MAAAA,SAAO,KAAK,4BAA4B;AAAA,QACtC,WAAW,KAAK,gBAAgB;AAAA,QAChC,YAAY,YAAY,IAAI,IAAI,KAAK;AAAA,MACvC,CAAC;AAGD,UAAI,KAAK,cAAc;AACrB,cAAM,SAAkC;AAAA,UACtC,MAAM,KAAK,gBAAgB,KAAK;AAAA,UAChC,UAAU,KAAK,OAAO;AAAA,UACtB,iBAAiB,YAAY,IAAI,IAAI,KAAK;AAAA,UAC1C,SAAS;AAAA,QACX;AACA,aAAK,aAAa,MAAM;AACxB,aAAK,eAAe;AACpB,aAAK,eAAe;AAAA,MACtB;AAAA,IACF;AAEA,SAAK,YAAY,UAAU,MAAM;AAC/B,MAAAA,SAAO,MAAM,uCAAuC;AAAA,IACtD;AAEA,SAAK,YAAY,gBAAgB,MAAM;AACrC,MAAAA,SAAO,MAAM,iBAAiB;AAAA,IAChC;AAEA,SAAK,YAAY,cAAc,MAAM;AACnC,MAAAA,SAAO,MAAM,cAAc;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,QAAuC;AACxD,eAAW,YAAY,KAAK,iBAAiB;AAC3C,UAAI;AACF,iBAAS,MAAM;AAAA,MACjB,SAAS,OAAO;AACd,QAAAA,SAAO,MAAM,4BAA4B,EAAE,MAAM,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,OAAoB;AACpC,eAAW,YAAY,KAAK,gBAAgB;AAC1C,UAAI;AACF,iBAAS,KAAK;AAAA,MAChB,SAAS,eAAe;AACtB,QAAAA,SAAO,MAAM,2BAA2B,EAAE,OAAO,cAAc,CAAC;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AACF;;;ACxcO,IAAM,gBAAgB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EA
CA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAQO,IAAM,sBAAsB;AAgB5B,SAAS,oBAAoB,UAA0B,CAAC,GAAiB;AAC9E,QAAM,SAAS,IAAI,aAAa,mBAAmB;AAEnD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,UAAM,MAAM,cAAc,QAAQ,IAAmB;AACrD,QAAI,OAAO,GAAG;AACZ,aAAO,GAAG,IAAI,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,KAAK,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,IAAM,iBAAiB;AAAA;AAAA,EAE5B,SAAS,oBAAoB,CAAC,CAAC;AAAA;AAAA,EAG/B,OAAO,oBAAoB,EAAE,KAAK,KAAK,WAAW,IAAI,CAAC;AAAA;AAAA,EAGvD,KAAK,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAGrD,OAAO,oBAAoB,EAAE,OAAO,KAAK,SAAS,IAAI,CAAC;AAAA;AAAA,EAGvD,WAAW,oBAAoB,EAAE,WAAW,KAAK,MAAM,IAAI,CAAC;AAAA;AAAA,EAG5D,QAAQ,oBAAoB,EAAE,MAAM,KAAK,MAAM,IAAI,CAAC;AAAA;AAAA,EAGpD,WAAW,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAG3D,SAAS,oBAAoB,EAAE,KAAK,KAAK,WAAW,KAAK,YAAY,IAAI,CAAC;AAAA;AAAA,EAG1E,OAAO,oBAAoB,EAAE,aAAa,KAAK,SAAS,IAAI,CAAC;AAAA;AAAA,EAG7D,SAAS,oBAAoB,EAAE,YAAY,KAAK,KAAK,IAAI,CAAC;AAAA;AAAA,EAG1D,QAAQ,oBAAoB,EAAE,MAAM,KAAK,OAAO,IAAI,CAAC;AAAA;AAAA,EAGrD,eAAe,oBAAoB,EAAE,SAAS,KAAK,OAAO,IAAI,CAAC;AACjE;AAOO,SAAS,iBAAiB,MAAuC;AACtE,SAAO,eAAe,IAAI,EAAE,MAAM;AACpC;AAgBO,SAAS,cACd,UACc;AACd,QAAM,SAAS,IAAI,aAAa,mBAAmB;AACnD,MAAI,cAAc;AAElB,aAAW,EAAE,QAAQ,OAAO,KAAK,UAAU;AACzC,mBAAe;AACf,aAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,aAAO,CAAC,MAAM,OAAO,CAAC,KAAK,KAAK;AAAA,IAClC;AAAA,EACF;AAGA,MAAI,cAAc,GAAG;AACnB,aAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,aAAO,CAAC,KAAK;AAAA,IACf;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,YACd,MACA,IACA,GACc;AACd,QAAM,SAAS,IAAI,aAAa,mBAAmB;AACnD,QAAM,WAAW,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,CAAC,CAAC;AAE3C,WAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,WAAO,CAAC,KAAK,KAAK,CAAC,KAAK,MAAM,IAAI,aAAa,GAAG,CAAC,KAAK,KAAK;AAAA,EAC/D;AAEA,SAAO;AACT;AAKO,IAAM,oBAAN,MAAwB;AAAA,EAAxB;AACL,SAAQ,iBAAiB,IAAI,aAAa,mBAAmB;AAC7D,SAAQ,gBAAgB,IAAI,aAAa,mBAAmB;AAC5D,SAAQ,qBAAqB;AAC7B,SAAQ,qBAAqB;AAC7B,SAAQ,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA,EAK9B,IAAI,UAAwB;AAC1B,QAAI,KAAK,sBAAsB,GAAK;AAClC,aAAO,KAAK;AAAA,IACd;AAGA,WAAO,YAAY,KAAK,gBAAgB,KAAK,eAAe,KAAK,kBAAkB;AAAA,EACrF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAA+B;AACjC,UAAM,aAAa,oBAAoB,OAAO;AAC9C,SAAK,cAAc,IAAI,UAAU;AACjC,SAAK,eAAe,IAAI,UAAU;AAClC,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,QAAiC;AACzC,UAAM,aAAa,iBAAiB,MAAM;AAC1C,SAAK,cAAc,IAAI,UAAU;AACjC,SAAK,eAAe,IAAI,UAAU;AAClC,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,aAAa,SAAyB,YAA0B;AAC9D,SAAK,eAAe,IAAI,KAAK,OAAO;AACpC,SAAK,cAAc,IAAI,oBAAoB,OAAO,CAAC;AACnD,SAAK,qBAAqB;AAC1B,SAAK,sBAAsB,YAAY,IAAI;AAC3C,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,QAA2B,YAA0B;AACtE,SAAK,eAAe,IAAI,KAAK,OAAO;AACpC,SAAK,cAAc,IAAI,iBAAiB,MAAM,CAAC;AAC/C,SAAK,qBAAqB;AAC1B,SAAK,sBAAsB,YAAY,IAAI;AAC3C,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,SAAe;AACb,QAAI,KAAK,sBAAsB,EAAK;AAEpC,UAAM,UAAU,YAAY,IAAI,IAAI,KAAK;AACzC,SAAK,qBAAqB,KAAK,IAAI,GAAK,UAAU,KAAK,kBAAkB;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,kBAA2B;AAC7B,WAAO,KAAK,qBAAqB;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,eAAe,KAAK,CAAC;AAC1B,SAAK,cAAc,KAAK,CAAC;AACzB,SAAK,qBAAqB;AAAA,EAC5B;AACF;;;AC1OO,IAAM,mBAAN,cAA+B,aAAmD;AAAA,EAkCvF,YAAY,QAAyB;AACnC,UAAM;AAlCR,SAAS,OAAO;AAEhB,SAAQ,SAAyB;AACjC,SAAQ,aAA4B;AACpC,SAAQ,eAAe;AAGvB;AAAA,SAAQ,UAAmC;AAC3C,SAAQ,MAAiC;AACzC,SAAQ,MAAgC;AAExC,SAAQ,WAAuC;AAG/C;AAAA,SAAQ,KAAuB;AAC/B,SAAQ,sBAAsB;AAC9B,SAAiB,uBAAuB;AAGxC;AAAA,SAAQ,cAA8B,CAAC;AAGvC;AAAA,SAAQ,UAAiC,CAAC;AAC1C,SAAQ,gBAAsC;AAI9C;AAAA,SAAQ,aAAa;AACrB,SAAQ,4BAAoD;AAG5D;AAAA,SAAQ,aAAa,oBAAI,IAAkD;AAIzE,SAAK,kBAAkB;AACvB,SAAK,oBAAoB,IAAI,kBAAkB;AAAA,EACjD;AAAA,EAEA,IAAI,QAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAA2B;AAC7
B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,cAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,QAAsC;AAClD,SAAK,gBAAgB;AACrB,SAAK,aAAa,OAAO;AAEzB,QAAI;AAEF,YAAM,YAAY,MAAM,KAAK,aAAa,OAAO,MAAM;AAGvD,YAAM,QAAQ,IAAI;AAAA,QAChB,KAAK,YAAY;AAAA,QACjB,KAAK,QAAQ;AAAA,MACf,CAAC;AAGD,YAAM,KAAK,iBAAiB,WAAW,MAAM;AAE7C,WAAK,eAAe;AACpB,WAAK,SAAS,MAAM;AAEpB,WAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,YAAY,SAAS,KAAK,KAAK,CAAC;AAAA,IACnF,SAAS,OAAO;AACd,WAAK,SAAS,OAAO;AACrB,WAAK,KAAK,oBAAoB;AAAA,QAC5B;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAEhC,SAAK,2BAA2B,MAAM;AAGtC,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,QAAQ;AACtB,WAAK,WAAW;AAAA,IAClB;AAGA,QAAI,KAAK,IAAI;AACX,WAAK,GAAG,MAAM,KAAM,mBAAmB;AACvC,WAAK,KAAK;AAAA,IACZ;AAGA,UAAM,QAAQ,IAAI;AAAA,MAChB,KAAK,SAAS,QAAQ;AAAA,MACtB,KAAK,KAAK,QAAQ;AAAA,MAClB,KAAK,KAAK,QAAQ;AAAA,IACpB,CAAC;AAED,SAAK,eAAe;AACpB,SAAK,SAAS,cAAc;AAE5B,SAAK,KAAK,qBAAqB,EAAE,QAAQ,oBAAoB,CAAC;AAAA,EAChE;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAAwC;AAChD,QAAI,CAAC,KAAK,aAAc;AAGxB,QAAI,KAAK,YAAY;AACnB,WAAK,oBAAoB,KAAK,EAAE,KAAK,CAAC,qBAAqB;AACzD,YAAI,kBAAkB;AACpB,eAAK,UAAU;AAAA,QACjB;AAAA,MACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,gBAAQ,MAAM,wDAAwD,KAAK;AAAA,MAC7E,CAAC;AAAA,IAEH;AAGA,UAAM,UAAU,iBAAiB,eAC7B,QACA,KAAK,eAAe,KAAK;AAG7B,SAAK,YAAY,KAAK,OAAO;AAG7B,SAAK,sBAAsB;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,MAA6B;AAC1C,QAAI,CAAC,KAAK,gBAAgB,CAAC,KAAK,IAAI;AAClC,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AAGA,SAAK,aAAa;AAAA,MAChB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,WAAW,KAAK,IAAI;AAAA,IACtB,CAAC;AAED,SAAK,SAAS,UAAU;AACxB,SAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAGxD,SAAK,GAAG,KAAK,KAAK,UAAU;AAAA,MAC1B,MAAM;AAAA,MACN,WAAW,KAAK;AAAA,MAChB,SAAS;AAAA,MACT,SAAS;AAAA,QACP,SAAS,KAAK,QAAQ,MAAM,GAAG;AAAA;AAAA,QAC/B,SAAS,MAAM,KAAK,KAAK,kBAAkB,OAAO;AAAA,MACpD;AAAA,IACF,CAAC,CAAC;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,YAAkB;AAChB,QAAI,CAAC,KAAK,WAAY;AAEtB,SAAK,KAAK,yBAAyB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAG5D,SAAK,2BAA2B,MAAM;AACtC,SAAK,4BAA4B;AAGjC,QAAI,KAAK,IAAI,eAAe,UAAU,MAAM;AAC1C,WAAK,GAAG,KAAK,KAAK,UAAU;AAAA,QAC1B,MAAM;AAAA,QACN,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK,IAAI;AAAA,MACtB,CAAC,CAAC;AAAA,IACJ;AAEA,SAAK,aAAa;AAClB,SAAK,SAAS,WAAW;AAEzB,SAAK,KAAK,wBAAwB,EAAE,WAAW,KAAK,IAAI,GAAG,QAAQ,OAAO,CAAC;AAAA,EAC7E;AAAA,EAEA,aAAoC;AAClC,WAAO,CAAC,GAAG,KAAK,OAAO;AAAA,EACzB;AAAA,EAEA,eAAqB;AACnB,SAAK,UAAU,CAAC;AAChB,SAAK,KAAK,kBAAkB,EAAE,cAAc,EAAE,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,cAAgC;AACpC,QAAI,CAAC,KAAK,MAAM,KAAK,GAAG,eAAe,UAAU,MAAM;AACrD,aAAO;AAAA,IACT;AAEA,WAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,YAAM,UAAU,WAAW,MAAM,QAAQ,KAAK,GAAG,GAAI;AAErD,YAAM,UAAU,CAAC,UAAwB;AACvC,cAAM,OAAO,KAAK,MAAM,MAAM,IAAI;AAClC,YAAI,KAAK,SAAS,QAAQ;AACxB,uBAAa,OAAO;AACpB,eAAK,IAAI,oBAAoB,WAAW,OAAO;AAC/C,kBAAQ,IAAI;AAAA,QACd;AAAA,MACF;AAEA,WAAK,IAAI,iBAAiB,WAAW,OAAO;AAC5C,WAAK,IAAI,KAAK,KAAK,UAAU,EAAE,MAAM,OAAO,CAAC,CAAC;AAAA,IAChD,CAAC;AAAA,EACH;AAAA;AAAA,EAIQ,SAAS,OAA6B;AAC5C,UAAM,gBAAgB,KAAK;AAC3B,SAAK,SAAS;AACd,SAAK,KAAK,gBAAgB,EAAE,OAAO,cAAc,CAAC;AAAA,EACpD;AAAA,EAEA,MAAc,aAAa,QAAuC;AAChE,UAAM,SAAS,KAAK,WAAW,IAAI,OAAO,QAAQ;AAClD,QAAI,UAAU,OAAO,YAAY,KAAK,IAAI,IAAI,KAAO;AACnD,aAAO,OAAO;AAAA,IAChB;AAGA,QAAI,OAAO,YAAY,WAAW;AAChC,aAAO,OAAO,YAAY;AAAA,IAC5B;AAIA,UAAM,WAAW,KAAK,gBAAgB;AACtC,QAAI,SAAS,WAAW,OAAO,KAAK,SAAS,SAAS,WAAW,GAAG;AAClE,aAAO;AAAA,IACT;AAGA,UAAM,eAAe,SAAS,QAAQ,UAAU,UAAU,EAAE,QAAQ,SAAS,SAAS;AACtF,UAAM,WAAW,MAAM,MAAM,GAAG,YAAY,eAAe;AAAA,MACzD,QAAQ;AAAA,MACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,MAC9C,MAAM,KAAK,UAAU;AAAA,QACnB,UAAU,OAAO;AAAA,QACjB,QAAQ,OAAO,YAAY;AAAA,MAC7B,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAA
M,IAAI,MAAM,gBAAgB,SAAS,UAAU,EAAE;AAAA,IACvD;AAEA,UAAM,EAAE,OAAO,UAAU,IAAI,MAAM,SAAS,KAAK;AAEjD,SAAK,WAAW,IAAI,OAAO,UAAU;AAAA,MACnC;AAAA,MACA,WAAW,KAAK,IAAI,IAAI,YAAY;AAAA,IACtC,CAAC;AAED,WAAO;AAAA,EACT;AAAA,EAEA,MAAc,cAA6B;AAEzC,UAAM,QAAQ,IAAI;AAAA;AAAA,OAEf,YAAY;AACX,aAAK,UAAU,IAAI,iBAAiB;AAAA,UAClC,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,UAAU;AAAA,QACZ,CAAC;AACD,cAAM,KAAK,QAAQ,KAAK;AAAA,MAC1B,GAAG;AAAA;AAAA,OAEF,YAAY;AACX,aAAK,MAAM,IAAI,mBAAmB;AAAA,UAChC,UAAU;AAAA,UACV,SAAS;AAAA,UACT,YAAY;AAAA,UACZ,WAAW;AAAA,QACb,CAAC;AACD,cAAM,KAAK,IAAI,KAAK;AAAA,MACtB,GAAG;AAAA,IACL,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,UAAyB;AAGrC,UAAM,SAAS,KAAK,gBAAgB,QAAQ,UAAU;AAEtD,SAAK,MAAM,IAAI,kBAAkB;AAAA,MAC/B,UAAU;AAAA,MACV,SAAS;AAAA,IACX,CAAC;AAED,UAAM,KAAK,IAAI,KAAK;AAGpB,UAAM,KAAK,aAAa;AAAA,EAC1B;AAAA,EAEA,MAAc,eAA8B;AAC1C,QAAI,CAAC,KAAK,KAAK;AACb,YAAM,IAAI,MAAM,yCAAyC;AAAA,IAC3D;AAEA,SAAK,WAAW,IAAI,oBAAoB;AAAA,MACtC,KAAK,KAAK;AAAA,MACV,YAAY;AAAA,MACZ,eAAe;AAAA,IACjB,CAAC;AAED,UAAM,KAAK,SAAS,WAAW;AAG/B,SAAK,SAAS,GAAG,eAAe,CAAC,UAAwB;AAEvD,WAAK,KAAK,aAAa;AAAA,QACrB,aAAa;AAAA,QACb,KAAK,CAAC,SAAiB;AACrB,gBAAM,MAAO,gBAAsC,QAAQ,IAAI;AAC/D,iBAAO,OAAO,IAAI,MAAM,GAAG,IAAI;AAAA,QACjC;AAAA,QACA,WAAW,KAAK,IAAI;AAAA;AAAA,QACpB,aAAa;AAAA;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAED,SAAK,SAAS,GAAG,qBAAqB,MAAM;AAC1C,WAAK,aAAa;AAClB,WAAK,SAAS,MAAM;AACpB,WAAK,KAAK,oBAAoB,EAAE,YAAY,EAAE,CAAC;AAAA,IACjD,CAAC;AAED,SAAK,SAAS,GAAG,SAAS,CAAC,UAAiB;AAC1C,cAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAK,KAAK,oBAAoB;AAAA,QAC5B;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,iBAAiB,WAAmB,QAAsC;AACtF,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAM,QAAQ,IAAI,IAAI,GAAG,KAAK,gBAAgB,SAAS,QAAQ,QAAQ,IAAI,CAAC,KAAK;AACjF,YAAM,aAAa,IAAI,aAAa,OAAO,SAAS;AACpD,YAAM,aAAa,IAAI,eAAe,OAAO,OAAO,WAAW;AAE/D,WAAK,KAAK,IAAI,UAAU,MAAM,SAAS,CAAC;AAExC,WAAK,GAAG,SAAS,MAAM;AAErB,aAAK,IAAI,KAAK,KAAK,UAAU;AAAA,UAC3B,MAAM;AAAA,UACN,OAAO;AAAA,UACP,UAAU,OAAO,OAAO;AAAA,UACxB,cAAc,OAAO;AAAA,QACvB,CAAC,CAAC;AAAA,MACJ;AAEA,WAAK,GAAG,YAAY,CAAC,UAAU;AAC7B,aAAK,uBAAuB,KAAK,MAAM,MAAM,IAAI,CAAC;AAAA,MACpD;AAEA,WAAK,GAAG,UAAU,MAAM;AACtB,eAAO,IAAI,MAAM,6BAA6B,CAAC;AAAA,MACjD;AAEA,WAAK,GAAG,UAAU,CAAC,UAAU;AAC3B,aAAK,iBAAiB,KAAK;AAAA,MAC7B;AAGA,YAAM,cAAc,WAAW,MAAM;AACnC,eAAO,IAAI,MAAM,cAAc,CAAC;AAAA,MAClC,GAAG,GAAK;AAER,YAAM,cAAc,CAAC,UAAwB;AAC3C,cAAM,OAAO,KAAK,MAAM,MAAM,IAAI;AAClC,YAAI,KAAK,SAAS,gBAAgB;AAChC,uBAAa,WAAW;AACxB,eAAK,IAAI,oBAAoB,WAAW,WAAW;AACnD,kBAAQ;AAAA,QACV,WAAW,KAAK,SAAS,eAAe;AACtC,uBAAa,WAAW;AACxB,iBAAO,IAAI,MAAM,KAAK,OAAO,CAAC;AAAA,QAChC;AAAA,MACF;AAEA,WAAK,GAAG,iBAAiB,WAAW,WAAW;AAAA,IACjD,CAAC;AAAA,EACH;AAAA,EAEQ,uBAAuB,MAAqC;AAClE,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,aAAK,SAAS,UAAU;AACxB,aAAK,aAAa;AAClB,aAAK,KAAK,qBAAqB;AAAA,UAC7B,MAAM,KAAK;AAAA,UACX,SAAS,KAAK;AAAA,QAChB,CAAC;AAED,YAAI,KAAK,SAAS;AAChB,eAAK,kBAAkB;AAAA,YACrB,EAAE,CAAC,KAAK,OAAiB,GAAG,IAAI;AAAA,YAChC;AAAA,UACF;AAAA,QACF;AAEA,YAAI,KAAK,UAAU;AACjB,eAAK,SAAS,MAAM;AAAA,QACtB;AACA;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,qBAAqB;AAAA,UAC7B,MAAM,KAAK;AAAA,UACX,QAAQ,KAAK;AAAA,QACf,CAAC;AACD;AAAA,MAEF,KAAK;AAEH,YAAI,KAAK,SAAS,KAAK,UAAU;AAC/B,gBAAM,YAAY,KAAK,oBAAoB,KAAK,KAAe;AAC/D,gBAAM,QAAQ,IAAI,WAAW,SAAS;AACtC,eAAK,SAAS,aAAa,KAAK,EAAE,MAAM,CAAC,UAAU;AACjD,oBAAQ,MAAM,qCAAqC,KAAK;AAAA,UAC1D,CAAC;AAAA,QACH;AACA;AAAA,MAEF,KAAK;AAEH,YAAI,KAAK,UAAU;AACjB,eAAK,SAAS,IAAI,EAAE,MAAM,CAAC,UAAU;AACnC,oBAAQ,MAAM,mCAAmC,KAAK;AAAA,UACxD,CAAC;AAAA,QACH;AAEA;AAAA,MAEF,KAAK;AACH,aAAK,aAAa;AAAA,UAChB,MAAM;AAAA,UACN,SAAS,KAAK;AAAA,UACd,WAAW,KAAK,IAAI;AAAA,UACpB,SAAS,KAAK;AAAA,QAChB,CAAC;AACD,aAAK,KAAK,mBAAmB;AAAA,UAC3B,UAAU,KAAK;AAAA,UACf,YAAY,KAAK,cAAwB;AA
AA,QAC3C,CAAC;AACD;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,kBAAkB;AAAA,UAC1B,cAAc,KAAK;AAAA,UACnB,YAAY,KAAK;AAAA,QACnB,CAAC;AACD;AAAA,MAEF,KAAK;AACH,aAAK,KAAK,oBAAoB;AAAA,UAC5B,OAAO,IAAI,MAAM,KAAK,OAAiB;AAAA,UACvC,aAAc,KAAK,eAA2B;AAAA,QAChD,CAAC;AACD;AAAA,IACJ;AAAA,EACF;AAAA,EAEQ,wBAA8B;AAIpC,QAAI,KAAK,YAAY,WAAW,EAAG;AAGnC,UAAM,cAAc,KAAK,YAAY,OAAO,CAACC,MAAK,QAAQA,OAAM,IAAI,QAAQ,CAAC;AAI7E,QAAI,cAAc,IAAM;AAExB,UAAM,QAAQ,IAAI,aAAa,WAAW;AAC1C,QAAI,SAAS;AACb,eAAW,OAAO,KAAK,aAAa;AAClC,YAAM,IAAI,KAAK,MAAM;AACrB,gBAAU,IAAI;AAAA,IAChB;AACA,SAAK,cAAc,CAAC;AAIpB,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,aAAO,MAAM,CAAC,IAAI,MAAM,CAAC;AAAA,IAC3B;AACA,UAAM,MAAM,KAAK,KAAK,MAAM,MAAM,MAAM;AAGxC,QAAI,MAAM,MAAM;AACd,cAAQ,MAAM,qCAAqC,EAAE,KAAK,SAAS,MAAM,OAAO,CAAC;AACjF;AAAA,IACF;AAGA,QAAI,KAAK,SAAS;AAChB,WAAK,SAAS,WAAW;AACzB,WAAK,KAAK,qBAAqB,EAAE,WAAW,KAAK,IAAI,EAAE,CAAC;AAExD,WAAK,QAAQ,WAAW,KAAK,EAAE,KAAK,CAAC,WAAW;AAC9C,aAAK,KAAK,yBAAyB;AAAA,UACjC,MAAM,OAAO;AAAA,UACb,YAAY;AAAA,QACd,CAAC;AACD,aAAK,KAAK,mBAAmB,EAAE,WAAW,KAAK,IAAI,GAAG,YAAY,OAAO,gBAAgB,CAAC;AAG1F,cAAM,YAAY,OAAO,KAAK,KAAK;AACnC,YAAI,aAAa,CAAC,UAAU,SAAS,eAAe,GAAG;AACrD,eAAK,SAAS,SAAS,EAAE,MAAM,CAAC,UAAU;AACxC,oBAAQ,MAAM,gCAAgC,KAAK;AAAA,UACrD,CAAC;AAAA,QACH;AAAA,MACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,gBAAQ,MAAM,oCAAoC,KAAK;AAAA,MACzD,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,oBAAoB,OAAoD;AAEpF,UAAM,UAAU,iBAAiB,eAC7B,QACA,KAAK,eAAe,KAAK;AAG7B,QAAI,KAAK,KAAK;AAEZ,YAAM,YAAY,KAAK,IAAI,aAAa;AAGxC,eAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,QAAQ,KAAK,WAAW;AAC/D,cAAM,QAAQ,QAAQ,MAAM,GAAG,IAAI,SAAS;AAC5C,cAAM,SAAS,MAAM,KAAK,IAAI,QAAQ,KAAK;AAG3C,YAAI,OAAO,UAAU;AACnB,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAGA,QAAI,MAAM;AACV,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,aAAO,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,IAC/B;AACA,UAAM,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM;AAC1C,WAAO,MAAM;AAAA,EACf;AAAA,EAEQ,eAAe,OAAiC;AACtD,UAAM,UAAU,IAAI,aAAa,MAAM,MAAM;AAC7C,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAQ,CAAC,IAAI,MAAM,CAAC,IAAI;AAAA,IAC1B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,oBAAoB,QAA6B;AACvD,UAAM,eAAe,KAAK,MAAM;AAChC,UAAM,QAAQ,IAAI,WAAW,aAAa,MAAM;AAChD,aAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,YAAM,CAAC,IAAI,aAAa,WAAW,CAAC;AAAA,IACtC;AACA,WAAO,MAAM;AAAA,EACf;AAAA,EAEQ,aAAa,SAAoC;AACvD,SAAK,QAAQ,KAAK,OAAO;AACzB,SAAK,KAAK,kBAAkB,EAAE,cAAc,KAAK,QAAQ,OAAO,CAAC;AAAA,EACnE;AAAA,EAEQ,iBAAiB,OAAyB;AAChD,SAAK,eAAe;AAEpB,QAAI,MAAM,SAAS,KAAM;AAEvB,UAAI,KAAK,sBAAsB,KAAK,sBAAsB;AACxD,aAAK;AACL,mBAAW,MAAM;AACf,cAAI,KAAK,eAAe;AACtB,iBAAK,QAAQ,KAAK,aAAa,EAAE,MAAM,MAAM;AAAA,YAE7C,CAAC;AAAA,UACH;AAAA,QACF,GAAG,KAAK,IAAI,GAAG,KAAK,mBAAmB,IAAI,GAAI;AAAA,MACjD,OAAO;AACL,aAAK,SAAS,OAAO;AACrB,aAAK,KAAK,oBAAoB;AAAA,UAC5B,OAAO,IAAI,MAAM,mCAAmC;AAAA,UACpD,aAAa;AAAA,QACf,CAAC;AAAA,MACH;AAAA,IACF;AAEA,SAAK,KAAK,qBAAqB,EAAE,QAAQ,MAAM,UAAU,oBAAoB,CAAC;AAAA,EAChF;AACF;;;AC7oBA,IAAM,0BAAN,MAA6D;AAAA,EAW3D,YACE,QACA,SACA;AARF,SAAQ,WAAkC,CAAC;AAC3C,SAAQ,WAAW,oBAAI,IAAoB;AAQzC,SAAK,YAAY,OAAO;AACxB,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,YAAY,KAAK,IAAI;AAC1B,SAAK,kBAAkB,KAAK,IAAI;AAChC,SAAK,qBAAqB,IAAI,kBAAkB;AAEhD,QAAI,OAAO,SAAS;AAClB,WAAK,mBAAmB,UAAU,OAAO,OAAkE;AAAA,IAC7G;AAAA,EACF;AAAA,EAEA,IAAI,UAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,SAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,QAAwB;AAC1B,WAAO,KAAK,SAAS;AAAA,EACvB;AAAA,EAEA,IAAI,UAAiC;AACnC,WAAO,CAAC,GAAG,KAAK,QAAQ;AAAA,EAC1B;AAAA,EAEA,IAAI,UAA0B;AAC5B,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,IAAI,iBAAyB;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,QAAuB;AAC3B,UAAM,KAAK,SAAS,QAAQ,KAAK,OAAO;AACxC,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,MAAM,MAAqB;AA
CzB,UAAM,KAAK,SAAS,WAAW;AAAA,EACjC;AAAA,EAEA,UAAU,OAAwC;AAChD,SAAK,SAAS,UAAU,KAAK;AAC7B,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,MAAM,SAAS,MAA6B;AAC1C,UAAM,KAAK,SAAS,SAAS,IAAI;AACjC,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,YAAkB;AAChB,SAAK,SAAS,UAAU;AACxB,SAAK,kBAAkB,KAAK,IAAI;AAAA,EAClC;AAAA,EAEA,WAAW,SAA+B;AACxC,SAAK,mBAAmB,IAAI,OAAO;AAAA,EACrC;AAAA,EAEA,WAAW,KAAa,OAAqB;AAC3C,SAAK,SAAS,IAAI,KAAK,KAAK;AAAA,EAC9B;AAAA,EAEA,cAAc,KAAmB;AAC/B,SAAK,SAAS,OAAO,GAAG;AAAA,EAC1B;AAAA,EAEA,aAAqC;AACnC,WAAO,OAAO,YAAY,KAAK,QAAQ;AAAA,EACzC;AAAA,EAEA,SAA0B;AACxB,WAAO;AAAA,MACL,WAAW,KAAK;AAAA,MAChB,UAAU,KAAK,QAAQ,OAAO;AAAA,MAC9B,aAAa,KAAK,QAAQ,OAAO;AAAA,MACjC,SAAS,KAAK;AAAA,MACd,SAAS,OAAO,YAAY,KAAK,QAAQ;AAAA,MACzC,SAAS,KAAK;AAAA,MACd,WAAW,KAAK;AAAA,MAChB,gBAAgB,KAAK;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,OAAO,UAAiC;AACtC,SAAK,WAAW,CAAC,GAAG,SAAS,OAAO;AACpC,SAAK,WAAW,IAAI,IAAI,OAAO,QAAQ,SAAS,OAAO,CAAC;AACxD,SAAK,kBAAkB,SAAS;AAAA,EAClC;AAAA,EAEA,cAAoB;AAClB,SAAK,WAAW,KAAK,SAAS,WAAW;AAAA,EAC3C;AACF;AAKO,IAAM,2BAAN,cAAuC,aAAiC;AAAA,EAgB7E,YAAY,QAA4B;AACtC,UAAM;AAVR;AAAA,SAAQ,WAAW,oBAAI,IAAqC;AAG5D;AAAA,SAAQ,UAAU,oBAAI,IAA0B;AAGhD;AAAA,SAAQ,sBAA6D;AACrE,SAAiB,2BAA2B;AAI1C,SAAK,SAAS;AAAA,MACZ,qBAAqB;AAAA,MACrB,YAAY;AAAA,MACZ,GAAG;AAAA,IACL;AAGA,SAAK,UAAU,IAAI,iBAAiB,OAAO,OAAO;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAA4B;AACzC,SAAK,QAAQ,IAAI,OAAO,UAAU,MAAM;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,UAAwB;AACvC,SAAK,QAAQ,OAAO,QAAQ;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,UAA4C;AACpD,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,UACA,UAAkC,CAAC,GACL;AAC9B,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,qBAAqB,QAAQ,EAAE;AAAA,IACjD;AAEA,UAAM,YAAY,QAAQ,aAAa,KAAK,kBAAkB;AAE9D,UAAM,gBAA+B;AAAA,MACnC;AAAA,MACA;AAAA,MACA,cAAc,QAAQ;AAAA,MACtB,OAAO,QAAQ;AAAA,MACf,SAAS,QAAQ;AAAA,MACjB,UAAU,QAAQ;AAAA,IACpB;AAEA,UAAM,UAAU,IAAI,wBAAwB,eAAe,KAAK,OAAO;AAEvE,SAAK,SAAS,IAAI,WAAW,OAAO;AAGpC,SAAK,qBAAqB,KAAK,SAAS,SAAS;AAGjD,UAAM,QAAQ,MAAM;AAEpB,SAAK,KAAK,mBAAmB,EAAE,WAAW,SAAS,CAAC;AAEpD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,WAAkC;AACjD,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,SAAS;AACX,YAAM,QAAQ,IAAI;AAClB,WAAK,SAAS,OAAO,SAAS;AAC9B,WAAK,KAAK,iBAAiB,EAAE,WAAW,QAAQ,mBAAmB,CAAC;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,WAAoD;AAC7D,WAAO,KAAK,SAAS,IAAI,SAAS;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAyC;AACzD,WAAO,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC,EACrC,OAAO,OAAK,EAAE,OAAO,OAAO,aAAa,QAAQ;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,wBAA8B;AAC5B,QAAI,KAAK,oBAAqB;AAE9B,SAAK,sBAAsB,YAAY,YAAY;AACjD,YAAM,KAAK,mBAAmB;AAAA,IAChC,GAAG,KAAK,wBAAwB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,uBAA6B;AAC3B,QAAI,KAAK,qBAAqB;AAC5B,oBAAc,KAAK,mBAAmB;AACtC,WAAK,sBAAsB;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,SAAK,qBAAqB;AAG1B,UAAM,cAAc,MAAM,KAAK,KAAK,SAAS,OAAO,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI,CAAC;AACvE,UAAM,QAAQ,IAAI,WAAW;AAC7B,SAAK,SAAS,MAAM;AAGpB,UAAM,KAAK,QAAQ,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,oBAA4B;AAClC,WAAO,QAAQ,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAAA,EACtE;AAAA,EAEQ,qBAAqB,SAAoB,WAAyB;AAExE,UAAM,SAAoC;AAAA,MACxC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,SAAS,QAAQ;AAC1B,cAAQ,GAAG,OAAO,CAAC,SAAS;AAC1B,cAAM,YAAY;AAClB,aAAK,KAAK,OAAO,EAAE,GAAG,WAAW,UAAU,CAAkC;AAAA,MAC/E,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAc,qBAAoC;AAChD,QAAI;AACF,YAAM,KAAK,QAAQ,YAAY;AAAA,IACjC,QAAQ;AAAA,IAER;AAAA,EACF;AACF;;;AC9SO,IAAM,iBAAN,MAAM,eAAc;AAAA,
EAApB;AACL,SAAQ,UAAU,oBAAI,IAA0B;AAChD,SAAQ,SAAS,oBAAI,IAAyB;AAC9C,SAAQ,QAAQ,oBAAI,IAAyB;AAC7C,SAAQ,wBAAwB,oBAAI,IAAkC;AAAA;AAAA;AAAA;AAAA;AAAA,EAetE,SACE,QACA,QAAqB,eAAc,eACnC,sBACM;AACN,SAAK,QAAQ,IAAI,OAAO,UAAU,MAAM;AACxC,SAAK,OAAO,IAAI,OAAO,UAAU,KAAK;AACtC,SAAK,MAAM,IAAI,OAAO,UAAU;AAAA,MAC9B,iBAAiB;AAAA,MACjB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,iBAAiB,KAAK,IAAI;AAAA,MAC1B,gBAAgB,KAAK,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,sBAAsB;AACxB,WAAK,sBAAsB,IAAI,OAAO,UAAU,oBAAoB;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAwB;AACjC,SAAK,QAAQ,OAAO,QAAQ;AAC5B,SAAK,OAAO,OAAO,QAAQ;AAC3B,SAAK,MAAM,OAAO,QAAQ;AAC1B,SAAK,sBAAsB,OAAO,QAAQ;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAA4C;AAC9C,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,UAA2B;AAC7B,WAAO,KAAK,QAAQ,IAAI,QAAQ;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAyB;AACvB,WAAO,MAAM,KAAK,KAAK,QAAQ,KAAK,CAAC;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,UAA2B;AAC1C,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAE7B,WAAO,MAAM,kBAAkB,MAAM;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,UAA2B;AACxC,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAG7B,SAAK,iBAAiB,QAAQ;AAE9B,WAAO,MAAM,qBAAqB,MAAM;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,UAAkB,SAA0B;AACtD,UAAM,QAAQ,KAAK,OAAO,IAAI,QAAQ;AACtC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AAErC,QAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAG7B,SAAK,gBAAgB,QAAQ;AAE7B,WAAO,MAAM,oBAAoB,WAAW,MAAM;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAwB;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAwB;AACxC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,SAAS,MAAM,kBAAkB,GAAG;AACtC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,UAAwB;AACpC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,WAAK,iBAAiB,QAAQ;AAC9B,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,UAAkB,QAAsB;AACnD,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM,cAAc;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,UAAkB,SAAuB;AAC1D,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,WAAK,gBAAgB,QAAQ;AAC7B,YAAM,qBAAqB;AAAA,IAC7B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,UAAmC;AACpD,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,qBAAqB,QAAQ,EAAE;AAAA,IACjD;AAGA,UAAM,WAAW,KAAK,sBAAsB,IAAI,QAAQ;AACxD,QAAI,UAAU;AACZ,YAAM,QAAQ,MAAM,SAAS;AAC7B,aAAO,YAAY,YAAY;AAC/B,aAAO;AAAA,IACT;AAGA,QAAI,OAAO,YAAY,WAAW;AAChC,aAAO,OAAO,YAAY;AAAA,IAC5B;AAEA,UAAM,IAAI,MAAM,uCAAuC,QAAQ,EAAE;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,UAAkB,aAAyD;AAC3F,UAAM,SAAS,KAAK,QAAQ,IAAI,QAAQ;AACxC,QAAI,QAAQ;AACV,aAAO,cAAc,EAAE,GAAG,OAAO,aAAa,GAAG,YAAY;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAA2C;AAClD,WAAO,KAAK,MAAM,IAAI,QAAQ;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,UAA2C;AAClD,WAAO,KAAK,OAAO,IAAI,QAAQ;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,UAAkB,OAAmC;AAC/D,UAAM,WAAW,KAAK,OAAO,IAAI,QAAQ;AACzC,QAAI,UAAU;AACZ,WAAK,OAAO,IAAI,UAAU,EAAE,GAAG,UAAU,GAAG,MAAM,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,UAAwB;AACjC,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,OAAO;AACT,YAAM,qBAAqB;AAC3B,YAAM,aAAa;AACnB,YAAM,oBAAoB;AAC1B,YAAM,kBAAkB,KAAK,IAAI;AACjC,YAAM,iBAAiB,KAAK,IAAI;AAAA,IAClC;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,UAAwB;AAC/C,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO;AAEZ,UAAM,MAAM,KAAK,IAAI;AACrB,QAAI,MAAM,MAAM,mBAAmB,KAAO;AACxC,YAAM,qBAAqB;AAC3B,YAAM,kBAAkB;AAAA,IAC1B;AAAA,EACF;AAAA,EAEQ,gBAAgB,UAAwB;AAC9C,UAAM,QAAQ,KAAK,MAAM,IAAI,QAAQ;AACrC,QAAI,CAAC,MAAO;AAEZ,UAAM,
MAAM,KAAK,IAAI;AACrB,UAAM,aAAa,KAAK,KAAK,KAAK;AAClC,QAAI,MAAM,MAAM,kBAAkB,YAAY;AAC5C,YAAM,oBAAoB;AAC1B,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AACF;AAAA;AAAA;AAAA;AAtQa,eASK,gBAA6B;AAAA,EAC3C,aAAa;AAAA,EACb,mBAAmB;AAAA,EACnB,0BAA0B;AAAA,EAC1B,uBAAuB;AACzB;AAdK,IAAM,gBAAN;;;ACbA,IAAM,mBAAN,cAA+B,aAA8B;AAAA,EAUlE,YAAY,SAA0B,CAAC,GAAG;AACxC,UAAM;AARR,SAAQ,iBAAiB;AACzB,SAAQ,gBAAgC,CAAC;AACzC,SAAQ,YAAY;AACpB,SAAQ,eAAoC;AAC5C,SAAQ,oBAAoB;AAC5B,SAAQ,gBAAgB;AAItB,SAAK,SAAS;AAAA,MACZ,YAAY;AAAA,MACZ,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,GAAG;AAAA,IACL;AAEA,SAAK,cAAc,IAAI,aAAa,KAAK,OAAO,UAAU;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,IAAI,aAAa,EAAE,YAAY,KAAK,OAAO,WAAW,CAAC;AAAA,IAC7E;AAEA,QAAI,KAAK,aAAa,UAAU,aAAa;AAC3C,YAAM,KAAK,aAAa,OAAO;AAAA,IACjC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAA2B;AAEnC,SAAK,cAAc,KAAK,KAAK;AAG7B,SAAK,mBAAmB,KAAK;AAG7B,QAAI,CAAC,KAAK,aAAa,KAAK,cAAc,SAAS,GAAG;AACpD,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,OAA2B;AACpD,QAAI,SAAS;AAEb,WAAO,SAAS,MAAM,QAAQ;AAC5B,YAAM,YAAY,KAAK,OAAO,aAAa,KAAK;AAChD,YAAM,SAAS,KAAK,IAAI,WAAW,MAAM,SAAS,MAAM;AAExD,WAAK,YAAY,IAAI,MAAM,SAAS,QAAQ,SAAS,MAAM,GAAG,KAAK,cAAc;AACjF,WAAK,kBAAkB;AACvB,gBAAU;AAGV,UAAI,KAAK,kBAAkB,KAAK,OAAO,YAAY;AACjD,aAAK,KAAK,gBAAgB,EAAE,OAAO,IAAI,aAAa,KAAK,WAAW,EAAE,CAAC;AAGvE,cAAM,eAAe,KAAK,OAAO,aAAa,KAAK,OAAO;AAC1D,aAAK,YAAY,WAAW,GAAG,YAAY;AAC3C,aAAK,iBAAiB,KAAK,OAAO;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBAA+B;AAC3C,QAAI,CAAC,KAAK,gBAAgB,KAAK,UAAW;AAE1C,SAAK,YAAY;AACjB,SAAK,oBAAoB,KAAK,aAAa;AAC3C,SAAK,gBAAgB;AAErB,SAAK,KAAK,kBAAkB,CAAC,CAAC;AAE9B,UAAM,KAAK,qBAAqB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBAAsC;AAClD,QAAI,CAAC,KAAK,aAAc;AAExB,WAAO,KAAK,cAAc,SAAS,GAAG;AACpC,YAAM,QAAQ,KAAK,cAAc,MAAM;AAGvC,YAAM,SAAS,KAAK,aAAa,aAAa,GAAG,MAAM,QAAQ,KAAK,OAAO,UAAU;AACrF,aAAO,cAAc,OAAO,CAAC;AAE7B,YAAM,SAAS,KAAK,aAAa,mBAAmB;AACpD,aAAO,SAAS;AAChB,aAAO,QAAQ,KAAK,aAAa,WAAW;AAG5C,YAAM,WAAW,KAAK,oBAAoB,KAAK,gBAAgB,KAAK,OAAO;AAC3E,aAAO,MAAM,QAAQ;AAErB,WAAK,iBAAiB,MAAM;AAG5B,WAAK,WAAW;AAGhB,YAAM,IAAI,QAAQ,aAAW;AAC3B,eAAO,UAAU;AAAA,MACnB,CAAC;AAAA,IACH;AAEA,SAAK,YAAY;AACjB,SAAK,KAAK,gBAAgB,CAAC,CAAC;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAmB;AACzB,QAAI,CAAC,KAAK,aAAc;AAExB,UAAM,eAAe,KAAK,oBAAoB,KAAK,gBAAgB,KAAK,OAAO;AAC/E,UAAM,aAAa,KAAK,aAAa;AACrC,UAAM,WAAW,aAAa,gBAAgB;AAE9C,QAAI,KAAK,IAAI,OAAO,IAAI,KAAK,OAAO,YAAY;AAC9C,WAAK,KAAK,cAAc,EAAE,QAAQ,CAAC;AAAA,IACrC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmB;AACjB,SAAK,gBAAgB,CAAC;AACtB,SAAK,iBAAiB;AACtB,SAAK,YAAY,KAAK,CAAC;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,OAAa;AACX,SAAK,WAAW;AAChB,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,sBAA8B;AAC5B,QAAI,CAAC,KAAK,aAAc,QAAO;AAC/B,WAAO,KAAK,aAAa,cAAc,KAAK;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,eAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,UAAgB;AACd,SAAK,KAAK;AACV,SAAK,cAAc,MAAM;AACzB,SAAK,eAAe;AAAA,EACtB;AACF;;;ACpLO,IAAM,sBAAN,cAAkC,aAAiC;AAAA,EAWxE,YAAY,SAA6B,CAAC,GAAG;AAC3C,UAAM;AAVR,SAAQ,aAAa;AACrB,SAAQ,kBAAkB;AAC1B,SAAQ,iBAAiB;AACzB,SAAQ,eAAqD;AAC7D,SAAQ,eAAe;AAGvB;AAAA,SAAQ,mCAAmC;AAIzC,SAAK,SAAS;AAAA,MACZ,cAAc;AAAA;AAAA,MACd,qBAAqB;AAAA;AAAA,MACrB,kBAAkB;AAAA;AAAA,MAClB,SAAS;AAAA,MACT,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,iBAAiB,gBAAwB,cAAsB,GAAS;AACtE,QAAI,CAAC,KAAK,OAAO,QAAS;AAE1B,QAAI,iBAAiB,KAAK,OAAO,cAAc;AAC7C,WAAK,iBAAiB,eAAe,cAAc;AAAA,IACrD,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aAAa,SAA0C;AACrD,QAAI,CAAC,KAAK,OAAO,QAAS;AAE1B,UAAM,MAAM,KAAK,aAAa,OAAO;AAIrC,UAAM,iBAAiB,KAAK,IAAI,MAAM,MAAM,C
AAG;AAE/C,QAAI,iBAAiB,KAAK,OAAO,cAAc;AAC7C,WAAK,iBAAiB,GAAG;AAAA,IAC3B,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,UAAyB;AACrC,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAAwB;AACjC,SAAK,OAAO,UAAU;AACtB,QAAI,CAAC,SAAS;AACZ,WAAK,MAAM;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,QAA2C;AACtD,SAAK,SAAS,EAAE,GAAG,KAAK,QAAQ,GAAG,OAAO;AAAA,EAC5C;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,aAAa;AAClB,SAAK,kBAAkB;AACvB,SAAK,iBAAiB;AACtB,SAAK,mCAAmC;AACxC,QAAI,KAAK,cAAc;AACrB,mBAAa,KAAK,YAAY;AAC9B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,WAA8D;AAC5D,WAAO;AAAA,MACL,YAAY,KAAK;AAAA,MACjB,kBAAkB,KAAK,aAAa,KAAK,IAAI,IAAI,KAAK,kBAAkB;AAAA,IAC1E;AAAA,EACF;AAAA;AAAA,EAIQ,aAAa,SAA4C;AAC/D,QAAI,MAAM;AACV,UAAM,QAAQ,mBAAmB,aAAa,QAAQ;AAEtD,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,SAAS,QAAQ,CAAC,IAAI;AAC5B,aAAO,SAAS;AAAA,IAClB;AAEA,WAAO,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACvC;AAAA,EAEQ,iBAAiB,KAAmB;AAC1C,UAAM,MAAM,KAAK,IAAI;AACrB,SAAK,iBAAiB;AAGtB,QAAI,KAAK,cAAc;AACrB,mBAAa,KAAK,YAAY;AAC9B,WAAK,eAAe;AAAA,IACtB;AAGA,QAAI,CAAC,KAAK,YAAY;AACpB,WAAK,aAAa;AAClB,WAAK,kBAAkB;AACvB,WAAK,KAAK,mBAAmB,EAAE,IAAI,CAAC;AAAA,IACtC;AAGA,QAAI,KAAK,gBAAgB,CAAC,KAAK,kCAAkC;AAC/D,YAAM,iBAAiB,MAAM,KAAK;AAClC,UAAI,kBAAkB,KAAK,OAAO,qBAAqB;AACrD,aAAK,mCAAmC;AACxC,aAAK,KAAK,0BAA0B,EAAE,KAAK,YAAY,eAAe,CAAC;AAAA,MACzE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,oBAA0B;AAChC,QAAI,CAAC,KAAK,WAAY;AAGtB,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,WAAW,MAAM;AACnC,cAAM,aAAa,KAAK,iBAAiB,KAAK;AAC9C,aAAK,aAAa;AAClB,aAAK,eAAe;AAEpB,aAAK,mCAAmC;AACxC,aAAK,KAAK,gBAAgB,EAAE,WAAW,CAAC;AAAA,MAC1C,GAAG,KAAK,OAAO,gBAAgB;AAAA,IACjC;AAAA,EACF;AACF;;;ACjMO,IAAM,kBACX;AA8BK,SAAS,oBAAoB,KAAwC;AAE1E,QAAM,UAAU;AAChB,QAAM,QAAQ,IAAI,MAAM,OAAO;AAE/B,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,KAAK,MAAM,CAAC;AAAA,IACZ,OAAO,MAAM,CAAC;AAAA,IACd,QAAQ,MAAM,CAAC;AAAA,IACf,MAAM,MAAM,CAAC;AAAA,EACf;AACF;AAsBA,eAAsB,0BAA0B,UAAkB,iBAAmC;AACnG,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,SAAS;AAAA,MACpC,QAAQ;AAAA,MACR,OAAO;AAAA;AAAA,IACT,CAAC;AAED,WAAO,SAAS;AAAA,EAClB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;;;AC/EA,IAAMC,WAAS,aAAa,mBAAmB;AAY/C,eAAsB,uBAAuB,SAKvB;AACpB,QAAM,UAAU,SAAS,WAAW;AACpC,QAAM,qBAAqB,SAAS,sBAAsB,CAAC;AAE3D,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,UAAM,gBAA0B,CAAC;AAEjC,UAAM,WAAW;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA,GAAG;AAAA,IACL;AAEA,eAAW,aAAa,YAAY;AAClC,YAAM,eAAe,SAAS;AAAA,QAAK,aACjC,UAAU,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC;AAAA,MACxD;AAEA,UAAI,cAAc;AAChB,YAAI,SAAS;AACX,UAAAA,SAAO,KAAK,kBAAkB,EAAE,UAAU,CAAC;AAAA,QAC7C;AACA,cAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,YAAI,SAAS;AACX,wBAAc,KAAK,SAAS;AAAA,QAC9B,WAAW,SAAS;AAClB,UAAAA,SAAO,KAAK,0BAA0B,EAAE,UAAU,CAAC;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS;AACX,MAAAA,SAAO,KAAK,2BAA2B;AAAA,QACrC,aAAa,WAAW;AAAA,QACxB,cAAc,cAAc;AAAA,QAC5B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,yBAAyB,EAAE,MAAM,CAAC;AAC/C,UAAM;AAAA,EACR;AACF;AAQA,eAAsB,mBAAmB,WAAqC;AAC5E,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,IAAAA,SAAO,KAAK,0BAA0B,EAAE,WAAW,QAAQ,CAAC;AAC5D,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,wBAAwB,EAAE,WAAW,MAAM,CAAC;AACzD,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,aAAgC;AACpD,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO,CAAC;AAAA,EACV;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,IAAAA,SAAO,MAAM,oBAAoB,EAAE,WAAW,CAAC;AAC/C,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,wBAAwB,EAAE,MAAM,CAAC;AAC9C,WAAO,CAAC;AA
AA,EACV;AACF;AASA,eAAsB,uBACpB,WACA,YAOC;AACD,MAAI,EAAE,YAAY,SAAS;AACzB,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAAA,EACF;AAEA,MAAI;AACF,UAAM,QAAQ,MAAM,OAAO,KAAK,SAAS;AACzC,UAAM,WAAW,MAAM,MAAM,MAAM,UAAU;AAE7C,QAAI,CAAC,UAAU;AACb,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,aAAa;AAAA,QACb,QAAQ;AAAA,QACR,QAAQ;AAAA,MACV;AAAA,IACF;AAEA,UAAM,cAAc,SAAS,QAAQ,IAAI,cAAc;AACvD,UAAM,SACJ,aAAa,SAAS,WAAW,KACjC,aAAa,SAAS,YAAY;AAGpC,UAAM,iBAAiB,SAAS,MAAM;AACtC,UAAM,OAAO,MAAM,eAAe,KAAK;AACvC,UAAM,gBAAgB,KAAK,KAAK,EAAE,WAAW,GAAG,KAAK,KAAK,SAAS,WAAW;AAE9E,UAAM,QAAQ;AAAA,MACZ,SAAS,WAAW,OACpB,CAAC,UACD,CAAC,iBACD,gBACC,YAAY,SAAS,kBAAkB,KACtC,YAAY,SAAS,0BAA0B,KAC/C,YAAY,SAAS,QAAQ;AAAA,IACjC;AAEA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,QAAQ,UAAU;AAAA,MAClB,QAAQ,QACJ,mBACA,mBAAmB,SAAS,MAAM,iBAAiB,WAAW,YAAY,UAAU,aAAa;AAAA,IACvG;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,oCAAoC,EAAE,WAAW,YAAY,MAAM,CAAC;AACjF,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,MACb,QAAQ;AAAA,MACR,QAAQ,UAAU,KAAK;AAAA,IACzB;AAAA,EACF;AACF;AAOA,eAAsB,uBAQnB;AACD,MAAI,EAAE,YAAY,SAAS;AACzB,WAAO,EAAE,aAAa,GAAG,gBAAgB,GAAG,gBAAgB,CAAC,EAAE;AAAA,EACjE;AAEA,QAAM,iBAA4E,CAAC;AACnF,MAAI,iBAAiB;AAErB,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AAErC,eAAW,aAAa,YAAY;AAClC,UAAI,CAAC,UAAU,YAAY,EAAE,SAAS,cAAc,GAAG;AACrD;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,OAAO,KAAK,SAAS;AACzC,YAAM,WAAW,MAAM,MAAM,KAAK;AAElC,iBAAW,WAAW,UAAU;AAC9B;AACA,cAAM,MAAM,QAAQ;AAEpB,cAAM,aAAa,MAAM,uBAAuB,WAAW,GAAG;AAE9D,YAAI,WAAW,UAAU,CAAC,WAAW,OAAO;AAC1C,yBAAe,KAAK;AAAA,YAClB;AAAA,YACA;AAAA,YACA,QAAQ,WAAW,UAAU;AAAA,UAC/B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,IAAAA,SAAO,KAAK,uBAAuB;AAAA,MACjC,aAAa,WAAW;AAAA,MACxB;AAAA,MACA,cAAc,eAAe;AAAA,IAC/B,CAAC;AAED,WAAO;AAAA,MACL,aAAa,WAAW;AAAA,MACxB;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,yBAAyB,EAAE,MAAM,CAAC;AAC/C,UAAM;AAAA,EACR;AACF;AAWA,eAAsB,kBAAkB,oBAAoB,OAAwB;AAClF,MAAI,EAAE,YAAY,SAAS;AACzB,IAAAA,SAAO,KAAK,6CAA6C;AACzD,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,aAAa,MAAM,OAAO,KAAK;AACrC,QAAI,eAAe;AAEnB,eAAW,aAAa,YAAY;AAClC,YAAM,UAAU,MAAM,OAAO,OAAO,SAAS;AAC7C,UAAI,SAAS;AACX;AAAA,MACF;AAAA,IACF;AAEA,IAAAA,SAAO,KAAK,8BAA8B;AAAA,MACxC,cAAc;AAAA,IAChB,CAAC;AAED,QAAI,mBAAmB;AAErB,YAAM,EAAE,KAAAC,KAAI,IAAI,MAAM,OAAO,iCAA2B;AACxD,MAAAA,KAAI,kBAAkB;AACtB,MAAAD,SAAO,KAAK,+DAA+D;AAAA,IAC7E;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,IAAAA,SAAO,MAAM,uBAAuB,EAAE,MAAM,CAAC;AAC7C,UAAM;AAAA,EACR;AACF;;;ACtHO,IAAM,2BAAiD;AAAA,EAC5D,cAAc;AAAA,EACd,sBAAsB;AAAA;AAAA,EACtB,kBAAkB;AAAA,EAClB,kBAAkB;AAAA,EAElB,QAAQ;AAAA,IACN;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,cAAc;AAAA,MAC1B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA,IACX;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,gBAAgB;AAAA,MAC5B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,oBAAoB,oBAAoB;AAAA,MACpD,aAAa,CAAC,KAAK,GAAG;AAAA,MACtB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,IACA;AAAA,MACE,MAAM;AAAA,MACN,WAAW,CAAC,cAAc;AAAA,MAC1B,aAAa,CAAC,CAAG;AAAA,MACjB,qBAAqB;AAAA,MACrB,qBAAqB;AAAA,MACrB,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EAEA,aAAa;AAAA;AAAA,IAEX,EAAE,MAAM,QAAQ,IAAI,aAAa,SAAS,qBAAqB,UAAU,IAAI;AAAA,IAC7E,EAAE,MAAM,YAAY,IAAI,aAAa,SAAS,qBAAqB,UAAU,IAAI;AAAA;AAAA;AAAA,IAGjF,EAAE,MAAM,aAAa,IAAI,YAAY,SAAS,oBAAoB,UAAU,IAAI;AAAA;AAAA,IAGhF,EAAE,MAAM,YAAY,IAAI,YAAY,SAAS,kBAAkB,UAAU,IAAI;AAAA,IAC7E,EAAE,MAAM,QAAQ,IAAI,YAAY,SAAS,kBAAkB,UAAU,IAAI;AAAA;AAAA,IAGzE,EAAE,MAAM,YAAY,IAAI,QAAQ,SAAS,mBAAmB,UAAU,IAAI;A
AAA;AAAA,IAG1E,EAAE,MAAM,aAAa,IAAI,QAAQ,SAAS,WAAW,UAAU,IAAI;AAAA,IACnE,EAAE,MAAM,YAAY,IAAI,QAAQ,SAAS,WAAW,UAAU,IAAI;AAAA;AAAA,IAGlE,EAAE,MAAM,YAAY,IAAI,aAAa,SAAS,aAAa,UAAU,IAAI;AAAA,EAC3E;AAAA,EAEA,iBAAiB;AAAA,IACf,EAAE,SAAS,SAAS,MAAM,iBAAiB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC3E,EAAE,SAAS,OAAO,MAAM,eAAe,WAAW,KAAK,YAAY,IAAI;AAAA,IACvE,EAAE,SAAS,SAAS,MAAM,iBAAiB,WAAW,KAAK,YAAY,IAAI;AAAA,IAC3E,EAAE,SAAS,WAAW,MAAM,gBAAgB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC5E,EAAE,SAAS,aAAa,MAAM,qBAAqB,WAAW,KAAK,YAAY,IAAI;AAAA,IACnF,EAAE,SAAS,QAAQ,MAAM,gBAAgB,WAAW,KAAK,YAAY,EAAI;AAAA,IACzE,EAAE,SAAS,WAAW,MAAM,mBAAmB,WAAW,KAAK,YAAY,EAAI;AAAA,IAC/E,EAAE,SAAS,WAAW,MAAM,mBAAmB,WAAW,GAAK,YAAY,EAAI;AAAA,EACjF;AAAA,EAEA,cAAc,CAAC,sBAAsB,uBAAuB,oBAAoB;AAClF;;;AC5NO,IAAM,iBAAN,cAA6B,aAAmC;AAAA,EA6BrE,YAAY,SAAwC,CAAC,GAAG;AACtD,UAAM;AA3BR,SAAQ,gBAAuC;AAG/C;AAAA,SAAQ,kBAA2B;AACnC,SAAQ,qBAA6B;AACrC,SAAQ,qBAA6B;AACrC,SAAQ,sBAA8B;AAGtC;AAAA,SAAQ,iBAAsC;AAC9C,SAAQ,oBAA4B;AACpC,SAAQ,qBAA6B;AACrC,SAAQ,sBAA8B;AAGtC;AAAA,SAAQ,cAAsB;AAC9B,SAAQ,gBAAwB;AAChC,SAAQ,qBAA6B;AAGrC;AAAA,SAAQ,iBAAyB;AACjC,SAAQ,iBAAyB;AAO/B,SAAK,SAAS,EAAE,GAAG,0BAA0B,GAAG,OAAO;AAGvD,UAAM,eAAe,KAAK,OAAO,OAAO;AAAA,MACtC,CAAC,MAAM,EAAE,SAAS,KAAK,OAAO;AAAA,IAChC;AACA,QAAI,CAAC,cAAc;AACjB,YAAM,IAAI,MAAM,kBAAkB,KAAK,OAAO,YAAY,aAAa;AAAA,IACzE;AACA,SAAK,eAAe;AACpB,SAAK,iBAAiB,KAAK,IAAI;AAC/B,SAAK,iBAAiB,KAAK,IAAI;AAG/B,SAAK,eAAe,KAAK,cAAc;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,QAA4B;AAC9B,WAAO,KAAK,aAAa;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,SAA0B;AAC5B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,OAAkC;AAExC,UAAM,aAAa,KAAK,OAAO,YAAY;AAAA,MACzC,CAAC,MACC,EAAE,SAAS,KAAK,aAAa,QAC7B,EAAE,YAAY,UACb,CAAC,EAAE,aAAa,EAAE,UAAU;AAAA,IACjC;AAEA,QAAI,CAAC,YAAY;AACf,aAAO;AAAA,IACT;AAEA,SAAK,gBAAgB,YAAY,KAAK;AACtC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,SAAuB,YAA0B;AAC1D,UAAM,cAAc,KAAK;AAEzB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,UAAU,CAAC;AAG5D,UAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,MAC1C,CAAC,MAAM,EAAE,YAAY;AAAA,IACvB;AACA,QAAI,WAAW,KAAK,aAAa,qBAAqB;AACpD,WAAK,sBAAsB,QAAQ,YAAY,KAAK;AAAA,IACtD,OAAO;AACL,WAAK,sBAAsB;AAAA,IAC7B;AAEA,QAAI,gBAAgB,SAAS;AAC3B,WAAK,KAAK,kBAAkB,EAAE,SAAS,WAAW,CAAC;AAAA,IACrD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,eAAqB;AACnB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB;AACzB,SAAK,sBAAsB;AAC3B,SAAK,KAAK,kBAAkB,EAAE,SAAS,MAAM,YAAY,EAAE,CAAC;AAAA,EAC9D;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAAsB;AACnC,SAAK,cAAc,KAAK,IAAI,GAAG,KAAK,IAAI,GAAG,MAAM,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,WAA+B,gBAAwB,KAAW;AACzE,UAAM,cAAc,KAAK,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACvE,QAAI,CAAC,aAAa;AAChB,cAAQ,KAAK,2BAA2B,SAAS,aAAa;AAC9D;AAAA,IACF;AAEA,QAAI,YAAY,SAAS,KAAK,aAAa,QAAQ,CAAC,KAAK,iBAAiB;AACxE;AAAA,IACF;AAGA,UAAM,mBAA+B;AAAA,MACnC,MAAM,KAAK,aAAa;AAAA,MACxB,IAAI;AAAA,MACJ,SAAS;AAAA;AAAA,MACT,UAAU;AAAA,IACZ;AAEA,SAAK,gBAAgB,kBAAkB,SAAS;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,SAAmC;AACxC,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,KAAK,WAAW,MAAM,KAAK;AACjC,SAAK,iBAAiB;AAEtB,UAAM,YAAY,KAAK;AAGvB,QAAI,KAAK,iBAAiB;AACxB,WAAK,iBAAiB,SAAS;AAAA,IACjC;AAGA,SAAK,aAAa,GAAG;AAGrB,SAAK,mBAAmB,SAAS;AAGjC,SAAK,cAAc,SAAS;AAG5B,SAAK,eAAe,KAAK,cAAc;AACvC,SAAK,KAAK,iBAAiB,KAAK,YAAY;AAE5C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,UAAM,eAAe,KAAK,OAAO,OAAO;AAAA,MACtC,CAAC,MAAM,EAAE,SAAS,KAAK,OAAO;AAAA,IAChC;AACA,QAAI,cAAc;AAChB,WAAK,eAAe;AACpB,WAAK,gBAAgB;AACrB,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,iBAAiB,KAAK,IAAI;AAC/B,WAAK,qBAAqB;AAC1B,WAAK,gBAAgB;AACrB,WAAK,eAAe,KAAK,cAAc;AAAA,IACzC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAA6B;AAC3B,UAAM,QAAQ,oBAAI,IAAY;AAG9B,eAAW,SAAS,KAAK,OAAO,Q
AAQ;AACtC,iBAAW,QAAQ,MAAM,WAAW;AAClC,cAAM,IAAI,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,eAAW,WAAW,KAAK,OAAO,iBAAiB;AACjD,YAAM,IAAI,QAAQ,IAAI;AAAA,IACxB;AAGA,eAAW,QAAQ,KAAK,OAAO,cAAc;AAC3C,YAAM,IAAI,IAAI;AAAA,IAChB;AAEA,WAAO,MAAM,KAAK,KAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAMQ,gBAAgB,YAAwB,OAA+B;AAC7E,UAAM,cAAc,KAAK,OAAO,OAAO;AAAA,MACrC,CAAC,MAAM,EAAE,SAAS,WAAW;AAAA,IAC/B;AACA,QAAI,CAAC,aAAa;AAChB,cAAQ,KAAK,kCAAkC,WAAW,EAAE,aAAa;AACzE;AAAA,IACF;AAEA,UAAM,YAAY,KAAK,aAAa;AAEpC,SAAK,gBAAgB,KAAK;AAC1B,SAAK,eAAe;AACpB,SAAK,kBAAkB;AACvB,SAAK,qBAAqB;AAC1B,SAAK,qBAAqB,WAAW;AACrC,SAAK,sBAAsB,KAAK,IAAI;AACpC,SAAK,iBAAiB,KAAK,IAAI;AAG/B,QAAI,CAAC,KAAK,aAAa,qBAAqB;AAC1C,WAAK,sBAAsB;AAAA,IAC7B;AAEA,SAAK,KAAK,gBAAgB;AAAA,MACxB,MAAM;AAAA,MACN,IAAI,YAAY;AAAA,MAChB,SAAS;AAAA,IACX,CAAC;AAED,SAAK,KAAK,oBAAoB;AAAA,MAC5B,MAAM;AAAA,MACN,IAAI,YAAY;AAAA,MAChB,UAAU,WAAW;AAAA,IACvB,CAAC;AAAA,EACH;AAAA,EAEQ,iBAAiB,WAAyB;AAChD,QAAI,CAAC,KAAK,mBAAmB,KAAK,sBAAsB,GAAG;AACzD,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B;AAAA,IACF;AAGA,UAAM,UAAU,KAAK,IAAI,IAAI,KAAK;AAClC,SAAK,qBAAqB,KAAK,IAAI,GAAG,UAAU,KAAK,kBAAkB;AAEvE,QAAI,KAAK,sBAAsB,GAAG;AAChC,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,gBAAgB;AACrB,WAAK,KAAK,kBAAkB,EAAE,OAAO,KAAK,aAAa,KAAK,CAAC;AAAA,IAC/D;AAAA,EACF;AAAA,EAEQ,aAAa,KAAmB;AACtC,QAAI,KAAK,gBAAiB;AAC1B,QAAI,KAAK,aAAa,WAAW,EAAG;AAEpC,UAAM,UAAU,MAAM,KAAK;AAC3B,QAAI,WAAW,KAAK,aAAa,SAAS;AACxC,WAAK,QAAQ,SAAS;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,mBAAmB,WAAyB;AAClD,QAAI,CAAC,KAAK,gBAAgB;AAExB,WAAK,qBAAqB,KAAK;AAAA,QAC7B;AAAA,QACA,KAAK,qBAAqB,YAAY;AAAA,MACxC;AACA;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,MAC1C,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,IAC5B;AACA,UAAM,aAAa,SAAS,cAAc;AAG1C,UAAM,OAAO,KAAK,sBAAsB,KAAK;AAC7C,UAAM,YAAY,aAAa;AAE/B,QAAI,KAAK,IAAI,IAAI,KAAK,WAAW;AAC/B,WAAK,qBAAqB,KAAK;AAAA,IACjC,OAAO;AACL,WAAK,sBAAsB,KAAK,KAAK,IAAI,IAAI;AAAA,IAC/C;AAAA,EACF;AAAA,EAEQ,cAAc,WAAyB;AAC7C,QAAI,CAAC,KAAK,aAAa,qBAAqB;AAC1C,WAAK,gBAAgB,KAAK,IAAI,GAAG,KAAK,gBAAgB,YAAY,CAAG;AACrE;AAAA,IACF;AAGA,UAAM,gBACJ,KAAK,cAAc,KAAK,OAAO,mBAC3B,KAAK,cAAc,KAAK,OAAO,mBAC/B;AAGN,UAAM,OAAO,gBAAgB,KAAK;AAClC,UAAM,aAAa;AACnB,UAAM,YAAY,aAAa;AAE/B,QAAI,KAAK,IAAI,IAAI,KAAK,WAAW;AAC/B,WAAK,gBAAgB;AAAA,IACvB,OAAO;AACL,WAAK,iBAAiB,KAAK,KAAK,IAAI,IAAI;AAAA,IAC1C;AAGA,UAAM,YAAY,KAAK,OAAO,aAAa;AAC3C,QAAI,YAAY,GAAG;AACjB,WAAK,qBAAqB,KAAK;AAAA,QAC7B,YAAY;AAAA,QACZ,KAAK,MAAM,KAAK,gBAAgB,SAAS;AAAA,MAC3C;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAiC;AACvC,UAAM,eAA8B,CAAC;AAGrC,UAAM,IAAI,KAAK;AACf,UAAM,mBAAmB,IAAI,KAAK,IAAI,IAAI;AAG1C,QAAI,KAAK,iBAAiB,KAAK,iBAAiB;AAC9C,YAAM,UAAU,IAAI;AACpB,eAAS,IAAI,GAAG,IAAI,KAAK,cAAc,UAAU,QAAQ,KAAK;AAC5D,cAAM,OAAO,KAAK,cAAc,UAAU,CAAC;AAC3C,cAAM,aAAa,KAAK,cAAc,YAAY,CAAC,KAAK;AACxD,qBAAa,KAAK;AAAA,UAChB;AAAA,UACA,QAAQ,aAAa;AAAA,UACrB,OAAO;AAAA,UACP,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,SAAS,KAAK,kBAAkB,mBAAmB;AACzD,aAAS,IAAI,GAAG,IAAI,KAAK,aAAa,UAAU,QAAQ,KAAK;AAC3D,YAAM,OAAO,KAAK,aAAa,UAAU,CAAC;AAC1C,YAAM,aAAa,KAAK,aAAa,YAAY,CAAC,KAAK;AACvD,mBAAa,KAAK;AAAA,QAChB;AAAA,QACA,QAAQ,aAAa;AAAA,QACrB,OAAO;AAAA,QACP,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,KAAK,kBAAkB,KAAK,qBAAqB,MAAM;AACzD,YAAM,UAAU,KAAK,OAAO,gBAAgB;AAAA,QAC1C,CAAC,MAAM,EAAE,YAAY,KAAK;AAAA,MAC5B;AACA,UAAI,SAAS;AACX,qBAAa,KAAK;AAAA,UAChB,MAAM,QAAQ;AAAA,UACd,QAAQ,KAAK;AAAA,UACb,OAAO;AAAA,UACP,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,KAAK,gBAAgB,QAAQ,KAAK,OAAO,aAAa,SAAS,GAAG;AACpE,YAAM,cAAc,KAAK,OAAO,aAAa,KAAK,kBAAkB;AACpE,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,QAAQ,KAAK;AAAA,QACb,OAAO,IAAM,KAAK,cAAc;AAAA;AAAA,QAChC,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,OAAO,KAAK,aAAa;AAAA,MACz
B;AAAA,MACA,eAAe,KAAK,qBAAqB,OAAO,KAAK,iBAAiB;AAAA,MACtE,kBAAkB,KAAK;AAAA,MACvB,iBAAiB,KAAK;AAAA,MACtB,oBAAoB,KAAK;AAAA,IAC3B;AAAA,EACF;AACF;;;ACndO,SAAS,aAAa,SAA+B;AAC1D,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,MAAI,aAAa;AACjB,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,kBAAc,QAAQ,CAAC,IAAI,QAAQ,CAAC;AAAA,EACtC;AAEA,SAAO,KAAK,KAAK,aAAa,QAAQ,MAAM;AAC9C;AAOO,SAAS,cAAc,SAA+B;AAC3D,MAAI,OAAO;AACX,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,MAAM,KAAK,IAAI,QAAQ,CAAC,CAAC;AAC/B,QAAI,MAAM,KAAM,QAAO;AAAA,EACzB;AACA,SAAO;AACT;AAKO,IAAM,sBAAN,MAA0B;AAAA;AAAA;AAAA;AAAA;AAAA,EAU/B,YAAY,kBAA0B,MAAM,aAAqB,MAAM;AATvE,SAAQ,cAAsB;AAC9B,SAAQ,eAAuB;AAS7B,SAAK,kBAAkB,KAAK,IAAI,GAAG,KAAK,IAAI,MAAM,eAAe,CAAC;AAClE,SAAK,aAAa;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,SAAsE;AAC5E,UAAM,aAAa,aAAa,OAAO;AACvC,UAAM,cAAc,cAAc,OAAO;AAGzC,UAAM,WAAW,aAAa,KAAK,aAAa,aAAa;AAC7D,UAAM,YAAY,cAAc,KAAK,aAAa,cAAc;AAIhE,QAAI,WAAW,KAAK,aAAa;AAE/B,WAAK,cACH,KAAK,cAAc,MAAM,WAAW;AAAA,IACxC,OAAO;AAEL,WAAK,cACH,KAAK,cAAc,KAAK,kBACxB,YAAY,IAAI,KAAK;AAAA,IACzB;AAEA,QAAI,YAAY,KAAK,cAAc;AACjC,WAAK,eAAe,KAAK,eAAe,MAAM,YAAY;AAAA,IAC5D,OAAO;AACL,WAAK,eACH,KAAK,eAAe,KAAK,kBACzB,aAAa,IAAI,KAAK;AAAA,IAC1B;AAIA,UAAM,SAAS,KAAK,cAAc,MAAM,KAAK,eAAe;AAE5D,WAAO;AAAA,MACL,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,QAAQ,KAAK,IAAI,GAAG,SAAS,CAAC;AAAA;AAAA,IAChC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,cAAc;AACnB,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,MAAc;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAAe;AACjB,WAAO,KAAK;AAAA,EACd;AACF;AAOO,IAAM,mBAAN,MAAuB;AAAA;AAAA;AAAA;AAAA;AAAA,EAS5B,YAAY,cAAsB,IAAI,oBAA4B,MAAM;AARxE,SAAQ,gBAA0B,CAAC;AASjC,SAAK,cAAc;AACnB,SAAK,oBAAoB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAQ,QAAmE;AACzE,SAAK,cAAc,KAAK,MAAM;AAC9B,QAAI,KAAK,cAAc,SAAS,KAAK,aAAa;AAChD,WAAK,cAAc,MAAM;AAAA,IAC3B;AAEA,QAAI,KAAK,cAAc,SAAS,GAAG;AACjC,aAAO,EAAE,YAAY,OAAO,kBAAkB,EAAE;AAAA,IAClD;AAGA,UAAM,aAAa,KAAK,cAAc,MAAM,GAAG,EAAE;AACjD,UAAM,UAAU,WAAW,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC,IAAI,WAAW;AAGnE,UAAM,WAAW,SAAS;AAC1B,UAAM,aAAa,WAAW,KAAK;AAEnC,WAAO;AAAA,MACL;AAAA,MACA,kBAAkB,aAAa,KAAK,IAAI,GAAG,WAAW,GAAG,IAAI;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,gBAAgB,CAAC;AAAA,EACxB;AACF;","names":["data","options","session","logger","logger","logger","logger","logger","logger","logger","WASM_CDN_PATH","logger","logger","logger","sum","logger","env"]}
|