@eldrforge/kodrdriv 1.2.16 → 1.2.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/development.js +54 -0
- package/dist/commands/development.js.map +1 -1
- package/dist/commands/publish.js +130 -4
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/tree.js +11 -1
- package/dist/commands/tree.js.map +1 -1
- package/dist/constants.js +1 -1
- package/dist/util/openai.js +5 -0
- package/dist/util/openai.js.map +1 -1
- package/package.json +1 -1
package/dist/constants.js
CHANGED
@@ -1,7 +1,7 @@
  import os__default from 'os';
  import path__default from 'path';

- const VERSION = '1.2.
+ const VERSION = '1.2.18 (HEAD/474d462 T:v1.2.18 2025-10-20 07:41:35 -0700) linux x64 v22.20.0';
  const PROGRAM_NAME = 'kodrdriv';
  const DEFAULT_OVERRIDES = false;
  const DATE_FORMAT_YEAR_MONTH_DAY_HOURS_MINUTES_SECONDS_MILLISECONDS = 'YYYY-MM-DD-HHmmss.SSS';
package/dist/util/openai.js
CHANGED
@@ -173,6 +173,7 @@ async function createCompletion(messages, options = {
  apiOptions.reasoning_effort = options.openaiReasoning;
  }
  // Add timeout wrapper to the OpenAI API call
+ const startTime = Date.now();
  const completionPromise = openai.chat.completions.create(apiOptions);
  // Create timeout promise with proper cleanup to prevent memory leaks
  let timeoutId = null;
@@ -192,6 +193,7 @@ async function createCompletion(messages, options = {
  clearTimeout(timeoutId);
  }
  }
+ const elapsedTime = Date.now() - startTime;
  // Save response debug file if enabled
  if (options.debug && (options.debugResponseFile || options.debugFile)) {
  const debugFile = options.debugResponseFile || options.debugFile;
@@ -206,6 +208,9 @@ async function createCompletion(messages, options = {
  const responseSize = response.length;
  const responseSizeKB = (responseSize / 1024).toFixed(2);
  logger.info(' Response size: %s KB (%s bytes)', responseSizeKB, responseSize.toLocaleString());
+ // Log elapsed time
+ const elapsedTimeFormatted = elapsedTime >= 1000 ? `${(elapsedTime / 1000).toFixed(1)}s` : `${elapsedTime}ms`;
+ logger.info(' Time: %s', elapsedTimeFormatted);
  // Log token usage if available
  if (completion.usage) {
  var _completion_usage_prompt_tokens, _completion_usage_completion_tokens, _completion_usage_total_tokens;
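
Taken together, the three openai.js hunks above time the chat-completion request: startTime is captured just before the Promise.race timeout wrapper, elapsedTime is computed once the race settles, and the result is logged next to the response size, rendered as seconds once it reaches 1000 ms and as milliseconds otherwise. A minimal standalone sketch of that timing pattern follows, assuming a generic async call and console logging in place of the package's logger; the timedCall helper is hypothetical and not part of kodrdriv:

async function timedCall<T>(label: string, call: () => Promise<T>): Promise<T> {
    // Capture wall-clock time around the awaited call (startTime/elapsedTime pair).
    const startTime = Date.now();
    try {
        return await call();
    } finally {
        const elapsedTime = Date.now() - startTime;
        // Same formatting rule as the diff: >= 1 s renders as "X.Ys", otherwise "Nms".
        const formatted = elapsedTime >= 1000 ? `${(elapsedTime / 1000).toFixed(1)}s` : `${elapsedTime}ms`;
        console.info('Time: %s (%s)', formatted, label);
    }
}

// Example: time a stand-in async operation.
// await timedCall('chat.completions.create', () => new Promise(resolve => setTimeout(resolve, 1200)));
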
package/dist/util/openai.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nimport { archiveAudio } from './general';\nimport { Config } from '../types';\nimport { safeJsonParse } from './validation';\n// eslint-disable-next-line no-restricted-imports\nimport fs from 'fs';\n\nexport interface Transcription {\n text: string;\n}\n\n/**\n * Get the appropriate model to use based on command-specific configuration\n * Command-specific model overrides the global model setting\n */\nexport function getModelForCommand(config: Config, commandName: string): string {\n let commandModel: string | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandModel = config.commit?.model;\n break;\n case 'release':\n commandModel = config.release?.model;\n break;\n case 'review':\n case 'audio-review':\n commandModel = config.review?.model;\n break;\n default:\n // For other commands, just use global model\n break;\n }\n\n // Return command-specific model if available, otherwise global model\n return commandModel || config.model || 'gpt-4o-mini';\n}\n\n/**\n * Get the appropriate OpenAI reasoning level based on command-specific configuration\n * Command-specific reasoning overrides the global reasoning setting\n */\nexport function getOpenAIReasoningForCommand(config: Config, commandName: string): 'low' | 'medium' | 'high' {\n let commandReasoning: 'low' | 'medium' | 'high' | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandReasoning = config.commit?.openaiReasoning;\n break;\n case 'release':\n commandReasoning = config.release?.openaiReasoning;\n break;\n case 'review':\n case 'audio-review':\n commandReasoning = config.review?.openaiReasoning;\n break;\n default:\n // For other commands, just use global reasoning\n break;\n }\n\n // Return command-specific reasoning if available, otherwise global reasoning\n return commandReasoning || config.openaiReasoning || 'low';\n}\n\n/**\n * Get the appropriate OpenAI max output tokens based on command-specific configuration\n * Command-specific max output tokens overrides the global setting\n */\nexport function getOpenAIMaxOutputTokensForCommand(config: Config, commandName: string): number {\n let commandMaxOutputTokens: number | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandMaxOutputTokens = config.commit?.openaiMaxOutputTokens;\n break;\n case 'release':\n commandMaxOutputTokens = config.release?.openaiMaxOutputTokens;\n break;\n case 'review':\n case 'audio-review':\n commandMaxOutputTokens = config.review?.openaiMaxOutputTokens;\n break;\n default:\n // For other commands, just use global max output tokens\n break;\n }\n\n // Return command-specific max output tokens if available, otherwise global setting\n return commandMaxOutputTokens || config.openaiMaxOutputTokens || 10000;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string, public readonly isTokenLimitError: boolean = false) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n// Check if an error is a token limit exceeded error\nexport function isTokenLimitError(error: any): boolean {\n if (!error?.message) return false;\n\n const message = error.message.toLowerCase();\n return message.includes('maximum context length') ||\n message.includes('context_length_exceeded') ||\n 
message.includes('token limit') ||\n message.includes('too many tokens') ||\n message.includes('reduce the length');\n}\n\n// Check if an error is a rate limit error\nexport function isRateLimitError(error: any): boolean {\n if (!error?.message && !error?.code && !error?.status) return false;\n\n // Check for OpenAI specific rate limit indicators\n if (error.status === 429 || error.code === 'rate_limit_exceeded') {\n return true;\n }\n\n // Only check message if it exists\n if (error.message) {\n const message = error.message.toLowerCase();\n return message.includes('rate limit exceeded') ||\n message.includes('too many requests') ||\n message.includes('quota exceeded') ||\n (message.includes('rate') && message.includes('limit'));\n }\n\n return false;\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n // Create the client which we'll close in the finally block.\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n openai = new OpenAI({\n apiKey: apiKey,\n timeout: timeoutMs,\n });\n\n const modelToUse = options.model || \"gpt-4o-mini\";\n\n // Calculate request size\n const requestSize = JSON.stringify(messages).length;\n const requestSizeKB = (requestSize / 1024).toFixed(2);\n\n // Log model, reasoning level, and request size\n const reasoningInfo = options.openaiReasoning ? ` | Reasoning: ${options.openaiReasoning}` : '';\n logger.info('🤖 Making request to OpenAI');\n logger.info(' Model: %s%s', modelToUse, reasoningInfo);\n logger.info(' Request size: %s KB (%s bytes)', requestSizeKB, requestSize.toLocaleString());\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n // Use openaiMaxOutputTokens if specified (highest priority), otherwise fall back to maxTokens, or default to 10000\n const maxCompletionTokens = options.openaiMaxOutputTokens ?? options.maxTokens ?? 
10000;\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n reasoning_effort: options.openaiReasoning,\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n // Prepare the API call options\n const apiOptions: any = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n };\n\n // Add reasoning parameter if specified and model supports it\n if (options.openaiReasoning && (modelToUse.includes('gpt-5') || modelToUse.includes('o3'))) {\n apiOptions.reasoning_effort = options.openaiReasoning;\n }\n\n // Add timeout wrapper to the OpenAI API call\n const completionPromise = openai.chat.completions.create(apiOptions);\n\n // Create timeout promise with proper cleanup to prevent memory leaks\n let timeoutId: NodeJS.Timeout | null = null;\n const timeoutPromise = new Promise<never>((_, reject) => {\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n timeoutId = setTimeout(() => reject(new OpenAIError(`OpenAI API call timed out after ${timeoutMs/1000} seconds`)), timeoutMs);\n });\n\n let completion;\n try {\n completion = await Promise.race([completionPromise, timeoutPromise]);\n } finally {\n // Clear the timeout to prevent memory leaks\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n }\n }\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n // Calculate and log response size\n const responseSize = response.length;\n const responseSizeKB = (responseSize / 1024).toFixed(2);\n logger.info(' Response size: %s KB (%s bytes)', responseSizeKB, responseSize.toLocaleString());\n\n // Log token usage if available\n if (completion.usage) {\n logger.info(' Token usage: %s prompt + %s completion = %s total',\n completion.usage.prompt_tokens?.toLocaleString() || '?',\n completion.usage.completion_tokens?.toLocaleString() || '?',\n completion.usage.total_tokens?.toLocaleString() || '?'\n );\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return safeJsonParse(response, 'OpenAI API response');\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n const isTokenError = isTokenLimitError(error);\n throw new OpenAIError(`Failed to create completion: ${error.message}`, isTokenError);\n } finally {\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n }\n}\n\n// Create completion with automatic retry on token limit errors\nexport async function createCompletionWithRetry(\n messages: ChatCompletionMessageParam[],\n options: { responseFormat?: any, model?: string, 
debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" },\n retryCallback?: (attempt: number) => Promise<ChatCompletionMessageParam[]>\n): Promise<string | any> {\n const logger = getLogger();\n const maxRetries = 3;\n\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n try {\n const messagesToSend = attempt === 1 ? messages : (retryCallback ? await retryCallback(attempt) : messages);\n return await createCompletion(messagesToSend, options);\n } catch (error: any) {\n if (error instanceof OpenAIError && error.isTokenLimitError && attempt < maxRetries && retryCallback) {\n logger.warn('Token limit exceeded on attempt %d/%d, retrying with reduced content...', attempt, maxRetries);\n // Add exponential backoff for token limit errors\n const backoffMs = Math.min(1000 * Math.pow(2, attempt - 1), 10000);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n } else if (isRateLimitError(error) && attempt < maxRetries) {\n // Handle rate limiting with exponential backoff\n const backoffMs = Math.min(2000 * Math.pow(2, attempt - 1), 15000); // More reasonable backoff: 2s, 4s, 8s, max 15s\n logger.warn(`Rate limit hit on attempt ${attempt}/${maxRetries}, waiting ${backoffMs}ms before retry...`);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n }\n throw error;\n }\n }\n\n // This should never be reached, but TypeScript requires it\n throw new OpenAIError('Max retries exceeded');\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, outputDirectory?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n let audioStream: fs.ReadStream | null = null;\n let streamClosed = false;\n\n // Helper function to safely close the stream\n const closeAudioStream = () => {\n if (audioStream && !streamClosed) {\n try {\n // Only call destroy if it exists and the stream isn't already destroyed\n if (typeof audioStream.destroy === 'function' && !audioStream.destroyed) {\n audioStream.destroy();\n }\n streamClosed = true;\n logger.debug('Audio stream closed successfully');\n } catch (streamErr) {\n logger.debug('Failed to destroy audio read stream: %s', (streamErr as Error).message);\n streamClosed = true; // Mark as closed even if destroy failed\n }\n }\n };\n\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: options.model || \"whisper-1\",\n file: filePath, // Can't serialize the stream, so just save the file path\n response_format: \"json\",\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n audioStream = await storage.readStream(filePath);\n\n // Set up error handler for the stream to ensure cleanup on stream 
errors\n // Only add handler if the stream has the 'on' method (real streams)\n if (audioStream && typeof audioStream.on === 'function') {\n audioStream.on('error', (streamError) => {\n logger.error('Audio stream error: %s', streamError.message);\n closeAudioStream();\n });\n }\n\n let transcription;\n try {\n transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n // Close the stream immediately after successful API call to prevent race conditions\n closeAudioStream();\n } catch (apiError) {\n // Close the stream immediately if the API call fails\n closeAudioStream();\n throw apiError;\n }\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n\n // Archive the audio file and transcription\n try {\n const outputDir = options.outputDirectory || 'output';\n await archiveAudio(filePath, response.text, outputDir);\n } catch (archiveError: any) {\n // Don't fail the transcription if archiving fails, just log the error\n logger.warn('Failed to archive audio file: %s', archiveError.message);\n }\n\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n } finally {\n // Ensure the audio stream is properly closed to release file handles\n closeAudioStream();\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n 
}\n}\n"],"names":["getModelForCommand","config","commandName","commandModel","commit","model","release","review","getOpenAIReasoningForCommand","commandReasoning","openaiReasoning","getOpenAIMaxOutputTokensForCommand","commandMaxOutputTokens","openaiMaxOutputTokens","OpenAIError","Error","message","isTokenLimitError","name","error","toLowerCase","includes","isRateLimitError","code","status","createCompletion","messages","options","logger","getLogger","storage","Storage","log","debug","openai","completion","apiKey","process","env","OPENAI_API_KEY","timeoutMs","parseInt","OPENAI_TIMEOUT_MS","OpenAI","timeout","modelToUse","requestSize","JSON","stringify","length","requestSizeKB","toFixed","reasoningInfo","info","toLocaleString","maxCompletionTokens","maxTokens","debugRequestFile","debugFile","requestData","max_completion_tokens","response_format","responseFormat","reasoning_effort","writeFile","apiOptions","completionPromise","chat","completions","create","timeoutId","timeoutPromise","Promise","_","reject","setTimeout","race","clearTimeout","debugResponseFile","response","choices","content","trim","responseSize","responseSizeKB","usage","prompt_tokens","completion_tokens","total_tokens","substring","safeJsonParse","stack","isTokenError","createCompletionWithRetry","retryCallback","maxRetries","attempt","messagesToSend","warn","backoffMs","Math","min","pow","resolve","transcribeAudio","filePath","audioStream","streamClosed","closeAudioStream","destroy","destroyed","streamErr","file","readStream","on","streamError","transcription","audio","transcriptions","apiError","outputDir","outputDirectory","archiveAudio","text","archiveError"],"mappings":";;;;;;;;;;;;;;;;;;;AAcA;;;AAGC,IACM,SAASA,kBAAAA,CAAmBC,MAAc,EAAEC,WAAmB,EAAA;IAClE,IAAIC,YAAAA;IAEJ,OAAQD,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcD,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;QACJ,KAAK,SAAA;AACcJ,YAAAA,IAAAA,eAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBI,KAAK;AACpC,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcJ,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;AAIR;;IAGA,OAAOF,YAAAA,IAAgBF,MAAAA,CAAOI,KAAK,IAAI,aAAA;AAC3C;AAEA;;;AAGC,IACM,SAASG,4BAAAA,CAA6BP,MAAc,EAAEC,WAAmB,EAAA;IAC5E,IAAIO,gBAAAA;IAEJ,OAAQP,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBD,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;QACJ,KAAK,SAAA;AACkBT,YAAAA,IAAAA,eAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBS,eAAe;AAClD,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBT,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;AAIR;;IAGA,OAAOD,gBAAAA,IAAoBR,MAAAA,CAAOS,eAAe,IAAI,KAAA;AACzD;AAEA;;;AAGC,IACM,SAASC,kCAAAA,CAAmCV,MAAc,EAAEC,WAAmB,EAAA;IAClF,IAAIU,sBAAAA;IAEJ,OAAQV,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBD,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;QACJ,KAAK,SAAA;AACwBZ,YAAAA,IAAAA,eAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBY,qBAAqB;AAC9D,YAAA;QACJ,KAAK,QAAA;
QACL,KAAK,cAAA;AACwBZ,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;AAIR;;IAGA,OAAOD,sBAAAA,IAA0BX,MAAAA,CAAOY,qBAAqB,IAAI,KAAA;AACrE;AAEO,MAAMC,WAAAA,SAAoBC,KAAAA,CAAAA;AAC7B,IAAA,WAAA,CAAYC,OAAe,EAAE,iBAAgBC,GAA6B,KAAK,CAAE;QAC7E,KAAK,CAACD,oEADmCC,iBAAAA,GAAAA,iBAAAA;QAEzC,IAAI,CAACC,IAAI,GAAG,aAAA;AAChB,IAAA;AACJ;AAEA;AACO,SAASD,kBAAkBE,KAAU,EAAA;AACxC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOH,OAAO,GAAE,OAAO,KAAA;AAE5B,IAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,IAAA,OAAOJ,QAAQK,QAAQ,CAAC,6BACjBL,OAAAA,CAAQK,QAAQ,CAAC,yBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,kBACjBL,OAAAA,CAAQK,QAAQ,CAAC,iBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA;AAC5B;AAEA;AACO,SAASC,iBAAiBH,KAAU,EAAA;AACvC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,MAAOH,OAAO,CAAA,IAAI,EAACG,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOI,IAAI,KAAI,EAACJ,KAAAA,KAAAA,IAAAA,IAAAA,4BAAAA,KAAAA,CAAOK,MAAM,GAAE,OAAO,KAAA;;AAG9D,IAAA,IAAIL,MAAMK,MAAM,KAAK,OAAOL,KAAAA,CAAMI,IAAI,KAAK,qBAAA,EAAuB;QAC9D,OAAO,IAAA;AACX,IAAA;;IAGA,IAAIJ,KAAAA,CAAMH,OAAO,EAAE;AACf,QAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,QAAA,OAAOJ,QAAQK,QAAQ,CAAC,0BACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,qBAChBL,OAAAA,CAAQK,QAAQ,CAAC,MAAA,CAAA,IAAWL,OAAAA,CAAQK,QAAQ,CAAC,OAAA,CAAA;AACzD,IAAA;IAEA,OAAO,KAAA;AACX;AAEO,eAAeI,gBAAAA,CAAiBC,QAAsC,EAAEC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EAAA;AACpV,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;IAC5B,IAAI;AAoFiBC,QAAAA,IAAAA,oCAAAA,EAAAA,4BAAAA,EAAAA,oBAAAA;AAnFjB,QAAA,MAAMC,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;;QAGA,MAAM0B,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5DR,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA,MAAAA;YACRQ,OAAAA,EAASJ;AACb,SAAA,CAAA;QAEA,MAAMK,UAAAA,GAAalB,OAAAA,CAAQtB,KAAK,IAAI,aAAA;;AAGpC,QAAA,MAAMyC,WAAAA,GAAcC,IAAAA,CAAKC,SAAS,CAACtB,UAAUuB,MAAM;AACnD,QAAA,MAAMC,gBAAgB,CAACJ,cAAc,IAAG,EAAGK,OAAO,CAAC,CAAA,CAAA;;QAGnD,MAAMC,aAAAA,GAAgBzB,OAAAA,CAAQjB,eAAe,GAAG,CAAC,cAAc,EAAEiB,OAAAA,CAAQjB,eAAe,CAAA,CAAE,GAAG,EAAA;AAC7FkB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,6BAAA,CAAA;QACZzB,MAAAA,CAAOyB,IAAI,CAAC,gBAAA,EAAkBR,UAAAA,EAAYO,aAAAA,CAAAA;AAC1CxB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,mCAAA,EAAqCH,aAAAA,EAAeJ,YAAYQ,cAAc,EAAA,CAAA;QAE1F1B,MAAAA,CAAOK,KAAK,CAAC,8BAAA,EAAgCP,QAAAA,CAAAA;YAGjBC,8BAAAA,EAAAA,IAAAA;;AAA5B,QAAA,MAAM4B,mBAAAA,GAAsB5B,CAAAA,IAAAA,GAAAA,CAAAA,iCAAAA,OAAAA,CAAQd,qBAAqB,MAAA,IAAA,IAA7Bc,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,GAAiCA,OAAAA,CAAQ6B,SAAS,MAAA,IAAA,IAAlD7B,kBAAAA,IAAAA,GAAsD,KAAA;;QAGlF,IAAIA,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOwC,UAAAA;AACPnB,gBAAAA,QAAAA;gBACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,gBAAAA,eAAAA,EAAiBlC,QAAQmC,cAAc;AACvCC,gBAAAA,gBAAAA,EAAkBpC,QAAQjB;AAC9B,aAAA;AACA,YAAA,MAAMgD,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;;AAGA,QAAA,MAAMO,UAAAA,GAAkB;YACpB5D,KAAAA,EAAOwC,UAAAA;AACPnB,YAAAA,QAAAA;YACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,YAA
AA,eAAAA,EAAiBlC,QAAQmC;AAC7B,SAAA;;AAGA,QAAA,IAAInC,OAAAA,CAAQjB,eAAe,KAAKmC,UAAAA,CAAWxB,QAAQ,CAAC,OAAA,CAAA,IAAYwB,UAAAA,CAAWxB,QAAQ,CAAC,IAAA,CAAI,CAAA,EAAI;YACxF4C,UAAAA,CAAWF,gBAAgB,GAAGpC,OAAAA,CAAQjB,eAAe;AACzD,QAAA;;AAGA,QAAA,MAAMwD,oBAAoBhC,MAAAA,CAAOiC,IAAI,CAACC,WAAW,CAACC,MAAM,CAACJ,UAAAA,CAAAA;;AAGzD,QAAA,IAAIK,SAAAA,GAAmC,IAAA;AACvC,QAAA,MAAMC,cAAAA,GAAiB,IAAIC,OAAAA,CAAe,CAACC,CAAAA,EAAGC,MAAAA,GAAAA;YAC1C,MAAMlC,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5D4B,YAAAA,SAAAA,GAAYK,UAAAA,CAAW,IAAMD,MAAAA,CAAO,IAAI5D,WAAAA,CAAY,CAAC,gCAAgC,EAAE0B,SAAAA,GAAU,IAAA,CAAK,QAAQ,CAAC,CAAA,CAAA,EAAIA,SAAAA,CAAAA;AACvH,QAAA,CAAA,CAAA;QAEA,IAAIL,UAAAA;QACJ,IAAI;YACAA,UAAAA,GAAa,MAAMqC,OAAAA,CAAQI,IAAI,CAAC;AAACV,gBAAAA,iBAAAA;AAAmBK,gBAAAA;AAAe,aAAA,CAAA;QACvE,CAAA,QAAU;;AAEN,YAAA,IAAID,cAAc,IAAA,EAAM;gBACpBO,YAAAA,CAAaP,SAAAA,CAAAA;AACjB,YAAA;AACJ,QAAA;;QAGA,IAAI3C,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQmD,iBAAiB,IAAInD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQmD,iBAAiB,IAAInD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACb,UAAAA,EAAY,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YACzEP,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,SAAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMqB,YAAW5C,oBAAAA,GAAAA,UAAAA,CAAW6C,OAAO,CAAC,CAAA,CAAE,cAArB7C,oBAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,CAAAA,4BAAAA,GAAAA,qBAAuBnB,OAAO,MAAA,IAAA,IAA9BmB,oDAAAA,oCAAAA,GAAAA,4BAAAA,CAAgC8C,OAAO,MAAA,IAAA,IAAvC9C,oCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,qCAAyC+C,IAAI,EAAA;AAC9D,QAAA,IAAI,CAACH,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIjE,WAAAA,CAAY,kCAAA,CAAA;AAC1B,QAAA;;QAGA,MAAMqE,YAAAA,GAAeJ,SAAS9B,MAAM;AACpC,QAAA,MAAMmC,iBAAiB,CAACD,eAAe,IAAG,EAAGhC,OAAO,CAAC,CAAA,CAAA;AACrDvB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,oCAAA,EAAsC+B,cAAAA,EAAgBD,aAAa7B,cAAc,EAAA,CAAA;;QAG7F,IAAInB,UAAAA,CAAWkD,KAAK,EAAE;AAEdlD,YAAAA,IAAAA,+BAAAA,EACAA,mCAAAA,EACAA,8BAAAA;AAHJP,YAAAA,MAAAA,CAAOyB,IAAI,CAAC,sDAAA,EACRlB,CAAAA,CAAAA,+BAAAA,GAAAA,WAAWkD,KAAK,CAACC,aAAa,MAAA,IAAA,IAA9BnD,sDAAAA,+BAAAA,CAAgCmB,cAAc,EAAA,KAAM,GAAA,EACpDnB,EAAAA,mCAAAA,GAAAA,UAAAA,CAAWkD,KAAK,CAACE,iBAAiB,MAAA,IAAA,IAAlCpD,mCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,mCAAAA,CAAoCmB,cAAc,OAAM,GAAA,EACxDnB,CAAAA,CAAAA,8BAAAA,GAAAA,UAAAA,CAAWkD,KAAK,CAACG,YAAY,cAA7BrD,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,CAA+BmB,cAAc,EAAA,KAAM,GAAA,CAAA;AAE3D,QAAA;AAEA1B,QAAAA,MAAAA,CAAOK,KAAK,CAAC,sCAAA,EAAwC8C,QAAAA,CAASU,SAAS,CAAC,CAAA,EAAG,EAAA,CAAA,CAAA;QAC3E,IAAI9D,OAAAA,CAAQmC,cAAc,EAAE;AACxB,YAAA,OAAO4B,cAAcX,QAAAA,EAAU,qBAAA,CAAA;QACnC,CAAA,MAAO;YACH,OAAOA,QAAAA;AACX,QAAA;AAEJ,IAAA,CAAA,CAAE,OAAO5D,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,iCAAA,EAAmCA,MAAMH,OAAO,EAAEG,MAAMwE,KAAK,CAAA;AAC1E,QAAA,MAAMC,eAAe3E,iBAAAA,CAAkBE,KAAAA,CAAAA;QACvC,MAAM,IAAIL,YAAY,CAAC,6BAA6B,EAAEK,KAAAA,CAAMH,OAAO,EAAE,EAAE4E,YAAAA,CAAAA;IAC3E,CAAA,QAAU;;;AAGV,IAAA;AACJ;AAEA;AACO,eAAeC,yBAAAA,CAClBnE,QAAsC,EACtCC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EACzQyF,aAA0E,EAAA;AAE1E,IAAA,MAAMlE,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMkE,UAAAA,GAAa,CAAA;AAEnB,IAAA,IAAK,IAAIC,OAAAA,GAAU,CAAA,EAAGA,OAAAA,IAAWD,YAAYC,OAAAA,EAAAA,CAAW;QACpD,IAAI;AACA,YAAA,MAAMC,iBAAiBD,OAAAA,KAAY,CAAA,GAAItE,WAAYoE,aAAAA,GAAgB,MAAMA,cAAcE,OAAAA,CAAAA,GAAWtE,QAAAA;YAClG,OAAO,MAAMD,iBAAiBwE,cAAAA,EAAgBtE,OAAAA,CAAAA;AAClD,QAAA,CAAA,CAAE,OAAOR,KAAAA,EAAY;AACjB,YAAA,IAAIA,iBAAiBL,WAAAA,IAAeK,KAAAA,CAAMF,iBAAiB,IAAI+E,OAAAA,GAAUD,cAAcD,aAAAA,EAAe;gBAClGlE,MAAAA,CAAOsE,IAAI,CAAC,yEAAA,EAA2EF,OAAAA,EAASD,UAAAA,CAAAA;;gBAEhG,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAO
D,KAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA;AAC5D,gBAAA,MAAM,IAAIxB,OAAAA,CAAQ+B,CAAAA,OAAAA,GAAW5B,WAAW4B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA,CAAA,MAAO,IAAI7E,gBAAAA,CAAiBH,KAAAA,CAAAA,IAAU6E,OAAAA,GAAUD,UAAAA,EAAY;;AAExD,gBAAA,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,IAAAA,CAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA,CAAA;AAC5DpE,gBAAAA,MAAAA,CAAOsE,IAAI,CAAC,CAAC,0BAA0B,EAAEF,OAAAA,CAAQ,CAAC,EAAED,UAAAA,CAAW,UAAU,EAAEI,SAAAA,CAAU,kBAAkB,CAAC,CAAA;AACxG,gBAAA,MAAM,IAAI3B,OAAAA,CAAQ+B,CAAAA,OAAAA,GAAW5B,WAAW4B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA;YACA,MAAMhF,KAAAA;AACV,QAAA;AACJ,IAAA;;AAGA,IAAA,MAAM,IAAIL,WAAAA,CAAY,sBAAA,CAAA;AAC1B;AAEO,eAAe0F,eAAAA,CAAgBC,QAAgB,EAAE9E,OAAAA,GAAoJ;IAAEtB,KAAAA,EAAO;AAAY,CAAC,EAAA;AAC9N,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;AAC5B,IAAA,IAAIwE,WAAAA,GAAoC,IAAA;AACxC,IAAA,IAAIC,YAAAA,GAAe,KAAA;;AAGnB,IAAA,MAAMC,gBAAAA,GAAmB,IAAA;QACrB,IAAIF,WAAAA,IAAe,CAACC,YAAAA,EAAc;YAC9B,IAAI;;gBAEA,IAAI,OAAOD,YAAYG,OAAO,KAAK,cAAc,CAACH,WAAAA,CAAYI,SAAS,EAAE;AACrEJ,oBAAAA,WAAAA,CAAYG,OAAO,EAAA;AACvB,gBAAA;gBACAF,YAAAA,GAAe,IAAA;AACf/E,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,kCAAA,CAAA;AACjB,YAAA,CAAA,CAAE,OAAO8E,SAAAA,EAAW;AAChBnF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,yCAAA,EAA4C8E,UAAoB/F,OAAO,CAAA;AACpF2F,gBAAAA,YAAAA,GAAe;AACnB,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA;IAEA,IAAI;AACA,QAAA,MAAMvE,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;AAEAoB,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA;AACZ,SAAA,CAAA;QAEAR,MAAAA,CAAOK,KAAK,CAAC,6BAAA,EAA+BwE,QAAAA,CAAAA;;QAG5C,IAAI9E,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxB2G,IAAAA,EAAMP,QAAAA;gBACN5C,eAAAA,EAAiB;AACrB,aAAA;AACA,YAAA,MAAMH,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;QAEAgD,WAAAA,GAAc,MAAM5E,OAAAA,CAAQmF,UAAU,CAACR,QAAAA,CAAAA;;;AAIvC,QAAA,IAAIC,WAAAA,IAAe,OAAOA,WAAAA,CAAYQ,EAAE,KAAK,UAAA,EAAY;YACrDR,WAAAA,CAAYQ,EAAE,CAAC,OAAA,EAAS,CAACC,WAAAA,GAAAA;AACrBvF,gBAAAA,MAAAA,CAAOT,KAAK,CAAC,wBAAA,EAA0BgG,WAAAA,CAAYnG,OAAO,CAAA;AAC1D4F,gBAAAA,gBAAAA,EAAAA;AACJ,YAAA,CAAA,CAAA;AACJ,QAAA;QAEA,IAAIQ,aAAAA;QACJ,IAAI;AACAA,YAAAA,aAAAA,GAAgB,MAAMlF,MAAAA,CAAOmF,KAAK,CAACC,cAAc,CAACjD,MAAM,CAAC;gBACrDhE,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxB2G,IAAAA,EAAMN,WAAAA;gBACN7C,eAAAA,EAAiB;AACrB,aAAA,CAAA;;AAEA+C,YAAAA,gBAAAA,EAAAA;AACJ,QAAA,CAAA,CAAE,OAAOW,QAAAA,EAAU;;AAEfX,YAAAA,gBAAAA,EAAAA;YACA,MAAMW,QAAAA;AACV,QAAA;;QAGA,IAAI5F,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQmD,iBAAiB,IAAInD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQmD,iBAAiB,IAAInD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACoE,aAAAA,EAAe,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC5ExF,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,SAAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMqB,QAAAA,GAAWqC,aAAAA;AACjB,QAAA,IAAI,CAACrC,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIjE,WAAAA,CAAY,uCAAA,CAAA;AAC1B,QAAA;QAEAc,MAAAA,CAAOK,KAAK,CAAC,wCAAA,EAA0C8C,QAAAA,CAAAA;;QAGvD,IAAI;YACA,MAAMyC,SAAAA,GAAY7F,OAAAA,CAAQ8F,eAAe,IAAI,QAAA;AAC7C,YAAA,MAAMC,YAAAA,CAAajB,QAAAA,EAAU1B,QAAAA,CAAS4C,IAAI,EAAEH,SAAAA,CAAAA;AAChD,QAAA,C
AAA,CAAE,OAAOI,YAAAA,EAAmB;;AAExBhG,YAAAA,MAAAA,CAAOsE,IAAI,CAAC,kCAAA,EAAoC0B,YAAAA,CAAa5G,OAAO,CAAA;AACxE,QAAA;QAEA,OAAO+D,QAAAA;AAEX,IAAA,CAAA,CAAE,OAAO5D,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,sCAAA,EAAwCA,MAAMH,OAAO,EAAEG,MAAMwE,KAAK,CAAA;AAC/E,QAAA,MAAM,IAAI7E,WAAAA,CAAY,CAAC,4BAA4B,EAAEK,KAAAA,CAAMH,OAAO,CAAA,CAAE,CAAA;IACxE,CAAA,QAAU;;AAEN4F,QAAAA,gBAAAA,EAAAA;;;AAGJ,IAAA;AACJ;;;;"}
+
{"version":3,"file":"openai.js","sources":["../../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionMessageParam } from 'openai/resources';\nimport * as Storage from './storage';\nimport { getLogger } from '../logging';\nimport { archiveAudio } from './general';\nimport { Config } from '../types';\nimport { safeJsonParse } from './validation';\n// eslint-disable-next-line no-restricted-imports\nimport fs from 'fs';\n\nexport interface Transcription {\n text: string;\n}\n\n/**\n * Get the appropriate model to use based on command-specific configuration\n * Command-specific model overrides the global model setting\n */\nexport function getModelForCommand(config: Config, commandName: string): string {\n let commandModel: string | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandModel = config.commit?.model;\n break;\n case 'release':\n commandModel = config.release?.model;\n break;\n case 'review':\n case 'audio-review':\n commandModel = config.review?.model;\n break;\n default:\n // For other commands, just use global model\n break;\n }\n\n // Return command-specific model if available, otherwise global model\n return commandModel || config.model || 'gpt-4o-mini';\n}\n\n/**\n * Get the appropriate OpenAI reasoning level based on command-specific configuration\n * Command-specific reasoning overrides the global reasoning setting\n */\nexport function getOpenAIReasoningForCommand(config: Config, commandName: string): 'low' | 'medium' | 'high' {\n let commandReasoning: 'low' | 'medium' | 'high' | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandReasoning = config.commit?.openaiReasoning;\n break;\n case 'release':\n commandReasoning = config.release?.openaiReasoning;\n break;\n case 'review':\n case 'audio-review':\n commandReasoning = config.review?.openaiReasoning;\n break;\n default:\n // For other commands, just use global reasoning\n break;\n }\n\n // Return command-specific reasoning if available, otherwise global reasoning\n return commandReasoning || config.openaiReasoning || 'low';\n}\n\n/**\n * Get the appropriate OpenAI max output tokens based on command-specific configuration\n * Command-specific max output tokens overrides the global setting\n */\nexport function getOpenAIMaxOutputTokensForCommand(config: Config, commandName: string): number {\n let commandMaxOutputTokens: number | undefined;\n\n switch (commandName) {\n case 'commit':\n case 'audio-commit':\n commandMaxOutputTokens = config.commit?.openaiMaxOutputTokens;\n break;\n case 'release':\n commandMaxOutputTokens = config.release?.openaiMaxOutputTokens;\n break;\n case 'review':\n case 'audio-review':\n commandMaxOutputTokens = config.review?.openaiMaxOutputTokens;\n break;\n default:\n // For other commands, just use global max output tokens\n break;\n }\n\n // Return command-specific max output tokens if available, otherwise global setting\n return commandMaxOutputTokens || config.openaiMaxOutputTokens || 10000;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string, public readonly isTokenLimitError: boolean = false) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n// Check if an error is a token limit exceeded error\nexport function isTokenLimitError(error: any): boolean {\n if (!error?.message) return false;\n\n const message = error.message.toLowerCase();\n return message.includes('maximum context length') ||\n message.includes('context_length_exceeded') ||\n 
message.includes('token limit') ||\n message.includes('too many tokens') ||\n message.includes('reduce the length');\n}\n\n// Check if an error is a rate limit error\nexport function isRateLimitError(error: any): boolean {\n if (!error?.message && !error?.code && !error?.status) return false;\n\n // Check for OpenAI specific rate limit indicators\n if (error.status === 429 || error.code === 'rate_limit_exceeded') {\n return true;\n }\n\n // Only check message if it exists\n if (error.message) {\n const message = error.message.toLowerCase();\n return message.includes('rate limit exceeded') ||\n message.includes('too many requests') ||\n message.includes('quota exceeded') ||\n (message.includes('rate') && message.includes('limit'));\n }\n\n return false;\n}\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" }): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n // Create the client which we'll close in the finally block.\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n openai = new OpenAI({\n apiKey: apiKey,\n timeout: timeoutMs,\n });\n\n const modelToUse = options.model || \"gpt-4o-mini\";\n\n // Calculate request size\n const requestSize = JSON.stringify(messages).length;\n const requestSizeKB = (requestSize / 1024).toFixed(2);\n\n // Log model, reasoning level, and request size\n const reasoningInfo = options.openaiReasoning ? ` | Reasoning: ${options.openaiReasoning}` : '';\n logger.info('🤖 Making request to OpenAI');\n logger.info(' Model: %s%s', modelToUse, reasoningInfo);\n logger.info(' Request size: %s KB (%s bytes)', requestSizeKB, requestSize.toLocaleString());\n\n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n // Use openaiMaxOutputTokens if specified (highest priority), otherwise fall back to maxTokens, or default to 10000\n const maxCompletionTokens = options.openaiMaxOutputTokens ?? options.maxTokens ?? 
10000;\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n reasoning_effort: options.openaiReasoning,\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n // Prepare the API call options\n const apiOptions: any = {\n model: modelToUse,\n messages,\n max_completion_tokens: maxCompletionTokens,\n response_format: options.responseFormat,\n };\n\n // Add reasoning parameter if specified and model supports it\n if (options.openaiReasoning && (modelToUse.includes('gpt-5') || modelToUse.includes('o3'))) {\n apiOptions.reasoning_effort = options.openaiReasoning;\n }\n\n // Add timeout wrapper to the OpenAI API call\n const startTime = Date.now();\n const completionPromise = openai.chat.completions.create(apiOptions);\n\n // Create timeout promise with proper cleanup to prevent memory leaks\n let timeoutId: NodeJS.Timeout | null = null;\n const timeoutPromise = new Promise<never>((_, reject) => {\n const timeoutMs = parseInt(process.env.OPENAI_TIMEOUT_MS || '300000'); // Default to 5 minutes\n timeoutId = setTimeout(() => reject(new OpenAIError(`OpenAI API call timed out after ${timeoutMs/1000} seconds`)), timeoutMs);\n });\n\n let completion;\n try {\n completion = await Promise.race([completionPromise, timeoutPromise]);\n } finally {\n // Clear the timeout to prevent memory leaks\n if (timeoutId !== null) {\n clearTimeout(timeoutId);\n }\n }\n\n const elapsedTime = Date.now() - startTime;\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n throw new OpenAIError('No response received from OpenAI');\n }\n\n // Calculate and log response size\n const responseSize = response.length;\n const responseSizeKB = (responseSize / 1024).toFixed(2);\n logger.info(' Response size: %s KB (%s bytes)', responseSizeKB, responseSize.toLocaleString());\n\n // Log elapsed time\n const elapsedTimeFormatted = elapsedTime >= 1000\n ? 
`${(elapsedTime / 1000).toFixed(1)}s`\n : `${elapsedTime}ms`;\n logger.info(' Time: %s', elapsedTimeFormatted);\n\n // Log token usage if available\n if (completion.usage) {\n logger.info(' Token usage: %s prompt + %s completion = %s total',\n completion.usage.prompt_tokens?.toLocaleString() || '?',\n completion.usage.completion_tokens?.toLocaleString() || '?',\n completion.usage.total_tokens?.toLocaleString() || '?'\n );\n }\n\n logger.debug('Received response from OpenAI: %s...', response.substring(0, 30));\n if (options.responseFormat) {\n return safeJsonParse(response, 'OpenAI API response');\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n const isTokenError = isTokenLimitError(error);\n throw new OpenAIError(`Failed to create completion: ${error.message}`, isTokenError);\n } finally {\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n }\n}\n\n// Create completion with automatic retry on token limit errors\nexport async function createCompletionWithRetry(\n messages: ChatCompletionMessageParam[],\n options: { responseFormat?: any, model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, maxTokens?: number, openaiReasoning?: 'low' | 'medium' | 'high', openaiMaxOutputTokens?: number } = { model: \"gpt-4o-mini\" },\n retryCallback?: (attempt: number) => Promise<ChatCompletionMessageParam[]>\n): Promise<string | any> {\n const logger = getLogger();\n const maxRetries = 3;\n\n for (let attempt = 1; attempt <= maxRetries; attempt++) {\n try {\n const messagesToSend = attempt === 1 ? messages : (retryCallback ? await retryCallback(attempt) : messages);\n return await createCompletion(messagesToSend, options);\n } catch (error: any) {\n if (error instanceof OpenAIError && error.isTokenLimitError && attempt < maxRetries && retryCallback) {\n logger.warn('Token limit exceeded on attempt %d/%d, retrying with reduced content...', attempt, maxRetries);\n // Add exponential backoff for token limit errors\n const backoffMs = Math.min(1000 * Math.pow(2, attempt - 1), 10000);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n } else if (isRateLimitError(error) && attempt < maxRetries) {\n // Handle rate limiting with exponential backoff\n const backoffMs = Math.min(2000 * Math.pow(2, attempt - 1), 15000); // More reasonable backoff: 2s, 4s, 8s, max 15s\n logger.warn(`Rate limit hit on attempt ${attempt}/${maxRetries}, waiting ${backoffMs}ms before retry...`);\n await new Promise(resolve => setTimeout(resolve, backoffMs));\n continue;\n }\n throw error;\n }\n }\n\n // This should never be reached, but TypeScript requires it\n throw new OpenAIError('Max retries exceeded');\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string, debugRequestFile?: string, debugResponseFile?: string, outputDirectory?: string } = { model: \"whisper-1\" }): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n let openai: OpenAI | null = null;\n let audioStream: fs.ReadStream | null = null;\n let streamClosed = false;\n\n // Helper function to safely close the stream\n const closeAudioStream = () => {\n if (audioStream && !streamClosed) {\n try {\n // Only call destroy if it exists and the stream isn't already destroyed\n if (typeof audioStream.destroy === 
'function' && !audioStream.destroyed) {\n audioStream.destroy();\n }\n streamClosed = true;\n logger.debug('Audio stream closed successfully');\n } catch (streamErr) {\n logger.debug('Failed to destroy audio read stream: %s', (streamErr as Error).message);\n streamClosed = true; // Mark as closed even if destroy failed\n }\n }\n };\n\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n openai = new OpenAI({\n apiKey: apiKey,\n });\n\n logger.debug('Transcribing audio file: %s', filePath);\n\n // Save request debug file if enabled\n if (options.debug && (options.debugRequestFile || options.debugFile)) {\n const requestData = {\n model: options.model || \"whisper-1\",\n file: filePath, // Can't serialize the stream, so just save the file path\n response_format: \"json\",\n };\n const debugFile = options.debugRequestFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(requestData, null, 2), 'utf8');\n logger.debug('Wrote request debug file to %s', debugFile);\n }\n\n audioStream = await storage.readStream(filePath);\n\n // Set up error handler for the stream to ensure cleanup on stream errors\n // Only add handler if the stream has the 'on' method (real streams)\n if (audioStream && typeof audioStream.on === 'function') {\n audioStream.on('error', (streamError) => {\n logger.error('Audio stream error: %s', streamError.message);\n closeAudioStream();\n });\n }\n\n let transcription;\n try {\n transcription = await openai.audio.transcriptions.create({\n model: options.model || \"whisper-1\",\n file: audioStream,\n response_format: \"json\",\n });\n // Close the stream immediately after successful API call to prevent race conditions\n closeAudioStream();\n } catch (apiError) {\n // Close the stream immediately if the API call fails\n closeAudioStream();\n throw apiError;\n }\n\n // Save response debug file if enabled\n if (options.debug && (options.debugResponseFile || options.debugFile)) {\n const debugFile = options.debugResponseFile || options.debugFile;\n await storage.writeFile(debugFile!, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote response debug file to %s', debugFile);\n }\n\n const response = transcription;\n if (!response) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n\n logger.debug('Received transcription from OpenAI: %s', response);\n\n // Archive the audio file and transcription\n try {\n const outputDir = options.outputDirectory || 'output';\n await archiveAudio(filePath, response.text, outputDir);\n } catch (archiveError: any) {\n // Don't fail the transcription if archiving fails, just log the error\n logger.warn('Failed to archive audio file: %s', archiveError.message);\n }\n\n return response;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n } finally {\n // Ensure the audio stream is properly closed to release file handles\n closeAudioStream();\n // OpenAI client cleanup is handled automatically by the library\n // No manual cleanup needed for newer versions\n 
}\n}\n"],"names":["getModelForCommand","config","commandName","commandModel","commit","model","release","review","getOpenAIReasoningForCommand","commandReasoning","openaiReasoning","getOpenAIMaxOutputTokensForCommand","commandMaxOutputTokens","openaiMaxOutputTokens","OpenAIError","Error","message","isTokenLimitError","name","error","toLowerCase","includes","isRateLimitError","code","status","createCompletion","messages","options","logger","getLogger","storage","Storage","log","debug","openai","completion","apiKey","process","env","OPENAI_API_KEY","timeoutMs","parseInt","OPENAI_TIMEOUT_MS","OpenAI","timeout","modelToUse","requestSize","JSON","stringify","length","requestSizeKB","toFixed","reasoningInfo","info","toLocaleString","maxCompletionTokens","maxTokens","debugRequestFile","debugFile","requestData","max_completion_tokens","response_format","responseFormat","reasoning_effort","writeFile","apiOptions","startTime","Date","now","completionPromise","chat","completions","create","timeoutId","timeoutPromise","Promise","_","reject","setTimeout","race","clearTimeout","elapsedTime","debugResponseFile","response","choices","content","trim","responseSize","responseSizeKB","elapsedTimeFormatted","usage","prompt_tokens","completion_tokens","total_tokens","substring","safeJsonParse","stack","isTokenError","createCompletionWithRetry","retryCallback","maxRetries","attempt","messagesToSend","warn","backoffMs","Math","min","pow","resolve","transcribeAudio","filePath","audioStream","streamClosed","closeAudioStream","destroy","destroyed","streamErr","file","readStream","on","streamError","transcription","audio","transcriptions","apiError","outputDir","outputDirectory","archiveAudio","text","archiveError"],"mappings":";;;;;;;;;;;;;;;;;;;AAcA;;;AAGC,IACM,SAASA,kBAAAA,CAAmBC,MAAc,EAAEC,WAAmB,EAAA;IAClE,IAAIC,YAAAA;IAEJ,OAAQD,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcD,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;QACJ,KAAK,SAAA;AACcJ,YAAAA,IAAAA,eAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBI,KAAK;AACpC,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACcJ,YAAAA,IAAAA,cAAAA;AAAfE,YAAAA,YAAAA,GAAAA,CAAeF,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeI,KAAK;AACnC,YAAA;AAIR;;IAGA,OAAOF,YAAAA,IAAgBF,MAAAA,CAAOI,KAAK,IAAI,aAAA;AAC3C;AAEA;;;AAGC,IACM,SAASG,4BAAAA,CAA6BP,MAAc,EAAEC,WAAmB,EAAA;IAC5E,IAAIO,gBAAAA;IAEJ,OAAQP,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBD,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;QACJ,KAAK,SAAA;AACkBT,YAAAA,IAAAA,eAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,gBAAgBS,eAAe;AAClD,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACkBT,YAAAA,IAAAA,cAAAA;AAAnBQ,YAAAA,gBAAAA,GAAAA,CAAmBR,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeS,eAAe;AACjD,YAAA;AAIR;;IAGA,OAAOD,gBAAAA,IAAoBR,MAAAA,CAAOS,eAAe,IAAI,KAAA;AACzD;AAEA;;;AAGC,IACM,SAASC,kCAAAA,CAAmCV,MAAc,EAAEC,WAAmB,EAAA;IAClF,IAAIU,sBAAAA;IAEJ,OAAQV,WAAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBD,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOG,MAAM,MAAA,IAAA,IAAbH,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;QACJ,KAAK,SAAA;AACwBZ,YAAAA,IAAAA,eAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,kBAAAA,MAAAA,CAAOK,OAAO,MAAA,IAAA,IAAdL,eAAAA,KAAAA,MAA
AA,GAAAA,MAAAA,GAAAA,gBAAgBY,qBAAqB;AAC9D,YAAA;QACJ,KAAK,QAAA;QACL,KAAK,cAAA;AACwBZ,YAAAA,IAAAA,cAAAA;AAAzBW,YAAAA,sBAAAA,GAAAA,CAAyBX,iBAAAA,MAAAA,CAAOM,MAAM,MAAA,IAAA,IAAbN,cAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,eAAeY,qBAAqB;AAC7D,YAAA;AAIR;;IAGA,OAAOD,sBAAAA,IAA0BX,MAAAA,CAAOY,qBAAqB,IAAI,KAAA;AACrE;AAEO,MAAMC,WAAAA,SAAoBC,KAAAA,CAAAA;AAC7B,IAAA,WAAA,CAAYC,OAAe,EAAE,iBAAgBC,GAA6B,KAAK,CAAE;QAC7E,KAAK,CAACD,oEADmCC,iBAAAA,GAAAA,iBAAAA;QAEzC,IAAI,CAACC,IAAI,GAAG,aAAA;AAChB,IAAA;AACJ;AAEA;AACO,SAASD,kBAAkBE,KAAU,EAAA;AACxC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOH,OAAO,GAAE,OAAO,KAAA;AAE5B,IAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,IAAA,OAAOJ,QAAQK,QAAQ,CAAC,6BACjBL,OAAAA,CAAQK,QAAQ,CAAC,yBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,kBACjBL,OAAAA,CAAQK,QAAQ,CAAC,iBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA;AAC5B;AAEA;AACO,SAASC,iBAAiBH,KAAU,EAAA;AACvC,IAAA,IAAI,EAACA,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,MAAOH,OAAO,CAAA,IAAI,EAACG,KAAAA,KAAAA,IAAAA,IAAAA,KAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,KAAAA,CAAOI,IAAI,KAAI,EAACJ,KAAAA,KAAAA,IAAAA,IAAAA,4BAAAA,KAAAA,CAAOK,MAAM,GAAE,OAAO,KAAA;;AAG9D,IAAA,IAAIL,MAAMK,MAAM,KAAK,OAAOL,KAAAA,CAAMI,IAAI,KAAK,qBAAA,EAAuB;QAC9D,OAAO,IAAA;AACX,IAAA;;IAGA,IAAIJ,KAAAA,CAAMH,OAAO,EAAE;AACf,QAAA,MAAMA,OAAAA,GAAUG,KAAAA,CAAMH,OAAO,CAACI,WAAW,EAAA;AACzC,QAAA,OAAOJ,QAAQK,QAAQ,CAAC,0BACjBL,OAAAA,CAAQK,QAAQ,CAAC,mBAAA,CAAA,IACjBL,OAAAA,CAAQK,QAAQ,CAAC,qBAChBL,OAAAA,CAAQK,QAAQ,CAAC,MAAA,CAAA,IAAWL,OAAAA,CAAQK,QAAQ,CAAC,OAAA,CAAA;AACzD,IAAA;IAEA,OAAO,KAAA;AACX;AAEO,eAAeI,gBAAAA,CAAiBC,QAAsC,EAAEC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EAAA;AACpV,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;IAC5B,IAAI;AAuFiBC,QAAAA,IAAAA,oCAAAA,EAAAA,4BAAAA,EAAAA,oBAAAA;AAtFjB,QAAA,MAAMC,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;;QAGA,MAAM0B,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5DR,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA,MAAAA;YACRQ,OAAAA,EAASJ;AACb,SAAA,CAAA;QAEA,MAAMK,UAAAA,GAAalB,OAAAA,CAAQtB,KAAK,IAAI,aAAA;;AAGpC,QAAA,MAAMyC,WAAAA,GAAcC,IAAAA,CAAKC,SAAS,CAACtB,UAAUuB,MAAM;AACnD,QAAA,MAAMC,gBAAgB,CAACJ,cAAc,IAAG,EAAGK,OAAO,CAAC,CAAA,CAAA;;QAGnD,MAAMC,aAAAA,GAAgBzB,OAAAA,CAAQjB,eAAe,GAAG,CAAC,cAAc,EAAEiB,OAAAA,CAAQjB,eAAe,CAAA,CAAE,GAAG,EAAA;AAC7FkB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,6BAAA,CAAA;QACZzB,MAAAA,CAAOyB,IAAI,CAAC,gBAAA,EAAkBR,UAAAA,EAAYO,aAAAA,CAAAA;AAC1CxB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,mCAAA,EAAqCH,aAAAA,EAAeJ,YAAYQ,cAAc,EAAA,CAAA;QAE1F1B,MAAAA,CAAOK,KAAK,CAAC,8BAAA,EAAgCP,QAAAA,CAAAA;YAGjBC,8BAAAA,EAAAA,IAAAA;;AAA5B,QAAA,MAAM4B,mBAAAA,GAAsB5B,CAAAA,IAAAA,GAAAA,CAAAA,iCAAAA,OAAAA,CAAQd,qBAAqB,MAAA,IAAA,IAA7Bc,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,GAAiCA,OAAAA,CAAQ6B,SAAS,MAAA,IAAA,IAAlD7B,kBAAAA,IAAAA,GAAsD,KAAA;;QAGlF,IAAIA,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOwC,UAAAA;AACPnB,gBAAAA,QAAAA;gBACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,gBAAAA,eAAAA,EAAiBlC,QAAQmC,cAAc;AACvCC,gBAAAA,gBAAAA,EAAkBpC,QAAQjB;AAC9B,aAAA;AACA,YAAA,MAAMgD,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;;AAGA,QAAA,MAAMO,UAAAA,GAAkB;YACpB5D,KAAAA,EAAOwC,U
AAAA;AACPnB,YAAAA,QAAAA;YACAkC,qBAAAA,EAAuBL,mBAAAA;AACvBM,YAAAA,eAAAA,EAAiBlC,QAAQmC;AAC7B,SAAA;;AAGA,QAAA,IAAInC,OAAAA,CAAQjB,eAAe,KAAKmC,UAAAA,CAAWxB,QAAQ,CAAC,OAAA,CAAA,IAAYwB,UAAAA,CAAWxB,QAAQ,CAAC,IAAA,CAAI,CAAA,EAAI;YACxF4C,UAAAA,CAAWF,gBAAgB,GAAGpC,OAAAA,CAAQjB,eAAe;AACzD,QAAA;;QAGA,MAAMwD,SAAAA,GAAYC,KAAKC,GAAG,EAAA;AAC1B,QAAA,MAAMC,oBAAoBnC,MAAAA,CAAOoC,IAAI,CAACC,WAAW,CAACC,MAAM,CAACP,UAAAA,CAAAA;;AAGzD,QAAA,IAAIQ,SAAAA,GAAmC,IAAA;AACvC,QAAA,MAAMC,cAAAA,GAAiB,IAAIC,OAAAA,CAAe,CAACC,CAAAA,EAAGC,MAAAA,GAAAA;YAC1C,MAAMrC,SAAAA,GAAYC,SAASJ,OAAAA,CAAQC,GAAG,CAACI,iBAAiB,IAAI;AAC5D+B,YAAAA,SAAAA,GAAYK,UAAAA,CAAW,IAAMD,MAAAA,CAAO,IAAI/D,WAAAA,CAAY,CAAC,gCAAgC,EAAE0B,SAAAA,GAAU,IAAA,CAAK,QAAQ,CAAC,CAAA,CAAA,EAAIA,SAAAA,CAAAA;AACvH,QAAA,CAAA,CAAA;QAEA,IAAIL,UAAAA;QACJ,IAAI;YACAA,UAAAA,GAAa,MAAMwC,OAAAA,CAAQI,IAAI,CAAC;AAACV,gBAAAA,iBAAAA;AAAmBK,gBAAAA;AAAe,aAAA,CAAA;QACvE,CAAA,QAAU;;AAEN,YAAA,IAAID,cAAc,IAAA,EAAM;gBACpBO,YAAAA,CAAaP,SAAAA,CAAAA;AACjB,YAAA;AACJ,QAAA;QAEA,MAAMQ,WAAAA,GAAcd,IAAAA,CAAKC,GAAG,EAAA,GAAKF,SAAAA;;QAGjC,IAAIvC,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACb,UAAAA,EAAY,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YACzEP,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,SAAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,YAAWhD,oBAAAA,GAAAA,UAAAA,CAAWiD,OAAO,CAAC,CAAA,CAAE,cAArBjD,oBAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,CAAAA,4BAAAA,GAAAA,qBAAuBnB,OAAO,MAAA,IAAA,IAA9BmB,oDAAAA,oCAAAA,GAAAA,4BAAAA,CAAgCkD,OAAO,MAAA,IAAA,IAAvClD,oCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,qCAAyCmD,IAAI,EAAA;AAC9D,QAAA,IAAI,CAACH,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,kCAAA,CAAA;AAC1B,QAAA;;QAGA,MAAMyE,YAAAA,GAAeJ,SAASlC,MAAM;AACpC,QAAA,MAAMuC,iBAAiB,CAACD,eAAe,IAAG,EAAGpC,OAAO,CAAC,CAAA,CAAA;AACrDvB,QAAAA,MAAAA,CAAOyB,IAAI,CAAC,oCAAA,EAAsCmC,cAAAA,EAAgBD,aAAajC,cAAc,EAAA,CAAA;;AAG7F,QAAA,MAAMmC,uBAAuBR,WAAAA,IAAe,IAAA,GACtC,GAAG,CAACA,cAAc,IAAG,EAAG9B,OAAO,CAAC,GAAG,CAAC,CAAC,GACrC,CAAA,EAAG8B,WAAAA,CAAY,EAAE,CAAC;QACxBrD,MAAAA,CAAOyB,IAAI,CAAC,aAAA,EAAeoC,oBAAAA,CAAAA;;QAG3B,IAAItD,UAAAA,CAAWuD,KAAK,EAAE;AAEdvD,YAAAA,IAAAA,+BAAAA,EACAA,mCAAAA,EACAA,8BAAAA;AAHJP,YAAAA,MAAAA,CAAOyB,IAAI,CAAC,sDAAA,EACRlB,CAAAA,CAAAA,+BAAAA,GAAAA,WAAWuD,KAAK,CAACC,aAAa,MAAA,IAAA,IAA9BxD,sDAAAA,+BAAAA,CAAgCmB,cAAc,EAAA,KAAM,GAAA,EACpDnB,EAAAA,mCAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACE,iBAAiB,MAAA,IAAA,IAAlCzD,mCAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,mCAAAA,CAAoCmB,cAAc,OAAM,GAAA,EACxDnB,CAAAA,CAAAA,8BAAAA,GAAAA,UAAAA,CAAWuD,KAAK,CAACG,YAAY,cAA7B1D,8BAAAA,KAAAA,KAAAA,CAAAA,GAAAA,KAAAA,CAAAA,GAAAA,8BAAAA,CAA+BmB,cAAc,EAAA,KAAM,GAAA,CAAA;AAE3D,QAAA;AAEA1B,QAAAA,MAAAA,CAAOK,KAAK,CAAC,sCAAA,EAAwCkD,QAAAA,CAASW,SAAS,CAAC,CAAA,EAAG,EAAA,CAAA,CAAA;QAC3E,IAAInE,OAAAA,CAAQmC,cAAc,EAAE;AACxB,YAAA,OAAOiC,cAAcZ,QAAAA,EAAU,qBAAA,CAAA;QACnC,CAAA,MAAO;YACH,OAAOA,QAAAA;AACX,QAAA;AAEJ,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,iCAAA,EAAmCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC1E,QAAA,MAAMC,eAAehF,iBAAAA,CAAkBE,KAAAA,CAAAA;QACvC,MAAM,IAAIL,YAAY,CAAC,6BAA6B,EAAEK,KAAAA,CAAMH,OAAO,EAAE,EAAEiF,YAAAA,CAAAA;IAC3E,CAAA,QAAU;;;AAGV,IAAA;AACJ;AAEA;AACO,eAAeC,yBAAAA,CAClBxE,QAAsC,EACtCC,OAAAA,GAAiP;IAAEtB,KAAAA,EAAO;AAAc,CAAC,EACzQ8F,aAA0E,EAAA;AAE1E,IAAA,MAAMvE,MAAAA,GAASC,SAAAA,EAAAA;AACf,IAAA,MAAMuE,UAAAA,GAAa,CAAA;AAEnB,IAAA,IAAK,IAAIC,OAAAA,GAAU,CAAA,EAAGA,OAAAA,IAAWD,YAAYC,OAAAA,EAAAA,CAAW;QACpD,IAAI;AACA,YAAA,MAAMC,iBAAiBD,OAAAA,KAAY,CAAA,GAAI3E,WAAYyE,
aAAAA,GAAgB,MAAMA,cAAcE,OAAAA,CAAAA,GAAW3E,QAAAA;YAClG,OAAO,MAAMD,iBAAiB6E,cAAAA,EAAgB3E,OAAAA,CAAAA;AAClD,QAAA,CAAA,CAAE,OAAOR,KAAAA,EAAY;AACjB,YAAA,IAAIA,iBAAiBL,WAAAA,IAAeK,KAAAA,CAAMF,iBAAiB,IAAIoF,OAAAA,GAAUD,cAAcD,aAAAA,EAAe;gBAClGvE,MAAAA,CAAO2E,IAAI,CAAC,yEAAA,EAA2EF,OAAAA,EAASD,UAAAA,CAAAA;;gBAEhG,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,KAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA;AAC5D,gBAAA,MAAM,IAAI1B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA,CAAA,MAAO,IAAIlF,gBAAAA,CAAiBH,KAAAA,CAAAA,IAAUkF,OAAAA,GAAUD,UAAAA,EAAY;;AAExD,gBAAA,MAAMI,SAAAA,GAAYC,IAAAA,CAAKC,GAAG,CAAC,IAAA,GAAOD,IAAAA,CAAKE,GAAG,CAAC,CAAA,EAAGN,OAAAA,GAAU,CAAA,CAAA,EAAI,KAAA,CAAA,CAAA;AAC5DzE,gBAAAA,MAAAA,CAAO2E,IAAI,CAAC,CAAC,0BAA0B,EAAEF,OAAAA,CAAQ,CAAC,EAAED,UAAAA,CAAW,UAAU,EAAEI,SAAAA,CAAU,kBAAkB,CAAC,CAAA;AACxG,gBAAA,MAAM,IAAI7B,OAAAA,CAAQiC,CAAAA,OAAAA,GAAW9B,WAAW8B,OAAAA,EAASJ,SAAAA,CAAAA,CAAAA;AACjD,gBAAA;AACJ,YAAA;YACA,MAAMrF,KAAAA;AACV,QAAA;AACJ,IAAA;;AAGA,IAAA,MAAM,IAAIL,WAAAA,CAAY,sBAAA,CAAA;AAC1B;AAEO,eAAe+F,eAAAA,CAAgBC,QAAgB,EAAEnF,OAAAA,GAAoJ;IAAEtB,KAAAA,EAAO;AAAY,CAAC,EAAA;AAC9N,IAAA,MAAMuB,MAAAA,GAASC,SAAAA,EAAAA;IACf,MAAMC,OAAAA,GAAUC,MAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AACnD,IAAA,IAAIC,MAAAA,GAAwB,IAAA;AAC5B,IAAA,IAAI6E,WAAAA,GAAoC,IAAA;AACxC,IAAA,IAAIC,YAAAA,GAAe,KAAA;;AAGnB,IAAA,MAAMC,gBAAAA,GAAmB,IAAA;QACrB,IAAIF,WAAAA,IAAe,CAACC,YAAAA,EAAc;YAC9B,IAAI;;gBAEA,IAAI,OAAOD,YAAYG,OAAO,KAAK,cAAc,CAACH,WAAAA,CAAYI,SAAS,EAAE;AACrEJ,oBAAAA,WAAAA,CAAYG,OAAO,EAAA;AACvB,gBAAA;gBACAF,YAAAA,GAAe,IAAA;AACfpF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,kCAAA,CAAA;AACjB,YAAA,CAAA,CAAE,OAAOmF,SAAAA,EAAW;AAChBxF,gBAAAA,MAAAA,CAAOK,KAAK,CAAC,yCAAA,EAA4CmF,UAAoBpG,OAAO,CAAA;AACpFgG,gBAAAA,YAAAA,GAAe;AACnB,YAAA;AACJ,QAAA;AACJ,IAAA,CAAA;IAEA,IAAI;AACA,QAAA,MAAM5E,MAAAA,GAASC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AACzC,QAAA,IAAI,CAACH,MAAAA,EAAQ;AACT,YAAA,MAAM,IAAItB,WAAAA,CAAY,gDAAA,CAAA;AAC1B,QAAA;AAEAoB,QAAAA,MAAAA,GAAS,IAAIS,MAAAA,CAAO;YAChBP,MAAAA,EAAQA;AACZ,SAAA,CAAA;QAEAR,MAAAA,CAAOK,KAAK,CAAC,6BAAA,EAA+B6E,QAAAA,CAAAA;;QAG5C,IAAInF,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQ8B,gBAAgB,IAAI9B,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AAClE,YAAA,MAAMC,WAAAA,GAAc;gBAChBtD,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMP,QAAAA;gBACNjD,eAAAA,EAAiB;AACrB,aAAA;AACA,YAAA,MAAMH,SAAAA,GAAY/B,OAAAA,CAAQ8B,gBAAgB,IAAI9B,QAAQ+B,SAAS;YAC/D,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACW,WAAAA,EAAa,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC1E/B,MAAAA,CAAOK,KAAK,CAAC,gCAAA,EAAkCyB,SAAAA,CAAAA;AACnD,QAAA;QAEAqD,WAAAA,GAAc,MAAMjF,OAAAA,CAAQwF,UAAU,CAACR,QAAAA,CAAAA;;;AAIvC,QAAA,IAAIC,WAAAA,IAAe,OAAOA,WAAAA,CAAYQ,EAAE,KAAK,UAAA,EAAY;YACrDR,WAAAA,CAAYQ,EAAE,CAAC,OAAA,EAAS,CAACC,WAAAA,GAAAA;AACrB5F,gBAAAA,MAAAA,CAAOT,KAAK,CAAC,wBAAA,EAA0BqG,WAAAA,CAAYxG,OAAO,CAAA;AAC1DiG,gBAAAA,gBAAAA,EAAAA;AACJ,YAAA,CAAA,CAAA;AACJ,QAAA;QAEA,IAAIQ,aAAAA;QACJ,IAAI;AACAA,YAAAA,aAAAA,GAAgB,MAAMvF,MAAAA,CAAOwF,KAAK,CAACC,cAAc,CAACnD,MAAM,CAAC;gBACrDnE,KAAAA,EAAOsB,OAAAA,CAAQtB,KAAK,IAAI,WAAA;gBACxBgH,IAAAA,EAAMN,WAAAA;gBACNlD,eAAAA,EAAiB;AACrB,aAAA,CAAA;;AAEAoD,YAAAA,gBAAAA,EAAAA;AACJ,QAAA,CAAA,CAAE,OAAOW,QAAAA,EAAU;;AAEfX,YAAAA,gBAAAA,EAAAA;YACA,MAAMW,QAAAA;AACV,QAAA;;QAGA,IAAIjG,OAAAA,CAAQM,KAAK,KAAKN,OAAAA,CAAQuD,iBAAiB,IAAIvD,OAAAA,CAAQ+B,SAAQ,CAAA,EAAI;AACnE,YAAA,MAAMA,SAAAA,GAAY/B,OAAAA,CAAQuD,iBAAiB,IAAIvD,QAAQ+B,SAAS;YAChE,MAAM5B,OAAAA,CAAQkC,SAAS,CAACN,SAAAA,EAAYX,KAAKC,SAAS,CAACyE,aAAAA,EAAe,IAAA,EAAM,CAAA,CAAA,EAAI,MAAA,CAAA;YAC5E7F,MAAAA,CAAOK,KAAK,CAAC,iCAAA,EAAmCyB,S
AAAA,CAAAA;AACpD,QAAA;AAEA,QAAA,MAAMyB,QAAAA,GAAWsC,aAAAA;AACjB,QAAA,IAAI,CAACtC,QAAAA,EAAU;AACX,YAAA,MAAM,IAAIrE,WAAAA,CAAY,uCAAA,CAAA;AAC1B,QAAA;QAEAc,MAAAA,CAAOK,KAAK,CAAC,wCAAA,EAA0CkD,QAAAA,CAAAA;;QAGvD,IAAI;YACA,MAAM0C,SAAAA,GAAYlG,OAAAA,CAAQmG,eAAe,IAAI,QAAA;AAC7C,YAAA,MAAMC,YAAAA,CAAajB,QAAAA,EAAU3B,QAAAA,CAAS6C,IAAI,EAAEH,SAAAA,CAAAA;AAChD,QAAA,CAAA,CAAE,OAAOI,YAAAA,EAAmB;;AAExBrG,YAAAA,MAAAA,CAAO2E,IAAI,CAAC,kCAAA,EAAoC0B,YAAAA,CAAajH,OAAO,CAAA;AACxE,QAAA;QAEA,OAAOmE,QAAAA;AAEX,IAAA,CAAA,CAAE,OAAOhE,KAAAA,EAAY;AACjBS,QAAAA,MAAAA,CAAOT,KAAK,CAAC,sCAAA,EAAwCA,MAAMH,OAAO,EAAEG,MAAM6E,KAAK,CAAA;AAC/E,QAAA,MAAM,IAAIlF,WAAAA,CAAY,CAAC,4BAA4B,EAAEK,KAAAA,CAAMH,OAAO,CAAA,CAAE,CAAA;IACxE,CAAA,QAAU;;AAENiG,QAAAA,gBAAAA,EAAAA;;;AAGJ,IAAA;AACJ;;;;"}