@elizaos/plugin-openai 1.5.14 → 1.5.16

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
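Notable changes visible in this diff: `OpenAITranscriptionParams.audio` now accepts `Buffer` in addition to `Blob | File`, with an optional `mimeType` hint and a new magic-byte `detectAudioMimeType` fallback; `fetchTextToSpeech` now checks for a null response body and, outside the browser, converts the Web `ReadableStream` to a Node.js `Readable` via a new `webStreamToNodeStream` helper; and the source is reformatted from double to single quotes. The following is a minimal usage sketch of the new transcription parameter shape; the helper name and the return typing are assumptions, while the parameter shape comes from the updated interface and the plugin's own tests.

```typescript
import { ModelType, type IAgentRuntime } from '@elizaos/core';

// Hypothetical helper (not part of the package): transcribe raw audio bytes
// using the 1.5.16 parameter shape, which accepts a Buffer directly.
async function transcribeFromBuffer(runtime: IAgentRuntime, audio: Buffer): Promise<string> {
  const text = await runtime.useModel(ModelType.TRANSCRIPTION, {
    audio, // 1.5.14 required a Blob/File; 1.5.16 also accepts a Buffer
    mimeType: 'audio/ogg', // optional hint; if omitted, the plugin sniffs magic bytes
    language: 'en',
  });
  return text as string; // assumption: the handler resolves to the transcription text
}
```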
@@ -2,9 +2,9 @@
  "version": 3,
  "sources": ["../../src/index.ts"],
  "sourcesContent": [
- "import { createOpenAI } from \"@ai-sdk/openai\";\nimport type {\n DetokenizeTextParams,\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n TokenizeTextParams,\n} from \"@elizaos/core\";\nimport { EventType, logger, ModelType, VECTOR_DIMS } from \"@elizaos/core\";\nimport {\n generateObject,\n generateText,\n JSONParseError,\n type JSONValue,\n type LanguageModelUsage,\n} from \"ai\";\nimport { encodingForModel, type TiktokenModel } from \"js-tiktoken\";\n\nexport interface OpenAITranscriptionParams {\n audio: Blob | File;\n model?: string;\n language?: string;\n response_format?: string;\n prompt?: string;\n temperature?: number;\n timestampGranularities?: string[];\n}\n\nexport interface OpenAITextToSpeechParams {\n text: string;\n model?: string;\n voice?: string;\n format?: \"mp3\" | \"wav\" | \"flac\" | string;\n instructions?: string;\n}\n\n/**\n * Retrieves a configuration setting from the runtime, falling back to environment variables or a default value if not found.\n *\n * @param key - The name of the setting to retrieve.\n * @param defaultValue - The value to return if the setting is not found in the runtime or environment.\n * @returns The resolved setting value, or {@link defaultValue} if not found.\n */\nfunction getSetting(\n runtime: IAgentRuntime,\n key: string,\n defaultValue?: string,\n): string | undefined {\n return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;\n}\n\nfunction isBrowser(): boolean {\n return (\n typeof globalThis !== \"undefined\" &&\n typeof (globalThis as any).document !== \"undefined\"\n );\n}\n\n/**\n * Determines whether we're running in a browser with a server-hosted proxy configured.\n * In this mode, we do not require a real API key on the client and rely on the proxy to inject it.\n */\nfunction isProxyMode(runtime: IAgentRuntime): boolean {\n return isBrowser() && !!getSetting(runtime, \"OPENAI_BROWSER_BASE_URL\");\n}\n\nfunction getAuthHeader(\n runtime: IAgentRuntime,\n forEmbedding = false,\n): Record<string, string> {\n if (isBrowser()) return {};\n const key = forEmbedding ? getEmbeddingApiKey(runtime) : getApiKey(runtime);\n return key ? { Authorization: `Bearer ${key}` } : {};\n}\n\n/**\n * Retrieves the OpenAI API base URL from runtime settings, environment variables, or defaults, using provider-aware resolution.\n *\n * @returns The resolved base URL for OpenAI API requests.\n */\nfunction getBaseURL(runtime: IAgentRuntime): string {\n const browserURL = getSetting(runtime, \"OPENAI_BROWSER_BASE_URL\");\n const baseURL = (\n isBrowser() && browserURL\n ? browserURL\n : getSetting(runtime, \"OPENAI_BASE_URL\", \"https://api.openai.com/v1\")\n ) as string;\n logger.debug(`[OpenAI] Default base URL: ${baseURL}`);\n return baseURL;\n}\n\n/**\n * Retrieves the OpenAI API base URL for embeddings, falling back to the general base URL.\n *\n * @returns The resolved base URL for OpenAI embedding requests.\n */\nfunction getEmbeddingBaseURL(runtime: IAgentRuntime): string {\n const embeddingURL = isBrowser()\n ? 
getSetting(runtime, \"OPENAI_BROWSER_EMBEDDING_URL\") ||\n getSetting(runtime, \"OPENAI_BROWSER_BASE_URL\")\n : getSetting(runtime, \"OPENAI_EMBEDDING_URL\");\n if (embeddingURL) {\n logger.debug(`[OpenAI] Using specific embedding base URL: ${embeddingURL}`);\n return embeddingURL;\n }\n logger.debug(\"[OpenAI] Falling back to general base URL for embeddings.\");\n return getBaseURL(runtime);\n}\n\n/**\n * Helper function to get the API key for OpenAI\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getApiKey(runtime: IAgentRuntime): string | undefined {\n return getSetting(runtime, \"OPENAI_API_KEY\");\n}\n\n/**\n * Helper function to get the embedding API key for OpenAI, falling back to the general API key if not set.\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getEmbeddingApiKey(runtime: IAgentRuntime): string | undefined {\n const embeddingApiKey = getSetting(runtime, \"OPENAI_EMBEDDING_API_KEY\");\n if (embeddingApiKey) {\n logger.debug(\"[OpenAI] Using specific embedding API key (present)\");\n return embeddingApiKey;\n }\n logger.debug(\"[OpenAI] Falling back to general API key for embeddings.\");\n return getApiKey(runtime);\n}\n\n/**\n * Helper function to get the small model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured small model name\n */\nfunction getSmallModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"OPENAI_SMALL_MODEL\") ??\n (getSetting(runtime, \"SMALL_MODEL\", \"gpt-5-nano\") as string)\n );\n}\n\n/**\n * Helper function to get the large model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured large model name\n */\nfunction getLargeModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"OPENAI_LARGE_MODEL\") ??\n (getSetting(runtime, \"LARGE_MODEL\", \"gpt-5-mini\") as string)\n );\n}\n\n/**\n * Helper function to get the image description model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured image description model name\n */\nfunction getImageDescriptionModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, \"OPENAI_IMAGE_DESCRIPTION_MODEL\", \"gpt-5-nano\") ??\n \"gpt-5-nano\"\n );\n}\n\n/**\n * Helper function to get experimental telemetry setting\n *\n * @param runtime The runtime context\n * @returns Whether experimental telemetry is enabled\n */\nfunction getExperimentalTelemetry(runtime: IAgentRuntime): boolean {\n const setting = getSetting(runtime, \"OPENAI_EXPERIMENTAL_TELEMETRY\", \"false\");\n // Convert to string and check for truthy values\n const normalizedSetting = String(setting).toLowerCase();\n const result = normalizedSetting === \"true\";\n logger.debug(\n `[OpenAI] Experimental telemetry in function: \"${setting}\" (type: ${typeof setting}, normalized: \"${normalizedSetting}\", result: ${result})`,\n );\n return result;\n}\n\n/**\n * Create an OpenAI client with proper configuration\n *\n * @param runtime The runtime context\n * @returns Configured OpenAI client\n */\nfunction createOpenAIClient(runtime: IAgentRuntime) {\n const baseURL = getBaseURL(runtime);\n // In proxy mode (browser + proxy base URL), pass a harmless placeholder key.\n // The server proxy replaces Authorization; no secrets leave the server.\n const apiKey =\n getApiKey(runtime) ?? (isProxyMode(runtime) ? \"sk-proxy\" : undefined);\n return createOpenAI({ apiKey: (apiKey ?? 
\"\") as string, baseURL });\n}\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ??\n process.env.SMALL_MODEL ??\n \"gpt-5-nano\")\n : (process.env.LARGE_MODEL ?? \"gpt-5-mini\");\n const tokens = encodingForModel(modelName as TiktokenModel).encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ??\n process.env.SMALL_MODEL ??\n \"gpt-5-nano\")\n : (process.env.OPENAI_LARGE_MODEL ??\n process.env.LARGE_MODEL ??\n \"gpt-5-mini\");\n return encodingForModel(modelName as TiktokenModel).decode(tokens);\n}\n\n/**\n * Helper function to generate objects using specified model type\n */\nasync function generateObjectByModelType(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: IAgentRuntime) => string,\n): Promise<JSONValue> {\n const openai = createOpenAIClient(runtime);\n const modelName = getModelFn(runtime);\n logger.log(`[OpenAI] Using ${modelType} model: ${modelName}`);\n const temperature = params.temperature ?? 0;\n const schemaPresent = !!params.schema;\n\n if (schemaPresent) {\n logger.info(\n `Using ${modelType} without schema validation (schema provided but output=no-schema)`,\n );\n }\n\n try {\n const { object, usage } = await generateObject({\n model: openai.languageModel(modelName),\n output: \"no-schema\",\n prompt: params.prompt,\n temperature: temperature,\n experimental_repairText: getJsonRepairFunction(),\n });\n\n if (usage) {\n emitModelUsageEvent(\n runtime,\n modelType as ModelTypeName,\n params.prompt,\n usage,\n );\n }\n return object;\n } catch (error: unknown) {\n if (error instanceof JSONParseError) {\n logger.error(`[generateObject] Failed to parse JSON: ${error.message}`);\n\n const repairFunction = getJsonRepairFunction();\n const repairedJsonString = await repairFunction({\n text: error.text,\n error,\n });\n\n if (repairedJsonString) {\n try {\n const repairedObject = JSON.parse(repairedJsonString);\n logger.info(\"[generateObject] Successfully repaired JSON.\");\n return repairedObject;\n } catch (repairParseError: unknown) {\n const message =\n repairParseError instanceof Error\n ? repairParseError.message\n : String(repairParseError);\n logger.error(\n `[generateObject] Failed to parse repaired JSON: ${message}`,\n );\n throw repairParseError;\n }\n } else {\n logger.error(\"[generateObject] JSON repair failed.\");\n throw error;\n }\n } else {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`[generateObject] Unknown error: ${message}`);\n throw error;\n }\n }\n}\n\n/**\n * Returns a function to repair JSON text\n */\nfunction getJsonRepairFunction(): (params: {\n text: string;\n error: unknown;\n}) => Promise<string | null> {\n return async ({ text, error }: { text: string; error: unknown }) => {\n try {\n if (error instanceof JSONParseError) {\n const cleanedText = text.replace(/```json\\n|\\n```|```/g, \"\");\n JSON.parse(cleanedText);\n return cleanedText;\n }\n return null;\n } catch (jsonError: unknown) {\n const message =\n jsonError instanceof Error ? jsonError.message : String(jsonError);\n logger.warn(`Failed to repair JSON text: ${message}`);\n return null;\n }\n };\n}\n\n/**\n * Emits a model usage event\n * @param runtime The runtime context\n * @param type The model type\n * @param prompt The prompt used\n * @param usage The LLM usage data\n */\nfunction emitModelUsageEvent(\n runtime: IAgentRuntime,\n type: ModelTypeName,\n prompt: string,\n usage: LanguageModelUsage,\n) {\n runtime.emitEvent(EventType.MODEL_USED, {\n provider: \"openai\",\n type,\n prompt,\n tokens: {\n prompt: usage.inputTokens,\n completion: usage.outputTokens,\n total: usage.totalTokens,\n },\n });\n}\n\n/**\n * function for text-to-speech\n */\nasync function fetchTextToSpeech(\n runtime: IAgentRuntime,\n options: OpenAITextToSpeechParams,\n) {\n const defaultModel = getSetting(\n runtime,\n \"OPENAI_TTS_MODEL\",\n \"gpt-4o-mini-tts\",\n );\n const defaultVoice = getSetting(runtime, \"OPENAI_TTS_VOICE\", \"nova\");\n const defaultInstructions = getSetting(\n runtime,\n \"OPENAI_TTS_INSTRUCTIONS\",\n \"\",\n );\n const baseURL = getBaseURL(runtime);\n\n const model = options.model || (defaultModel as string);\n const voice = options.voice || (defaultVoice as string);\n const instructions = options.instructions ?? (defaultInstructions as string);\n const format = options.format || \"mp3\";\n\n try {\n const res = await fetch(`${baseURL}/audio/speech`, {\n method: \"POST\",\n headers: {\n ...getAuthHeader(runtime),\n \"Content-Type\": \"application/json\",\n // Hint desired audio format in Accept when possible\n ...(format === \"mp3\" ? { Accept: \"audio/mpeg\" } : {}),\n },\n body: JSON.stringify({\n model,\n voice,\n input: options.text,\n format,\n ...(instructions && { instructions }),\n }),\n });\n\n if (!res.ok) {\n const err = await res.text();\n throw new Error(`OpenAI TTS error ${res.status}: ${err}`);\n }\n\n return res.body;\n } catch (err: unknown) {\n const message = err instanceof Error ? 
err.message : String(err);\n throw new Error(`Failed to fetch speech from OpenAI TTS: ${message}`);\n }\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: \"openai\",\n description: \"OpenAI plugin\",\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,\n OPENAI_EMBEDDING_API_KEY: process.env.OPENAI_EMBEDDING_API_KEY,\n OPENAI_EMBEDDING_URL: process.env.OPENAI_EMBEDDING_URL,\n OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,\n OPENAI_IMAGE_DESCRIPTION_MODEL: process.env.OPENAI_IMAGE_DESCRIPTION_MODEL,\n OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS:\n process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS,\n OPENAI_EXPERIMENTAL_TELEMETRY: process.env.OPENAI_EXPERIMENTAL_TELEMETRY,\n },\n async init(_config, runtime) {\n // do check in the background\n new Promise<void>(async (resolve) => {\n resolve();\n try {\n if (!getApiKey(runtime) && !isBrowser()) {\n logger.warn(\n \"OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited\",\n );\n return;\n }\n try {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: { ...getAuthHeader(runtime) },\n });\n if (!response.ok) {\n logger.warn(\n `OpenAI API key validation failed: ${response.statusText}`,\n );\n logger.warn(\n \"OpenAI functionality will be limited until a valid API key is provided\",\n );\n } else {\n logger.log(\"OpenAI API key validated successfully\");\n }\n } catch (fetchError: unknown) {\n const message =\n fetchError instanceof Error\n ? fetchError.message\n : String(fetchError);\n logger.warn(`Error validating OpenAI API key: ${message}`);\n logger.warn(\n \"OpenAI functionality will be limited until a valid API key is provided\",\n );\n }\n } catch (error: unknown) {\n const message =\n (error as { errors?: Array<{ message: string }> })?.errors\n ?.map((e) => e.message)\n .join(\", \") ||\n (error instanceof Error ? error.message : String(error));\n logger.warn(\n `OpenAI plugin configuration issue: ${message} - You need to configure the OPENAI_API_KEY in your environment variables`,\n );\n }\n });\n },\n\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null,\n ): Promise<number[]> => {\n const embeddingModelName = getSetting(\n runtime,\n \"OPENAI_EMBEDDING_MODEL\",\n \"text-embedding-3-small\",\n );\n const embeddingDimension = Number.parseInt(\n getSetting(runtime, \"OPENAI_EMBEDDING_DIMENSIONS\", \"1536\") || \"1536\",\n 10,\n ) as (typeof VECTOR_DIMS)[keyof typeof VECTOR_DIMS];\n\n if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {\n const errorMsg = `Invalid embedding dimension: ${embeddingDimension}. 
Must be one of: ${Object.values(VECTOR_DIMS).join(\", \")}`;\n logger.error(errorMsg);\n throw new Error(errorMsg);\n }\n if (params === null) {\n logger.debug(\"Creating test embedding for initialization\");\n const testVector = Array(embeddingDimension).fill(0);\n testVector[0] = 0.1;\n return testVector;\n }\n let text: string;\n if (typeof params === \"string\") {\n text = params;\n } else if (typeof params === \"object\" && params.text) {\n text = params.text;\n } else {\n logger.warn(\"Invalid input format for embedding\");\n const fallbackVector = Array(embeddingDimension).fill(0);\n fallbackVector[0] = 0.2;\n return fallbackVector;\n }\n if (!text.trim()) {\n logger.warn(\"Empty text for embedding\");\n const emptyVector = Array(embeddingDimension).fill(0);\n emptyVector[0] = 0.3;\n return emptyVector;\n }\n\n const embeddingBaseURL = getEmbeddingBaseURL(runtime);\n\n try {\n const response = await fetch(`${embeddingBaseURL}/embeddings`, {\n method: \"POST\",\n headers: {\n ...getAuthHeader(runtime, true),\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n model: embeddingModelName,\n input: text,\n }),\n });\n\n // Clone available if needed for logging/debugging\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n logger.error(\n `OpenAI API error: ${response.status} - ${response.statusText}`,\n );\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.4;\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n usage?: { prompt_tokens: number; total_tokens: number };\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error(\"API returned invalid structure\");\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.5;\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n\n if (data.usage) {\n const usage = {\n inputTokens: data.usage.prompt_tokens,\n outputTokens: 0,\n totalTokens: data.usage.total_tokens,\n };\n\n emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, usage);\n }\n\n logger.log(`Got valid embedding with length ${embedding.length}`);\n return embedding;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error generating embedding: ${message}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.6;\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams,\n ) => {\n return await tokenizeText(modelType ?? ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams,\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams,\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getSmallModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_SMALL model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? 
undefined,\n temperature: temperature,\n maxOutputTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams,\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getLargeModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_LARGE model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxOutputTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime: IAgentRuntime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n },\n ) => {\n const n = params.n || 1;\n const size = params.size || \"1024x1024\";\n const prompt = params.prompt;\n const modelName = \"gpt-image-1\"; // Updated image model\n logger.log(`[OpenAI] Using IMAGE model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n\n try {\n const response = await fetch(`${baseURL}/images/generations`, {\n method: \"POST\",\n headers: {\n ...getAuthHeader(runtime),\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n model: modelName,\n prompt: prompt,\n n: n,\n size: size,\n }),\n });\n\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n\n return typedData.data;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n throw error;\n }\n },\n [ModelType.IMAGE_DESCRIPTION]: async (\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string,\n ) => {\n let imageUrl: string;\n let promptText: string | undefined;\n const modelName = getImageDescriptionModel(runtime);\n logger.log(`[OpenAI] Using IMAGE_DESCRIPTION model: ${modelName}`);\n const maxTokens = Number.parseInt(\n getSetting(runtime, \"OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS\", \"8192\") ||\n \"8192\",\n 10,\n );\n\n if (typeof params === \"string\") {\n imageUrl = params;\n promptText =\n \"Please analyze this image and provide a title and detailed description.\";\n } else {\n imageUrl = params.imageUrl;\n promptText =\n params.prompt ||\n \"Please analyze this image and provide a title and detailed description.\";\n }\n\n const messages = [\n {\n role: \"user\",\n content: [\n { type: \"text\", text: promptText },\n { type: \"image_url\", image_url: { url: imageUrl } },\n ],\n },\n ];\n\n const baseURL = getBaseURL(runtime);\n\n try {\n const requestBody: Record<string, any> = {\n model: modelName,\n messages: messages,\n max_tokens: maxTokens,\n };\n\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n ...getAuthHeader(runtime),\n },\n body: JSON.stringify(requestBody),\n });\n\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: unknown = await response.json();\n\n type OpenAIResponseType = {\n choices?: Array<{\n message?: { content?: string };\n finish_reason?: string;\n }>;\n usage?: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n };\n\n const typedResult = result as OpenAIResponseType;\n const content = typedResult.choices?.[0]?.message?.content;\n\n if (typedResult.usage) {\n emitModelUsageEvent(\n runtime,\n ModelType.IMAGE_DESCRIPTION,\n typeof params === \"string\" ? params : params.prompt || \"\",\n {\n inputTokens: typedResult.usage.prompt_tokens,\n outputTokens: typedResult.usage.completion_tokens,\n totalTokens: typedResult.usage.total_tokens,\n },\n );\n }\n\n if (!content) {\n return {\n title: \"Failed to analyze image\",\n description: \"No response from API\",\n };\n }\n\n // Check if a custom prompt was provided (not the default prompt)\n const isCustomPrompt =\n typeof params === \"object\" &&\n params.prompt &&\n params.prompt !==\n \"Please analyze this image and provide a title and detailed description.\";\n\n // If custom prompt is used, return the raw content\n if (isCustomPrompt) {\n return content;\n }\n\n // Otherwise, maintain backwards compatibility with object return\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1]?.trim() || \"Image Analysis\";\n const description = content\n .replace(/title[:\\s]+(.+?)(?:\\n|$)/i, \"\")\n .trim();\n\n const processedResult = { title, description };\n return processedResult;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error analyzing image: ${message}`);\n return {\n title: \"Failed to analyze image\",\n description: `Error: ${message}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (\n runtime: IAgentRuntime,\n input: Blob | File | OpenAITranscriptionParams,\n ) => {\n let modelName = getSetting(\n runtime,\n \"OPENAI_TRANSCRIPTION_MODEL\",\n \"gpt-4o-mini-transcribe\",\n );\n logger.log(`[OpenAI] Using TRANSCRIPTION model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n\n // Support either Blob/File directly, or an object with { audio: Blob/File, ...options }\n let blob: Blob;\n let extraParams: OpenAITranscriptionParams | null = null;\n\n if (input instanceof Blob || input instanceof File) {\n blob = input as Blob;\n } else if (\n typeof input === \"object\" &&\n input !== null &&\n (input as any).audio != null\n ) {\n const params = input as any;\n if (\n !(params.audio instanceof Blob) &&\n !(params.audio instanceof File)\n ) {\n throw new Error(\n \"TRANSCRIPTION param 'audio' must be a Blob/File. Wrap buffers as: new Blob([buffer], { type: 'audio/mpeg' })\",\n );\n }\n blob = params.audio as Blob;\n extraParams = params as OpenAITranscriptionParams;\n if (typeof params.model === \"string\" && params.model) {\n modelName = params.model;\n }\n } else {\n throw new Error(\n \"TRANSCRIPTION expects a Blob/File or an object { audio: Blob/File, language?, response_format?, timestampGranularities?, prompt?, temperature?, model? }\",\n );\n }\n\n const mime = (blob as File).type || \"audio/webm\";\n const filename =\n (blob as File).name ||\n (mime.includes(\"mp3\") || mime.includes(\"mpeg\")\n ? \"recording.mp3\"\n : mime.includes(\"ogg\")\n ? \"recording.ogg\"\n : mime.includes(\"wav\")\n ? \"recording.wav\"\n : mime.includes(\"webm\")\n ? \"recording.webm\"\n : \"recording.bin\");\n\n const formData = new FormData();\n formData.append(\"file\", blob, filename);\n formData.append(\"model\", String(modelName));\n if (extraParams) {\n if (typeof extraParams.language === \"string\") {\n formData.append(\"language\", String(extraParams.language));\n }\n if (typeof extraParams.response_format === \"string\") {\n formData.append(\n \"response_format\",\n String(extraParams.response_format),\n );\n }\n if (typeof extraParams.prompt === \"string\") {\n formData.append(\"prompt\", String(extraParams.prompt));\n }\n if (typeof extraParams.temperature === \"number\") {\n formData.append(\"temperature\", String(extraParams.temperature));\n }\n if (Array.isArray(extraParams.timestampGranularities)) {\n for (const g of extraParams.timestampGranularities) {\n formData.append(\"timestamp_granularities[]\", String(g));\n }\n }\n }\n\n try {\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: \"POST\",\n headers: {\n ...getAuthHeader(runtime),\n },\n body: formData,\n });\n\n if (!response.ok) {\n throw new Error(\n `Failed to transcribe audio: ${response.status} ${response.statusText}`,\n );\n }\n\n const data = (await response.json()) as { text: string };\n return data.text || \"\";\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`TRANSCRIPTION error: ${message}`);\n throw error;\n }\n },\n [ModelType.TEXT_TO_SPEECH]: async (\n runtime: IAgentRuntime,\n input: string | OpenAITextToSpeechParams,\n ) => {\n // Normalize input into options with per-call overrides\n const options: OpenAITextToSpeechParams =\n typeof input === \"string\"\n ? 
{ text: input }\n : (input as OpenAITextToSpeechParams);\n\n const resolvedModel =\n options.model ||\n (getSetting(runtime, \"OPENAI_TTS_MODEL\", \"gpt-4o-mini-tts\") as string);\n logger.log(`[OpenAI] Using TEXT_TO_SPEECH model: ${resolvedModel}`);\n try {\n const speechStream = await fetchTextToSpeech(runtime, options);\n return speechStream;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in TEXT_TO_SPEECH: ${message}`);\n throw error;\n }\n },\n [ModelType.OBJECT_SMALL]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n ) => {\n return generateObjectByModelType(\n runtime,\n params,\n ModelType.OBJECT_SMALL,\n getSmallModel,\n );\n },\n [ModelType.OBJECT_LARGE]: async (\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n ) => {\n return generateObjectByModelType(\n runtime,\n params,\n ModelType.OBJECT_LARGE,\n getLargeModel,\n );\n },\n },\n tests: [\n {\n name: \"openai_plugin_tests\",\n tests: [\n {\n name: \"openai_test_url_and_api_key_validation\",\n fn: async (runtime: IAgentRuntime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n });\n const data = await response.json();\n logger.log(\n { data: (data as { data?: unknown[] })?.data?.length ?? \"N/A\" },\n \"Models Available\",\n );\n if (!response.ok) {\n throw new Error(\n `Failed to validate OpenAI API key: ${response.statusText}`,\n );\n }\n },\n },\n {\n name: \"openai_test_text_embedding\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const embedding = await runtime.useModel(\n ModelType.TEXT_EMBEDDING,\n {\n text: \"Hello, world!\",\n },\n );\n logger.log({ embedding }, \"embedding\");\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_embedding: ${message}`);\n throw error;\n }\n },\n },\n {\n name: \"openai_test_text_large\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: \"What is the nature of reality in 10 words?\",\n });\n if (text.length === 0) {\n throw new Error(\"Failed to generate text\");\n }\n logger.log({ text }, \"generated with test_text_large\");\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_large: ${message}`);\n throw error;\n }\n },\n },\n {\n name: \"openai_test_text_small\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: \"What is the nature of reality in 10 words?\",\n });\n if (text.length === 0) {\n throw new Error(\"Failed to generate text\");\n }\n logger.log({ text }, \"generated with test_text_small\");\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_small: ${message}`);\n throw error;\n }\n },\n },\n {\n name: \"openai_test_image_generation\",\n fn: async (runtime: IAgentRuntime) => {\n logger.log(\"openai_test_image_generation\");\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: \"A beautiful sunset over a calm ocean\",\n n: 1,\n size: \"1024x1024\",\n });\n logger.log({ image }, \"generated with test_image_generation\");\n } catch (error: unknown) {\n const message =\n error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in test_image_generation: ${message}`);\n throw error;\n }\n },\n },\n {\n name: \"image-description\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n logger.log(\"openai_test_image_description\");\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n \"https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg\",\n );\n\n if (\n result &&\n typeof result === \"object\" &&\n \"title\" in result &&\n \"description\" in result\n ) {\n logger.log({ result }, \"Image description\");\n } else {\n logger.error(\n \"Invalid image description result format:\",\n result,\n );\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in image description test: ${message}`);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(\n `Error in openai_test_image_description: ${message}`,\n );\n }\n },\n },\n {\n name: \"openai_test_transcription\",\n fn: async (runtime: IAgentRuntime) => {\n logger.log(\"openai_test_transcription\");\n try {\n const response = await fetch(\n \"https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg\",\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer)),\n );\n logger.log(\n { transcription },\n \"generated with test_transcription\",\n );\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_transcription: ${message}`);\n throw error;\n }\n },\n },\n {\n name: \"openai_test_text_tokenizer_encode\",\n fn: async (runtime: IAgentRuntime) => {\n const prompt = \"Hello tokenizer encode!\";\n const tokens = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_ENCODE,\n { prompt },\n );\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error(\n \"Failed to tokenize text: expected non-empty array of tokens\",\n );\n }\n logger.log({ tokens }, \"Tokenized output\");\n },\n },\n {\n name: \"openai_test_text_tokenizer_decode\",\n fn: async (runtime: IAgentRuntime) => {\n const prompt = \"Hello tokenizer decode!\";\n const tokens = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_ENCODE,\n { prompt },\n );\n const decodedText = await runtime.useModel(\n ModelType.TEXT_TOKENIZER_DECODE,\n { tokens },\n );\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. Expected \"${prompt}\", got \"${decodedText}\"`,\n );\n }\n logger.log({ decodedText }, \"Decoded text\");\n },\n },\n {\n name: \"openai_test_text_to_speech\",\n fn: async (runtime: IAgentRuntime) => {\n try {\n const response = await fetchTextToSpeech(runtime, {\n text: \"Hello, this is a test for text-to-speech.\",\n });\n if (!response) {\n throw new Error(\"Failed to generate speech\");\n }\n logger.log(\"Generated speech successfully\");\n } catch (error: unknown) {\n const message =\n error instanceof Error ? error.message : String(error);\n logger.error(`Error in openai_test_text_to_speech: ${message}`);\n throw error;\n }\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"
+ "import { createOpenAI } from '@ai-sdk/openai';\nimport type {\n DetokenizeTextParams,\n GenerateTextParams,\n IAgentRuntime,\n ImageDescriptionParams,\n ModelTypeName,\n ObjectGenerationParams,\n Plugin,\n TextEmbeddingParams,\n TokenizeTextParams,\n} from '@elizaos/core';\nimport { EventType, logger, ModelType, VECTOR_DIMS } from '@elizaos/core';\nimport {\n generateObject,\n generateText,\n JSONParseError,\n type JSONValue,\n type LanguageModelUsage,\n} from 'ai';\nimport { encodingForModel, type TiktokenModel } from 'js-tiktoken';\n\nexport interface OpenAITranscriptionParams {\n audio: Blob | File | Buffer;\n model?: string;\n language?: string;\n response_format?: string;\n prompt?: string;\n temperature?: number;\n timestampGranularities?: string[];\n mimeType?: string; // MIME type for Buffer audio data (e.g., 'audio/wav', 'audio/mp3', 'audio/webm')\n}\n\nexport interface OpenAITextToSpeechParams {\n text: string;\n model?: string;\n voice?: string;\n format?: 'mp3' | 'wav' | 'flac' | string;\n instructions?: string;\n}\n\n/**\n * Retrieves a configuration setting from the runtime, falling back to environment variables or a default value if not found.\n *\n * @param key - The name of the setting to retrieve.\n * @param defaultValue - The value to return if the setting is not found in the runtime or environment.\n * @returns The resolved setting value, or {@link defaultValue} if not found.\n */\nfunction getSetting(\n runtime: IAgentRuntime,\n key: string,\n defaultValue?: string\n): string | undefined {\n return runtime.getSetting(key) ?? process.env[key] ?? defaultValue;\n}\n\nfunction isBrowser(): boolean {\n return typeof globalThis !== 'undefined' && typeof (globalThis as any).document !== 'undefined';\n}\n\n/**\n * Determines whether we're running in a browser with a server-hosted proxy configured.\n * In this mode, we do not require a real API key on the client and rely on the proxy to inject it.\n */\nfunction isProxyMode(runtime: IAgentRuntime): boolean {\n return isBrowser() && !!getSetting(runtime, 'OPENAI_BROWSER_BASE_URL');\n}\n\nfunction getAuthHeader(runtime: IAgentRuntime, forEmbedding = false): Record<string, string> {\n if (isBrowser()) return {};\n const key = forEmbedding ? getEmbeddingApiKey(runtime) : getApiKey(runtime);\n return key ? { Authorization: `Bearer ${key}` } : {};\n}\n\n/**\n * Retrieves the OpenAI API base URL from runtime settings, environment variables, or defaults, using provider-aware resolution.\n *\n * @returns The resolved base URL for OpenAI API requests.\n */\nfunction getBaseURL(runtime: IAgentRuntime): string {\n const browserURL = getSetting(runtime, 'OPENAI_BROWSER_BASE_URL');\n const baseURL = (\n isBrowser() && browserURL\n ? browserURL\n : getSetting(runtime, 'OPENAI_BASE_URL', 'https://api.openai.com/v1')\n ) as string;\n logger.debug(`[OpenAI] Default base URL: ${baseURL}`);\n return baseURL;\n}\n\n/**\n * Retrieves the OpenAI API base URL for embeddings, falling back to the general base URL.\n *\n * @returns The resolved base URL for OpenAI embedding requests.\n */\nfunction getEmbeddingBaseURL(runtime: IAgentRuntime): string {\n const embeddingURL = isBrowser()\n ? 
getSetting(runtime, 'OPENAI_BROWSER_EMBEDDING_URL') ||\n getSetting(runtime, 'OPENAI_BROWSER_BASE_URL')\n : getSetting(runtime, 'OPENAI_EMBEDDING_URL');\n if (embeddingURL) {\n logger.debug(`[OpenAI] Using specific embedding base URL: ${embeddingURL}`);\n return embeddingURL;\n }\n logger.debug('[OpenAI] Falling back to general base URL for embeddings.');\n return getBaseURL(runtime);\n}\n\n/**\n * Helper function to get the API key for OpenAI\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getApiKey(runtime: IAgentRuntime): string | undefined {\n return getSetting(runtime, 'OPENAI_API_KEY');\n}\n\n/**\n * Helper function to get the embedding API key for OpenAI, falling back to the general API key if not set.\n *\n * @param runtime The runtime context\n * @returns The configured API key\n */\nfunction getEmbeddingApiKey(runtime: IAgentRuntime): string | undefined {\n const embeddingApiKey = getSetting(runtime, 'OPENAI_EMBEDDING_API_KEY');\n if (embeddingApiKey) {\n logger.debug('[OpenAI] Using specific embedding API key (present)');\n return embeddingApiKey;\n }\n logger.debug('[OpenAI] Falling back to general API key for embeddings.');\n return getApiKey(runtime);\n}\n\n/**\n * Helper function to get the small model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured small model name\n */\nfunction getSmallModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_SMALL_MODEL') ??\n (getSetting(runtime, 'SMALL_MODEL', 'gpt-5-nano') as string)\n );\n}\n\n/**\n * Helper function to get the large model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured large model name\n */\nfunction getLargeModel(runtime: IAgentRuntime): string {\n return (\n getSetting(runtime, 'OPENAI_LARGE_MODEL') ??\n (getSetting(runtime, 'LARGE_MODEL', 'gpt-5-mini') as string)\n );\n}\n\n/**\n * Helper function to get the image description model name with fallbacks\n *\n * @param runtime The runtime context\n * @returns The configured image description model name\n */\nfunction getImageDescriptionModel(runtime: IAgentRuntime): string {\n return getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MODEL', 'gpt-5-nano') ?? 'gpt-5-nano';\n}\n\n/**\n * Helper function to get experimental telemetry setting\n *\n * @param runtime The runtime context\n * @returns Whether experimental telemetry is enabled\n */\nfunction getExperimentalTelemetry(runtime: IAgentRuntime): boolean {\n const setting = getSetting(runtime, 'OPENAI_EXPERIMENTAL_TELEMETRY', 'false');\n // Convert to string and check for truthy values\n const normalizedSetting = String(setting).toLowerCase();\n const result = normalizedSetting === 'true';\n logger.debug(\n `[OpenAI] Experimental telemetry in function: \"${setting}\" (type: ${typeof setting}, normalized: \"${normalizedSetting}\", result: ${result})`\n );\n return result;\n}\n\n/**\n * Create an OpenAI client with proper configuration\n *\n * @param runtime The runtime context\n * @returns Configured OpenAI client\n */\nfunction createOpenAIClient(runtime: IAgentRuntime) {\n const baseURL = getBaseURL(runtime);\n // In proxy mode (browser + proxy base URL), pass a harmless placeholder key.\n // The server proxy replaces Authorization; no secrets leave the server.\n const apiKey = getApiKey(runtime) ?? (isProxyMode(runtime) ? 'sk-proxy' : undefined);\n return createOpenAI({ apiKey: (apiKey ?? 
'') as string, baseURL });\n}\n\n/**\n * Asynchronously tokenizes the given text based on the specified model and prompt.\n *\n * @param {ModelTypeName} model - The type of model to use for tokenization.\n * @param {string} prompt - The text prompt to tokenize.\n * @returns {number[]} - An array of tokens representing the encoded prompt.\n */\nasync function tokenizeText(model: ModelTypeName, prompt: string) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-5-nano')\n : (process.env.LARGE_MODEL ?? 'gpt-5-mini');\n const tokens = encodingForModel(modelName as TiktokenModel).encode(prompt);\n return tokens;\n}\n\n/**\n * Detokenize a sequence of tokens back into text using the specified model.\n *\n * @param {ModelTypeName} model - The type of model to use for detokenization.\n * @param {number[]} tokens - The sequence of tokens to detokenize.\n * @returns {string} The detokenized text.\n */\nasync function detokenizeText(model: ModelTypeName, tokens: number[]) {\n const modelName =\n model === ModelType.TEXT_SMALL\n ? (process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? 'gpt-5-nano')\n : (process.env.OPENAI_LARGE_MODEL ?? process.env.LARGE_MODEL ?? 'gpt-5-mini');\n return encodingForModel(modelName as TiktokenModel).decode(tokens);\n}\n\n/**\n * Helper function to generate objects using specified model type\n */\nasync function generateObjectByModelType(\n runtime: IAgentRuntime,\n params: ObjectGenerationParams,\n modelType: string,\n getModelFn: (runtime: IAgentRuntime) => string\n): Promise<JSONValue> {\n const openai = createOpenAIClient(runtime);\n const modelName = getModelFn(runtime);\n logger.log(`[OpenAI] Using ${modelType} model: ${modelName}`);\n const temperature = params.temperature ?? 0;\n const schemaPresent = !!params.schema;\n\n if (schemaPresent) {\n logger.info(\n `Using ${modelType} without schema validation (schema provided but output=no-schema)`\n );\n }\n\n try {\n const { object, usage } = await generateObject({\n model: openai.languageModel(modelName),\n output: 'no-schema',\n prompt: params.prompt,\n temperature: temperature,\n experimental_repairText: getJsonRepairFunction(),\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, modelType as ModelTypeName, params.prompt, usage);\n }\n return object;\n } catch (error: unknown) {\n if (error instanceof JSONParseError) {\n logger.error(`[generateObject] Failed to parse JSON: ${error.message}`);\n\n const repairFunction = getJsonRepairFunction();\n const repairedJsonString = await repairFunction({\n text: error.text,\n error,\n });\n\n if (repairedJsonString) {\n try {\n const repairedObject = JSON.parse(repairedJsonString);\n logger.info('[generateObject] Successfully repaired JSON.');\n return repairedObject;\n } catch (repairParseError: unknown) {\n const message =\n repairParseError instanceof Error ? repairParseError.message : String(repairParseError);\n logger.error(`[generateObject] Failed to parse repaired JSON: ${message}`);\n throw repairParseError;\n }\n } else {\n logger.error('[generateObject] JSON repair failed.');\n throw error;\n }\n } else {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`[generateObject] Unknown error: ${message}`);\n throw error;\n }\n }\n}\n\n/**\n * Returns a function to repair JSON text\n */\nfunction getJsonRepairFunction(): (params: {\n text: string;\n error: unknown;\n}) => Promise<string | null> {\n return async ({ text, error }: { text: string; error: unknown }) => {\n try {\n if (error instanceof JSONParseError) {\n const cleanedText = text.replace(/```json\\n|\\n```|```/g, '');\n JSON.parse(cleanedText);\n return cleanedText;\n }\n return null;\n } catch (jsonError: unknown) {\n const message = jsonError instanceof Error ? jsonError.message : String(jsonError);\n logger.warn(`Failed to repair JSON text: ${message}`);\n return null;\n }\n };\n}\n\n/**\n * Emits a model usage event\n * @param runtime The runtime context\n * @param type The model type\n * @param prompt The prompt used\n * @param usage The LLM usage data\n */\nfunction emitModelUsageEvent(\n runtime: IAgentRuntime,\n type: ModelTypeName,\n prompt: string,\n usage: LanguageModelUsage\n) {\n runtime.emitEvent(EventType.MODEL_USED, {\n provider: 'openai',\n type,\n prompt,\n tokens: {\n prompt: usage.inputTokens,\n completion: usage.outputTokens,\n total: usage.totalTokens,\n },\n });\n}\n\n/**\n * Detects audio MIME type from buffer by checking magic bytes (file signature)\n * @param buffer The audio buffer to analyze\n * @returns The detected MIME type or 'application/octet-stream' if unknown\n */\nfunction detectAudioMimeType(buffer: Buffer): string {\n if (buffer.length < 12) {\n return 'application/octet-stream';\n }\n\n // Check magic bytes for common audio formats\n // WAV: \"RIFF\" + size + \"WAVE\"\n if (\n buffer[0] === 0x52 &&\n buffer[1] === 0x49 &&\n buffer[2] === 0x46 &&\n buffer[3] === 0x46 &&\n buffer[8] === 0x57 &&\n buffer[9] === 0x41 &&\n buffer[10] === 0x56 &&\n buffer[11] === 0x45\n ) {\n return 'audio/wav';\n }\n\n // MP3: ID3 tag or MPEG frame sync\n if (\n (buffer[0] === 0x49 && buffer[1] === 0x44 && buffer[2] === 0x33) || // ID3\n (buffer[0] === 0xff && (buffer[1] & 0xe0) === 0xe0) // MPEG sync\n ) {\n return 'audio/mpeg';\n }\n\n // OGG: \"OggS\"\n if (buffer[0] === 0x4f && buffer[1] === 0x67 && buffer[2] === 0x67 && buffer[3] === 0x53) {\n return 'audio/ogg';\n }\n\n // FLAC: \"fLaC\"\n if (buffer[0] === 0x66 && buffer[1] === 0x4c && buffer[2] === 0x61 && buffer[3] === 0x43) {\n return 'audio/flac';\n }\n\n // M4A/MP4: \"ftyp\" at offset 4\n if (buffer[4] === 0x66 && buffer[5] === 0x74 && buffer[6] === 0x79 && buffer[7] === 0x70) {\n return 'audio/mp4';\n }\n\n // WebM: EBML header\n if (buffer[0] === 0x1a && buffer[1] === 0x45 && buffer[2] === 0xdf && buffer[3] === 0xa3) {\n return 'audio/webm';\n }\n\n // Unknown format - let API try to detect\n logger.warn('Could not detect audio format from buffer, using generic binary type');\n return 'application/octet-stream';\n}\n\n/**\n * Converts a Web ReadableStream to a Node.js Readable stream\n * Handles both browser and Node.js environments\n * Uses dynamic import to avoid bundling node:stream in browser builds\n */\nasync function webStreamToNodeStream(webStream: ReadableStream<Uint8Array>) {\n try {\n // Dynamic import to avoid browser bundling issues\n const { Readable } = await import('node:stream');\n const reader = webStream.getReader();\n\n return new Readable({\n async read() {\n try {\n const { done, value } = await reader.read();\n if (done) {\n this.push(null);\n } else {\n // Push the Uint8Array directly; Node.js Readable can handle it\n 
this.push(value);\n }\n } catch (error) {\n this.destroy(error as Error);\n }\n },\n destroy(error, callback) {\n reader.cancel().finally(() => callback(error));\n },\n });\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Failed to load node:stream module: ${message}`);\n throw new Error(\n `Cannot convert stream: node:stream module unavailable. This feature requires a Node.js environment.`\n );\n }\n}\n\n/**\n * function for text-to-speech\n */\nasync function fetchTextToSpeech(runtime: IAgentRuntime, options: OpenAITextToSpeechParams) {\n const defaultModel = getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts');\n const defaultVoice = getSetting(runtime, 'OPENAI_TTS_VOICE', 'nova');\n const defaultInstructions = getSetting(runtime, 'OPENAI_TTS_INSTRUCTIONS', '');\n const baseURL = getBaseURL(runtime);\n\n const model = options.model || (defaultModel as string);\n const voice = options.voice || (defaultVoice as string);\n const instructions = options.instructions ?? (defaultInstructions as string);\n const format = options.format || 'mp3';\n\n try {\n const res = await fetch(`${baseURL}/audio/speech`, {\n method: 'POST',\n headers: {\n ...getAuthHeader(runtime),\n 'Content-Type': 'application/json',\n // Hint desired audio format in Accept when possible\n ...(format === 'mp3' ? { Accept: 'audio/mpeg' } : {}),\n },\n body: JSON.stringify({\n model,\n voice,\n input: options.text,\n format,\n ...(instructions && { instructions }),\n }),\n });\n\n if (!res.ok) {\n const err = await res.text();\n throw new Error(`OpenAI TTS error ${res.status}: ${err}`);\n }\n\n // Ensure response body exists\n if (!res.body) {\n throw new Error('OpenAI TTS response body is null');\n }\n\n // In Node.js, convert Web ReadableStream to Node.js Readable\n // In browser, return the Web ReadableStream directly\n if (!isBrowser()) {\n return await webStreamToNodeStream(res.body);\n }\n\n return res.body;\n } catch (err: unknown) {\n const message = err instanceof Error ? 
err.message : String(err);\n throw new Error(`Failed to fetch speech from OpenAI TTS: ${message}`);\n }\n}\n\n/**\n * Defines the OpenAI plugin with its name, description, and configuration options.\n * @type {Plugin}\n */\nexport const openaiPlugin: Plugin = {\n name: 'openai',\n description: 'OpenAI plugin',\n config: {\n OPENAI_API_KEY: process.env.OPENAI_API_KEY,\n OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,\n OPENAI_SMALL_MODEL: process.env.OPENAI_SMALL_MODEL,\n OPENAI_LARGE_MODEL: process.env.OPENAI_LARGE_MODEL,\n SMALL_MODEL: process.env.SMALL_MODEL,\n LARGE_MODEL: process.env.LARGE_MODEL,\n OPENAI_EMBEDDING_MODEL: process.env.OPENAI_EMBEDDING_MODEL,\n OPENAI_EMBEDDING_API_KEY: process.env.OPENAI_EMBEDDING_API_KEY,\n OPENAI_EMBEDDING_URL: process.env.OPENAI_EMBEDDING_URL,\n OPENAI_EMBEDDING_DIMENSIONS: process.env.OPENAI_EMBEDDING_DIMENSIONS,\n OPENAI_IMAGE_DESCRIPTION_MODEL: process.env.OPENAI_IMAGE_DESCRIPTION_MODEL,\n OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS: process.env.OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS,\n OPENAI_EXPERIMENTAL_TELEMETRY: process.env.OPENAI_EXPERIMENTAL_TELEMETRY,\n },\n async init(_config, runtime) {\n // do check in the background\n new Promise<void>(async (resolve) => {\n resolve();\n try {\n if (!getApiKey(runtime) && !isBrowser()) {\n logger.warn(\n 'OPENAI_API_KEY is not set in environment - OpenAI functionality will be limited'\n );\n return;\n }\n try {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: { ...getAuthHeader(runtime) },\n });\n if (!response.ok) {\n logger.warn(`OpenAI API key validation failed: ${response.statusText}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n } else {\n logger.log('OpenAI API key validated successfully');\n }\n } catch (fetchError: unknown) {\n const message = fetchError instanceof Error ? fetchError.message : String(fetchError);\n logger.warn(`Error validating OpenAI API key: ${message}`);\n logger.warn('OpenAI functionality will be limited until a valid API key is provided');\n }\n } catch (error: unknown) {\n const message =\n (error as { errors?: Array<{ message: string }> })?.errors\n ?.map((e) => e.message)\n .join(', ') || (error instanceof Error ? error.message : String(error));\n logger.warn(\n `OpenAI plugin configuration issue: ${message} - You need to configure the OPENAI_API_KEY in your environment variables`\n );\n }\n });\n },\n\n models: {\n [ModelType.TEXT_EMBEDDING]: async (\n runtime: IAgentRuntime,\n params: TextEmbeddingParams | string | null\n ): Promise<number[]> => {\n const embeddingModelName = getSetting(\n runtime,\n 'OPENAI_EMBEDDING_MODEL',\n 'text-embedding-3-small'\n );\n const embeddingDimension = Number.parseInt(\n getSetting(runtime, 'OPENAI_EMBEDDING_DIMENSIONS', '1536') || '1536',\n 10\n ) as (typeof VECTOR_DIMS)[keyof typeof VECTOR_DIMS];\n\n if (!Object.values(VECTOR_DIMS).includes(embeddingDimension)) {\n const errorMsg = `Invalid embedding dimension: ${embeddingDimension}. 
Must be one of: ${Object.values(VECTOR_DIMS).join(', ')}`;\n logger.error(errorMsg);\n throw new Error(errorMsg);\n }\n if (params === null) {\n logger.debug('Creating test embedding for initialization');\n const testVector = Array(embeddingDimension).fill(0);\n testVector[0] = 0.1;\n return testVector;\n }\n let text: string;\n if (typeof params === 'string') {\n text = params;\n } else if (typeof params === 'object' && params.text) {\n text = params.text;\n } else {\n logger.warn('Invalid input format for embedding');\n const fallbackVector = Array(embeddingDimension).fill(0);\n fallbackVector[0] = 0.2;\n return fallbackVector;\n }\n if (!text.trim()) {\n logger.warn('Empty text for embedding');\n const emptyVector = Array(embeddingDimension).fill(0);\n emptyVector[0] = 0.3;\n return emptyVector;\n }\n\n const embeddingBaseURL = getEmbeddingBaseURL(runtime);\n\n try {\n const response = await fetch(`${embeddingBaseURL}/embeddings`, {\n method: 'POST',\n headers: {\n ...getAuthHeader(runtime, true),\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: embeddingModelName,\n input: text,\n }),\n });\n\n // Clone available if needed for logging/debugging\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n logger.error(`OpenAI API error: ${response.status} - ${response.statusText}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.4;\n return errorVector;\n }\n\n const data = (await response.json()) as {\n data: [{ embedding: number[] }];\n usage?: { prompt_tokens: number; total_tokens: number };\n };\n\n if (!data?.data?.[0]?.embedding) {\n logger.error('API returned invalid structure');\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.5;\n return errorVector;\n }\n\n const embedding = data.data[0].embedding;\n\n if (data.usage) {\n const usage = {\n inputTokens: data.usage.prompt_tokens,\n outputTokens: 0,\n totalTokens: data.usage.total_tokens,\n };\n\n emitModelUsageEvent(runtime, ModelType.TEXT_EMBEDDING, text, usage);\n }\n\n logger.log(`Got valid embedding with length ${embedding.length}`);\n return embedding;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error generating embedding: ${message}`);\n const errorVector = Array(embeddingDimension).fill(0);\n errorVector[0] = 0.6;\n return errorVector;\n }\n },\n [ModelType.TEXT_TOKENIZER_ENCODE]: async (\n _runtime,\n { prompt, modelType = ModelType.TEXT_LARGE }: TokenizeTextParams\n ) => {\n return await tokenizeText(modelType ?? ModelType.TEXT_LARGE, prompt);\n },\n [ModelType.TEXT_TOKENIZER_DECODE]: async (\n _runtime,\n { tokens, modelType = ModelType.TEXT_LARGE }: DetokenizeTextParams\n ) => {\n return await detokenizeText(modelType ?? ModelType.TEXT_LARGE, tokens);\n },\n [ModelType.TEXT_SMALL]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getSmallModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_SMALL model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? 
undefined,\n temperature: temperature,\n maxOutputTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_SMALL, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.TEXT_LARGE]: async (\n runtime: IAgentRuntime,\n {\n prompt,\n stopSequences = [],\n maxTokens = 8192,\n temperature = 0.7,\n frequencyPenalty = 0.7,\n presencePenalty = 0.7,\n }: GenerateTextParams\n ) => {\n const openai = createOpenAIClient(runtime);\n const modelName = getLargeModel(runtime);\n const experimentalTelemetry = getExperimentalTelemetry(runtime);\n\n logger.log(`[OpenAI] Using TEXT_LARGE model: ${modelName}`);\n logger.log(prompt);\n\n const { text: openaiResponse, usage } = await generateText({\n model: openai.languageModel(modelName),\n prompt: prompt,\n system: runtime.character.system ?? undefined,\n temperature: temperature,\n maxOutputTokens: maxTokens,\n frequencyPenalty: frequencyPenalty,\n presencePenalty: presencePenalty,\n stopSequences: stopSequences,\n experimental_telemetry: {\n isEnabled: experimentalTelemetry,\n },\n });\n\n if (usage) {\n emitModelUsageEvent(runtime, ModelType.TEXT_LARGE, prompt, usage);\n }\n\n return openaiResponse;\n },\n [ModelType.IMAGE]: async (\n runtime: IAgentRuntime,\n params: {\n prompt: string;\n n?: number;\n size?: string;\n }\n ) => {\n const n = params.n || 1;\n const size = params.size || '1024x1024';\n const prompt = params.prompt;\n const modelName = 'gpt-image-1'; // Updated image model\n logger.log(`[OpenAI] Using IMAGE model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n\n try {\n const response = await fetch(`${baseURL}/images/generations`, {\n method: 'POST',\n headers: {\n ...getAuthHeader(runtime),\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify({\n model: modelName,\n prompt: prompt,\n n: n,\n size: size,\n }),\n });\n\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n throw new Error(`Failed to generate image: ${response.statusText}`);\n }\n\n const data = await response.json();\n const typedData = data as { data: { url: string }[] };\n\n return typedData.data;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n throw error;\n }\n },\n [ModelType.IMAGE_DESCRIPTION]: async (\n runtime: IAgentRuntime,\n params: ImageDescriptionParams | string\n ) => {\n let imageUrl: string;\n let promptText: string | undefined;\n const modelName = getImageDescriptionModel(runtime);\n logger.log(`[OpenAI] Using IMAGE_DESCRIPTION model: ${modelName}`);\n const maxTokens = Number.parseInt(\n getSetting(runtime, 'OPENAI_IMAGE_DESCRIPTION_MAX_TOKENS', '8192') || '8192',\n 10\n );\n\n if (typeof params === 'string') {\n imageUrl = params;\n promptText = 'Please analyze this image and provide a title and detailed description.';\n } else {\n imageUrl = params.imageUrl;\n promptText =\n params.prompt ||\n 'Please analyze this image and provide a title and detailed description.';\n }\n\n const messages = [\n {\n role: 'user',\n content: [\n { type: 'text', text: promptText },\n { type: 'image_url', image_url: { url: imageUrl } },\n ],\n },\n ];\n\n const baseURL = getBaseURL(runtime);\n\n try {\n const requestBody: Record<string, any> = {\n model: modelName,\n messages: messages,\n max_tokens: maxTokens,\n };\n\n const response = await fetch(`${baseURL}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...getAuthHeader(runtime),\n },\n body: JSON.stringify(requestBody),\n });\n\n // const debugText = await response.clone().text().catch(() => \"\");\n\n if (!response.ok) {\n throw new Error(`OpenAI API error: ${response.status}`);\n }\n\n const result: unknown = await response.json();\n\n type OpenAIResponseType = {\n choices?: Array<{\n message?: { content?: string };\n finish_reason?: string;\n }>;\n usage?: {\n prompt_tokens: number;\n completion_tokens: number;\n total_tokens: number;\n };\n };\n\n const typedResult = result as OpenAIResponseType;\n const content = typedResult.choices?.[0]?.message?.content;\n\n if (typedResult.usage) {\n emitModelUsageEvent(\n runtime,\n ModelType.IMAGE_DESCRIPTION,\n typeof params === 'string' ? params : params.prompt || '',\n {\n inputTokens: typedResult.usage.prompt_tokens,\n outputTokens: typedResult.usage.completion_tokens,\n totalTokens: typedResult.usage.total_tokens,\n }\n );\n }\n\n if (!content) {\n return {\n title: 'Failed to analyze image',\n description: 'No response from API',\n };\n }\n\n // Check if a custom prompt was provided (not the default prompt)\n const isCustomPrompt =\n typeof params === 'object' &&\n params.prompt &&\n params.prompt !==\n 'Please analyze this image and provide a title and detailed description.';\n\n // If custom prompt is used, return the raw content\n if (isCustomPrompt) {\n return content;\n }\n\n // Otherwise, maintain backwards compatibility with object return\n const titleMatch = content.match(/title[:\\s]+(.+?)(?:\\n|$)/i);\n const title = titleMatch?.[1]?.trim() || 'Image Analysis';\n const description = content.replace(/title[:\\s]+(.+?)(?:\\n|$)/i, '').trim();\n\n const processedResult = { title, description };\n return processedResult;\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error analyzing image: ${message}`);\n return {\n title: 'Failed to analyze image',\n description: `Error: ${message}`,\n };\n }\n },\n [ModelType.TRANSCRIPTION]: async (\n runtime: IAgentRuntime,\n input: Blob | File | Buffer | OpenAITranscriptionParams\n ) => {\n let modelName = getSetting(runtime, 'OPENAI_TRANSCRIPTION_MODEL', 'gpt-4o-mini-transcribe');\n logger.log(`[OpenAI] Using TRANSCRIPTION model: ${modelName}`);\n\n const baseURL = getBaseURL(runtime);\n\n // Support Blob/File/Buffer directly, or an object with { audio: Blob/File/Buffer, ...options }\n let blob: Blob;\n let extraParams: OpenAITranscriptionParams | null = null;\n\n if (input instanceof Blob || input instanceof File) {\n blob = input as Blob;\n } else if (Buffer.isBuffer(input)) {\n // Convert Buffer to Blob for Node.js environments\n // Auto-detect MIME type from buffer content\n const detectedMimeType = detectAudioMimeType(input);\n logger.debug(`Auto-detected audio MIME type: ${detectedMimeType}`);\n // Cast to any to satisfy TypeScript's strict ArrayBufferLike typing\n // Note: Blob constructor creates a copy of the buffer data\n blob = new Blob([input] as any, { type: detectedMimeType });\n } else if (typeof input === 'object' && input !== null && (input as any).audio != null) {\n const params = input as any;\n if (\n !(params.audio instanceof Blob) &&\n !(params.audio instanceof File) &&\n !Buffer.isBuffer(params.audio)\n ) {\n throw new Error(\"TRANSCRIPTION param 'audio' must be a Blob/File/Buffer.\");\n }\n // Convert Buffer to Blob if needed\n if (Buffer.isBuffer(params.audio)) {\n // Use provided mimeType or auto-detect from buffer\n let mimeType = params.mimeType;\n if (!mimeType) {\n mimeType = detectAudioMimeType(params.audio);\n logger.debug(`Auto-detected audio MIME type: ${mimeType}`);\n } else {\n logger.debug(`Using provided MIME type: ${mimeType}`);\n }\n // Cast to any to satisfy TypeScript's strict ArrayBufferLike typing\n // Note: Blob constructor creates a copy of the buffer data\n blob = new Blob([params.audio] as any, { type: mimeType });\n } else {\n blob = params.audio as Blob;\n }\n extraParams = params as OpenAITranscriptionParams;\n if (typeof params.model === 'string' && params.model) {\n modelName = params.model;\n }\n } else {\n throw new Error(\n 'TRANSCRIPTION expects a Blob/File/Buffer or an object { audio: Blob/File/Buffer, mimeType?, language?, response_format?, timestampGranularities?, prompt?, temperature?, model? }'\n );\n }\n\n const mime = (blob as File).type || 'audio/webm';\n const filename =\n (blob as File).name ||\n (mime.includes('mp3') || mime.includes('mpeg')\n ? 'recording.mp3'\n : mime.includes('ogg')\n ? 'recording.ogg'\n : mime.includes('wav')\n ? 'recording.wav'\n : mime.includes('webm')\n ? 
'recording.webm'\n : 'recording.bin');\n\n const formData = new FormData();\n formData.append('file', blob, filename);\n formData.append('model', String(modelName));\n if (extraParams) {\n if (typeof extraParams.language === 'string') {\n formData.append('language', String(extraParams.language));\n }\n if (typeof extraParams.response_format === 'string') {\n formData.append('response_format', String(extraParams.response_format));\n }\n if (typeof extraParams.prompt === 'string') {\n formData.append('prompt', String(extraParams.prompt));\n }\n if (typeof extraParams.temperature === 'number') {\n formData.append('temperature', String(extraParams.temperature));\n }\n if (Array.isArray(extraParams.timestampGranularities)) {\n for (const g of extraParams.timestampGranularities) {\n formData.append('timestamp_granularities[]', String(g));\n }\n }\n }\n\n try {\n const response = await fetch(`${baseURL}/audio/transcriptions`, {\n method: 'POST',\n headers: {\n ...getAuthHeader(runtime),\n },\n body: formData,\n });\n\n if (!response.ok) {\n throw new Error(`Failed to transcribe audio: ${response.status} ${response.statusText}`);\n }\n\n const data = (await response.json()) as { text: string };\n return data.text || '';\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`TRANSCRIPTION error: ${message}`);\n throw error;\n }\n },\n [ModelType.TEXT_TO_SPEECH]: async (\n runtime: IAgentRuntime,\n input: string | OpenAITextToSpeechParams\n ) => {\n // Normalize input into options with per-call overrides\n const options: OpenAITextToSpeechParams =\n typeof input === 'string' ? { text: input } : (input as OpenAITextToSpeechParams);\n\n const resolvedModel =\n options.model || (getSetting(runtime, 'OPENAI_TTS_MODEL', 'gpt-4o-mini-tts') as string);\n logger.log(`[OpenAI] Using TEXT_TO_SPEECH model: ${resolvedModel}`);\n try {\n const speechStream = await fetchTextToSpeech(runtime, options);\n return speechStream;\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in TEXT_TO_SPEECH: ${message}`);\n throw error;\n }\n },\n [ModelType.OBJECT_SMALL]: async (runtime: IAgentRuntime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_SMALL, getSmallModel);\n },\n [ModelType.OBJECT_LARGE]: async (runtime: IAgentRuntime, params: ObjectGenerationParams) => {\n return generateObjectByModelType(runtime, params, ModelType.OBJECT_LARGE, getLargeModel);\n },\n },\n tests: [\n {\n name: 'openai_plugin_tests',\n tests: [\n {\n name: 'openai_test_url_and_api_key_validation',\n fn: async (runtime: IAgentRuntime) => {\n const baseURL = getBaseURL(runtime);\n const response = await fetch(`${baseURL}/models`, {\n headers: {\n Authorization: `Bearer ${getApiKey(runtime)}`,\n },\n });\n const data = await response.json();\n logger.log(\n { data: (data as { data?: unknown[] })?.data?.length ?? 'N/A' },\n 'Models Available'\n );\n if (!response.ok) {\n throw new Error(`Failed to validate OpenAI API key: ${response.statusText}`);\n }\n },\n },\n {\n name: 'openai_test_text_embedding',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const embedding = await runtime.useModel(ModelType.TEXT_EMBEDDING, {\n text: 'Hello, world!',\n });\n logger.log({ embedding }, 'embedding');\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in test_text_embedding: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_large',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_LARGE, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log({ text }, 'generated with test_text_large');\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_large: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_small',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const text = await runtime.useModel(ModelType.TEXT_SMALL, {\n prompt: 'What is the nature of reality in 10 words?',\n });\n if (text.length === 0) {\n throw new Error('Failed to generate text');\n }\n logger.log({ text }, 'generated with test_text_small');\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_text_small: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_image_generation',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_image_generation');\n try {\n const image = await runtime.useModel(ModelType.IMAGE, {\n prompt: 'A beautiful sunset over a calm ocean',\n n: 1,\n size: '1024x1024',\n });\n logger.log({ image }, 'generated with test_image_generation');\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in test_image_generation: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'image-description',\n fn: async (runtime: IAgentRuntime) => {\n try {\n logger.log('openai_test_image_description');\n try {\n const result = await runtime.useModel(\n ModelType.IMAGE_DESCRIPTION,\n 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1c/Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg/537px-Vitalik_Buterin_TechCrunch_London_2015_%28cropped%29.jpg'\n );\n\n if (\n result &&\n typeof result === 'object' &&\n 'title' in result &&\n 'description' in result\n ) {\n logger.log({ result }, 'Image description');\n } else {\n logger.error('Invalid image description result format:', result);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in image description test: ${message}`);\n }\n } catch (e: unknown) {\n const message = e instanceof Error ? e.message : String(e);\n logger.error(`Error in openai_test_image_description: ${message}`);\n }\n },\n },\n {\n name: 'openai_test_transcription',\n fn: async (runtime: IAgentRuntime) => {\n logger.log('openai_test_transcription');\n try {\n const response = await fetch(\n 'https://upload.wikimedia.org/wikipedia/en/4/40/Chris_Benoit_Voice_Message.ogg'\n );\n const arrayBuffer = await response.arrayBuffer();\n const transcription = await runtime.useModel(\n ModelType.TRANSCRIPTION,\n Buffer.from(new Uint8Array(arrayBuffer))\n );\n logger.log({ transcription }, 'generated with test_transcription');\n } catch (error: unknown) {\n const message = error instanceof Error ? 
error.message : String(error);\n logger.error(`Error in test_transcription: ${message}`);\n throw error;\n }\n },\n },\n {\n name: 'openai_test_text_tokenizer_encode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer encode!';\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n if (!Array.isArray(tokens) || tokens.length === 0) {\n throw new Error('Failed to tokenize text: expected non-empty array of tokens');\n }\n logger.log({ tokens }, 'Tokenized output');\n },\n },\n {\n name: 'openai_test_text_tokenizer_decode',\n fn: async (runtime: IAgentRuntime) => {\n const prompt = 'Hello tokenizer decode!';\n const tokens = await runtime.useModel(ModelType.TEXT_TOKENIZER_ENCODE, { prompt });\n const decodedText = await runtime.useModel(ModelType.TEXT_TOKENIZER_DECODE, { tokens });\n if (decodedText !== prompt) {\n throw new Error(\n `Decoded text does not match original. Expected \"${prompt}\", got \"${decodedText}\"`\n );\n }\n logger.log({ decodedText }, 'Decoded text');\n },\n },\n {\n name: 'openai_test_text_to_speech',\n fn: async (runtime: IAgentRuntime) => {\n try {\n const response = await fetchTextToSpeech(runtime, {\n text: 'Hello, this is a test for text-to-speech.',\n });\n if (!response) {\n throw new Error('Failed to generate speech');\n }\n logger.log('Generated speech successfully');\n } catch (error: unknown) {\n const message = error instanceof Error ? error.message : String(error);\n logger.error(`Error in openai_test_text_to_speech: ${message}`);\n throw error;\n }\n },\n },\n ],\n },\n ],\n};\nexport default openaiPlugin;\n"
  ],
- "mappings": ";AAAA;AAYA;AACA;AAAA;AAAA;AAAA;AAAA;AAOA;AA2BA,SAAS,UAAU,CACjB,SACA,KACA,cACoB;AAAA,EACpB,OAAO,QAAQ,WAAW,GAAG,KAAK,QAAQ,IAAI,QAAQ;AAAA;AAGxD,SAAS,SAAS,GAAY;AAAA,EAC5B,OACE,OAAO,eAAe,eACtB,OAAQ,WAAmB,aAAa;AAAA;AAQ5C,SAAS,WAAW,CAAC,SAAiC;AAAA,EACpD,OAAO,UAAU,KAAK,CAAC,CAAC,WAAW,SAAS,yBAAyB;AAAA;AAGvE,SAAS,aAAa,CACpB,SACA,eAAe,OACS;AAAA,EACxB,IAAI,UAAU;AAAA,IAAG,OAAO,CAAC;AAAA,EACzB,MAAM,MAAM,eAAe,mBAAmB,OAAO,IAAI,UAAU,OAAO;AAAA,EAC1E,OAAO,MAAM,EAAE,eAAe,UAAU,MAAM,IAAI,CAAC;AAAA;AAQrD,SAAS,UAAU,CAAC,SAAgC;AAAA,EAClD,MAAM,aAAa,WAAW,SAAS,yBAAyB;AAAA,EAChE,MAAM,UACJ,UAAU,KAAK,aACX,aACA,WAAW,SAAS,mBAAmB,2BAA2B;AAAA,EAExE,OAAO,MAAM,8BAA8B,SAAS;AAAA,EACpD,OAAO;AAAA;AAQT,SAAS,mBAAmB,CAAC,SAAgC;AAAA,EAC3D,MAAM,eAAe,UAAU,IAC3B,WAAW,SAAS,8BAA8B,KAClD,WAAW,SAAS,yBAAyB,IAC7C,WAAW,SAAS,sBAAsB;AAAA,EAC9C,IAAI,cAAc;AAAA,IAChB,OAAO,MAAM,+CAA+C,cAAc;AAAA,IAC1E,OAAO;AAAA,EACT;AAAA,EACA,OAAO,MAAM,2DAA2D;AAAA,EACxE,OAAO,WAAW,OAAO;AAAA;AAS3B,SAAS,SAAS,CAAC,SAA4C;AAAA,EAC7D,OAAO,WAAW,SAAS,gBAAgB;AAAA;AAS7C,SAAS,kBAAkB,CAAC,SAA4C;AAAA,EACtE,MAAM,kBAAkB,WAAW,SAAS,0BAA0B;AAAA,EACtE,IAAI,iBAAiB;AAAA,IACnB,OAAO,MAAM,qDAAqD;AAAA,IAClE,OAAO;AAAA,EACT;AAAA,EACA,OAAO,MAAM,0DAA0D;AAAA,EACvE,OAAO,UAAU,OAAO;AAAA;AAS1B,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,OACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,YAAY;AAAA;AAUpD,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,OACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,YAAY;AAAA;AAUpD,SAAS,wBAAwB,CAAC,SAAgC;AAAA,EAChE,OACE,WAAW,SAAS,kCAAkC,YAAY,KAClE;AAAA;AAUJ,SAAS,wBAAwB,CAAC,SAAiC;AAAA,EACjE,MAAM,UAAU,WAAW,SAAS,iCAAiC,OAAO;AAAA,EAE5E,MAAM,oBAAoB,OAAO,OAAO,EAAE,YAAY;AAAA,EACtD,MAAM,SAAS,sBAAsB;AAAA,EACrC,OAAO,MACL,iDAAiD,mBAAmB,OAAO,yBAAyB,+BAA+B,SACrI;AAAA,EACA,OAAO;AAAA;AAST,SAAS,kBAAkB,CAAC,SAAwB;AAAA,EAClD,MAAM,UAAU,WAAW,OAAO;AAAA,EAGlC,MAAM,SACJ,UAAU,OAAO,MAAM,YAAY,OAAO,IAAI,aAAa;AAAA,EAC7D,OAAO,aAAa,EAAE,QAAS,UAAU,IAAe,QAAQ,CAAC;AAAA;AAUnE,eAAe,YAAY,CAAC,OAAsB,QAAgB;AAAA,EAChE,MAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBACb,QAAQ,IAAI,eACZ,eACC,QAAQ,IAAI,eAAe;AAAA,EAClC,MAAM,SAAS,iBAAiB,SAA0B,EAAE,OAAO,MAAM;AAAA,EACzE,OAAO;AAAA;AAUT,eAAe,cAAc,CAAC,OAAsB,QAAkB;AAAA,EACpE,MAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBACb,QAAQ,IAAI,eACZ,eACC,QAAQ,IAAI,sBACb,QAAQ,IAAI,eACZ;AAAA,EACN,OAAO,iBAAiB,SAA0B,EAAE,OAAO,MAAM;AAAA;AAMnE,eAAe,yBAAyB,CACtC,SACA,QACA,WACA,YACoB;AAAA,EACpB,MAAM,SAAS,mBAAmB,OAAO;AAAA,EACzC,MAAM,YAAY,WAAW,OAAO;AAAA,EACpC,OAAO,IAAI,kBAAkB,oBAAoB,WAAW;AAAA,EAC5D,MAAM,cAAc,OAAO,eAAe;AAAA,EAC1C,MAAM,gBAAgB,CAAC,CAAC,OAAO;AAAA,EAE/B,IAAI,eAAe;AAAA,IACjB,OAAO,KACL,SAAS,4EACX;AAAA,EACF;AAAA,EAEA,IAAI;AAAA,IACF,QAAQ,QAAQ,UAAU,MAAM,eAAe;AAAA,MAC7C,OAAO,OAAO,cAAc,SAAS;AAAA,MACrC,QAAQ;AAAA,MACR,QAAQ,OAAO;AAAA,MACf;AAAA,MACA,yBAAyB,sBAAsB;AAAA,IACjD,CAAC;AAAA,IAED,IAAI,OAAO;AAAA,MACT,oBACE,SACA,WACA,OAAO,QACP,KACF;AAAA,IACF;AAAA,IACA,OAAO;AAAA,IACP,OAAO,OAAgB;AAAA,IACvB,IAAI,iBAAiB,gBAAgB;AAAA,MACnC,OAAO,MAAM,0CAA0C,MAAM,SAAS;AAAA,MAEtE,MAAM,iBAAiB,sBAAsB;AAAA,MAC7C,MAAM,qBAAqB,MAAM,eAAe;AAAA,QAC9C,MAAM,MAAM;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,MAED,IAAI,oBAAoB;AAAA,QACtB,IAAI;AAAA,UACF,MAAM,iBAAiB,KAAK,MAAM,kBAAkB;AAAA,UACpD,OAAO,KAAK,8CAA8C;AAAA,UAC1D,OAAO;AAAA,UACP,OAAO,kBAA2B;AAAA,UAClC,MAAM,UACJ,4BAA4B,QACxB,iBAAiB,UACjB,OAAO,gBAAgB;AAAA,UAC7B,OAAO,MACL,mDAAmD,SACrD;AAAA,UACA,MAAM;AAAA;AAAA,MAEV,EAAO;AAAA,QACL,OAAO,MAAM,sCAAsC;AAAA,QACnD,MAAM;AAAA;AAAA,IAEV,EAAO;AAAA,MACL,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MACrE,OAAO,MAAM,mCAAmC,SAAS;AAAA,MACzD,MAAM;AAAA;AAAA;AAAA;AAQZ,SAAS,qBAAqB,GAGD;AAAA,EAC3B,OAAO,SAAS,MAAM,YAA8C;AAAA,IAClE,IAAI;AAAA,MACF,IAAI,iBAAiB,gBAAgB;AAAA,QACnC,MAAM,cAAc,KAAK,QAAQ,wBAAwB,
EAAE;AAAA,QAC3D,KAAK,MAAM,WAAW;AAAA,QACtB,OAAO;AAAA,MACT;AAAA,MACA,OAAO;AAAA,MACP,OAAO,WAAoB;AAAA,MAC3B,MAAM,UACJ,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAAA,MACnE,OAAO,KAAK,+BAA+B,SAAS;AAAA,MACpD,OAAO;AAAA;AAAA;AAAA;AAYb,SAAS,mBAAmB,CAC1B,SACA,MACA,QACA,OACA;AAAA,EACA,QAAQ,UAAU,UAAU,YAAY;AAAA,IACtC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,QAAQ,MAAM;AAAA,MACd,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,IACf;AAAA,EACF,CAAC;AAAA;AAMH,eAAe,iBAAiB,CAC9B,SACA,SACA;AAAA,EACA,MAAM,eAAe,WACnB,SACA,oBACA,iBACF;AAAA,EACA,MAAM,eAAe,WAAW,SAAS,oBAAoB,MAAM;AAAA,EACnE,MAAM,sBAAsB,WAC1B,SACA,2BACA,EACF;AAAA,EACA,MAAM,UAAU,WAAW,OAAO;AAAA,EAElC,MAAM,QAAQ,QAAQ,SAAU;AAAA,EAChC,MAAM,QAAQ,QAAQ,SAAU;AAAA,EAChC,MAAM,eAAe,QAAQ,gBAAiB;AAAA,EAC9C,MAAM,SAAS,QAAQ,UAAU;AAAA,EAEjC,IAAI;AAAA,IACF,MAAM,MAAM,MAAM,MAAM,GAAG,wBAAwB;AAAA,MACjD,QAAQ;AAAA,MACR,SAAS;AAAA,WACJ,cAAc,OAAO;AAAA,QACxB,gBAAgB;AAAA,WAEZ,WAAW,QAAQ,EAAE,QAAQ,aAAa,IAAI,CAAC;AAAA,MACrD;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA;AAAA,QACA,OAAO,QAAQ;AAAA,QACf;AAAA,WACI,gBAAgB,EAAE,aAAa;AAAA,MACrC,CAAC;AAAA,IACH,CAAC;AAAA,IAED,IAAI,CAAC,IAAI,IAAI;AAAA,MACX,MAAM,MAAM,MAAM,IAAI,KAAK;AAAA,MAC3B,MAAM,IAAI,MAAM,oBAAoB,IAAI,WAAW,KAAK;AAAA,IAC1D;AAAA,IAEA,OAAO,IAAI;AAAA,IACX,OAAO,KAAc;AAAA,IACrB,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,IAC/D,MAAM,IAAI,MAAM,2CAA2C,SAAS;AAAA;AAAA;AAQjE,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,IACzB,wBAAwB,QAAQ,IAAI;AAAA,IACpC,0BAA0B,QAAQ,IAAI;AAAA,IACtC,sBAAsB,QAAQ,IAAI;AAAA,IAClC,6BAA6B,QAAQ,IAAI;AAAA,IACzC,gCAAgC,QAAQ,IAAI;AAAA,IAC5C,qCACE,QAAQ,IAAI;AAAA,IACd,+BAA+B,QAAQ,IAAI;AAAA,EAC7C;AAAA,OACM,KAAI,CAAC,SAAS,SAAS;AAAA,IAE3B,IAAI,QAAc,OAAO,YAAY;AAAA,MACnC,QAAQ;AAAA,MACR,IAAI;AAAA,QACF,IAAI,CAAC,UAAU,OAAO,KAAK,CAAC,UAAU,GAAG;AAAA,UACvC,OAAO,KACL,iFACF;AAAA,UACA;AAAA,QACF;AAAA,QACA,IAAI;AAAA,UACF,MAAM,UAAU,WAAW,OAAO;AAAA,UAClC,MAAM,WAAW,MAAM,MAAM,GAAG,kBAAkB;AAAA,YAChD,SAAS,KAAK,cAAc,OAAO,EAAE;AAAA,UACvC,CAAC;AAAA,UACD,IAAI,CAAC,SAAS,IAAI;AAAA,YAChB,OAAO,KACL,qCAAqC,SAAS,YAChD;AAAA,YACA,OAAO,KACL,wEACF;AAAA,UACF,EAAO;AAAA,YACL,OAAO,IAAI,uCAAuC;AAAA;AAAA,UAEpD,OAAO,YAAqB;AAAA,UAC5B,MAAM,UACJ,sBAAsB,QAClB,WAAW,UACX,OAAO,UAAU;AAAA,UACvB,OAAO,KAAK,oCAAoC,SAAS;AAAA,UACzD,OAAO,KACL,wEACF;AAAA;AAAA,QAEF,OAAO,OAAgB;AAAA,QACvB,MAAM,UACH,OAAmD,QAChD,IAAI,CAAC,MAAM,EAAE,OAAO,EACrB,KAAK,IAAI,MACX,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACxD,OAAO,KACL,sCAAsC,kFACxC;AAAA;AAAA,KAEH;AAAA;AAAA,EAGH,QAAQ;AAAA,KACL,UAAU,iBAAiB,OAC1B,SACA,WACsB;AAAA,MACtB,MAAM,qBAAqB,WACzB,SACA,0BACA,wBACF;AAAA,MACA,MAAM,qBAAqB,OAAO,SAChC,WAAW,SAAS,+BAA+B,MAAM,KAAK,QAC9D,EACF;AAAA,MAEA,IAAI,CAAC,OAAO,OAAO,WAAW,EAAE,SAAS,kBAAkB,GAAG;AAAA,QAC5D,MAAM,WAAW,gCAAgC,uCAAuC,OAAO,OAAO,WAAW,EAAE,KAAK,IAAI;AAAA,QAC5H,OAAO,MAAM,QAAQ;AAAA,QACrB,MAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AAAA,MACA,IAAI,WAAW,MAAM;AAAA,QACnB,OAAO,MAAM,4CAA4C;AAAA,QACzD,MAAM,aAAa,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACnD,WAAW,KAAK;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,IAAI;AAAA,MACJ,IAAI,OAAO,WAAW,UAAU;AAAA,QAC9B,OAAO;AAAA,MACT,EAAO,SAAI,OAAO,WAAW,YAAY,OAAO,MAAM;AAAA,QACpD,OAAO,OAAO;AAAA,MAChB,EAAO;AAAA,QACL,OAAO,KAAK,oCAAoC;AAAA,QAChD,MAAM,iBAAiB,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACvD,eAAe,KAAK;AAAA,QACpB,OAAO;AAAA;AAAA,MAET,IAAI,CAAC,KAAK,KAAK,GAAG;AAAA,QAChB,OAAO,KAAK,0BAA0B;AAAA,QACtC,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACpD,YAAY,KAAK;AAAA,QACjB,OAAO;AAAA,MACT;AAAA,MAEA,MAAM,mBAAmB,oBAAoB,OAAO;AAAA,MAEpD,IAAI;AAAA,QACF,MAAM,WAAW,MA
AM,MAAM,GAAG,+BAA+B;AAAA,UAC7D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,SAAS,IAAI;AAAA,YAC9B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAAA,QAKD,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,OAAO,MACL,qBAAqB,SAAS,YAAY,SAAS,YACrD;AAAA,UACA,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,UACpD,YAAY,KAAK;AAAA,UACjB,OAAO;AAAA,QACT;AAAA,QAEA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,QAKlC,IAAI,CAAC,MAAM,OAAO,IAAI,WAAW;AAAA,UAC/B,OAAO,MAAM,gCAAgC;AAAA,UAC7C,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,UACpD,YAAY,KAAK;AAAA,UACjB,OAAO;AAAA,QACT;AAAA,QAEA,MAAM,YAAY,KAAK,KAAK,GAAG;AAAA,QAE/B,IAAI,KAAK,OAAO;AAAA,UACd,MAAM,QAAQ;AAAA,YACZ,aAAa,KAAK,MAAM;AAAA,YACxB,cAAc;AAAA,YACd,aAAa,KAAK,MAAM;AAAA,UAC1B;AAAA,UAEA,oBAAoB,SAAS,UAAU,gBAAgB,MAAM,KAAK;AAAA,QACpE;AAAA,QAEA,OAAO,IAAI,mCAAmC,UAAU,QAAQ;AAAA,QAChE,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,+BAA+B,SAAS;AAAA,QACrD,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACpD,YAAY,KAAK;AAAA,QACjB,OAAO;AAAA;AAAA;AAAA,KAGV,UAAU,wBAAwB,OACjC,YACE,QAAQ,YAAY,UAAU,iBAC7B;AAAA,MACH,OAAO,MAAM,aAAa,aAAa,UAAU,YAAY,MAAM;AAAA;AAAA,KAEpE,UAAU,wBAAwB,OACjC,YACE,QAAQ,YAAY,UAAU,iBAC7B;AAAA,MACH,OAAO,MAAM,eAAe,aAAa,UAAU,YAAY,MAAM;AAAA;AAAA,KAEtE,UAAU,aAAa,OACtB;AAAA,MAEE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,UAEjB;AAAA,MACH,MAAM,SAAS,mBAAmB,OAAO;AAAA,MACzC,MAAM,YAAY,cAAc,OAAO;AAAA,MACvC,MAAM,wBAAwB,yBAAyB,OAAO;AAAA,MAE9D,OAAO,IAAI,oCAAoC,WAAW;AAAA,MAC1D,OAAO,IAAI,MAAM;AAAA,MAEjB,QAAQ,MAAM,gBAAgB,UAAU,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,iBAAiB;AAAA,QACjB;AAAA,QACA;AAAA,QACA;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAAA,MAED,IAAI,OAAO;AAAA,QACT,oBAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAAA,MAEA,OAAO;AAAA;AAAA,KAER,UAAU,aAAa,OACtB;AAAA,MAEE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,UAEjB;AAAA,MACH,MAAM,SAAS,mBAAmB,OAAO;AAAA,MACzC,MAAM,YAAY,cAAc,OAAO;AAAA,MACvC,MAAM,wBAAwB,yBAAyB,OAAO;AAAA,MAE9D,OAAO,IAAI,oCAAoC,WAAW;AAAA,MAC1D,OAAO,IAAI,MAAM;AAAA,MAEjB,QAAQ,MAAM,gBAAgB,UAAU,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,iBAAiB;AAAA,QACjB;AAAA,QACA;AAAA,QACA;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAAA,MAED,IAAI,OAAO;AAAA,QACT,oBAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAAA,MAEA,OAAO;AAAA;AAAA,KAER,UAAU,QAAQ,OACjB,SACA,WAKG;AAAA,MACH,MAAM,IAAI,OAAO,KAAK;AAAA,MACtB,MAAM,OAAO,OAAO,QAAQ;AAAA,MAC5B,MAAM,SAAS,OAAO;AAAA,MACtB,MAAM,YAAY;AAAA,MAClB,OAAO,IAAI,+BAA+B,WAAW;AAAA,MAErD,MAAM,UAAU,WAAW,OAAO;AAAA,MAElC,IAAI;AAAA,QACF,MAAM,WAAW,MAAM,MAAM,GAAG,8BAA8B;AAAA,UAC5D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,OAAO;AAAA,YACxB,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,QAID,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MAAM,6BAA6B,SAAS,YAAY;AAAA,QACpE;AAAA,QAEA,MAAM,OAAO,MAAM,SAAS,KAAK;AAAA,QACjC,MAAM,YAAY;AAAA,QAElB,OAAO,UAAU;AAAA,QACjB,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,oBAAoB,OAC7B,SACA,WACG;AAAA,MACH,IAAI;AAAA,MACJ,IAAI;AAAA,MACJ,MAAM,YAAY,yBAAyB,OAAO;AAAA,MAClD,OAAO,IAAI,2CAA2C,WAAW;AAAA,MACjE,MAAM,YAAY,OAAO,SACvB,WAAW,SAAS,uCAAuC,MAAM,KAC/D,QACF,EACF;AAAA,MAEA,IAAI,OAAO,WAAW,UAAU;AAAA,QAC9B,WAAW;AAAA,QACX,aACE;AAAA,MACJ,EAAO;AAAA,QACL,WAAW,OAAO;AAAA,QAClB,aACE,OAAO,UACP;AAAA;AAAA,MAGJ,MAAM,WAAW;AAAA,QACf;AAAA,UACE
,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,WAAW;AAAA,YACjC,EAAE,MAAM,aAAa,WAAW,EAAE,KAAK,SAAS,EAAE;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,UAAU,WAAW,OAAO;AAAA,MAElC,IAAI;AAAA,QACF,MAAM,cAAmC;AAAA,UACvC,OAAO;AAAA,UACP;AAAA,UACA,YAAY;AAAA,QACd;AAAA,QAEA,MAAM,WAAW,MAAM,MAAM,GAAG,4BAA4B;AAAA,UAC1D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,eACb,cAAc,OAAO;AAAA,UAC1B;AAAA,UACA,MAAM,KAAK,UAAU,WAAW;AAAA,QAClC,CAAC;AAAA,QAID,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MAAM,qBAAqB,SAAS,QAAQ;AAAA,QACxD;AAAA,QAEA,MAAM,SAAkB,MAAM,SAAS,KAAK;AAAA,QAc5C,MAAM,cAAc;AAAA,QACpB,MAAM,UAAU,YAAY,UAAU,IAAI,SAAS;AAAA,QAEnD,IAAI,YAAY,OAAO;AAAA,UACrB,oBACE,SACA,UAAU,mBACV,OAAO,WAAW,WAAW,SAAS,OAAO,UAAU,IACvD;AAAA,YACE,aAAa,YAAY,MAAM;AAAA,YAC/B,cAAc,YAAY,MAAM;AAAA,YAChC,aAAa,YAAY,MAAM;AAAA,UACjC,CACF;AAAA,QACF;AAAA,QAEA,IAAI,CAAC,SAAS;AAAA,UACZ,OAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAAA,QAGA,MAAM,iBACJ,OAAO,WAAW,YAClB,OAAO,UACP,OAAO,WACL;AAAA,QAGJ,IAAI,gBAAgB;AAAA,UAClB,OAAO;AAAA,QACT;AAAA,QAGA,MAAM,aAAa,QAAQ,MAAM,2BAA2B;AAAA,QAC5D,MAAM,QAAQ,aAAa,IAAI,KAAK,KAAK;AAAA,QACzC,MAAM,cAAc,QACjB,QAAQ,6BAA6B,EAAE,EACvC,KAAK;AAAA,QAER,MAAM,kBAAkB,EAAE,OAAO,YAAY;AAAA,QAC7C,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,0BAA0B,SAAS;AAAA,QAChD,OAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa,UAAU;AAAA,QACzB;AAAA;AAAA;AAAA,KAGH,UAAU,gBAAgB,OACzB,SACA,UACG;AAAA,MACH,IAAI,YAAY,WACd,SACA,8BACA,wBACF;AAAA,MACA,OAAO,IAAI,uCAAuC,WAAW;AAAA,MAE7D,MAAM,UAAU,WAAW,OAAO;AAAA,MAGlC,IAAI;AAAA,MACJ,IAAI,cAAgD;AAAA,MAEpD,IAAI,iBAAiB,QAAQ,iBAAiB,MAAM;AAAA,QAClD,OAAO;AAAA,MACT,EAAO,SACL,OAAO,UAAU,YACjB,UAAU,QACT,MAAc,SAAS,MACxB;AAAA,QACA,MAAM,SAAS;AAAA,QACf,IACE,EAAE,OAAO,iBAAiB,SAC1B,EAAE,OAAO,iBAAiB,OAC1B;AAAA,UACA,MAAM,IAAI,MACR,8GACF;AAAA,QACF;AAAA,QACA,OAAO,OAAO;AAAA,QACd,cAAc;AAAA,QACd,IAAI,OAAO,OAAO,UAAU,YAAY,OAAO,OAAO;AAAA,UACpD,YAAY,OAAO;AAAA,QACrB;AAAA,MACF,EAAO;AAAA,QACL,MAAM,IAAI,MACR,0JACF;AAAA;AAAA,MAGF,MAAM,OAAQ,KAAc,QAAQ;AAAA,MACpC,MAAM,WACH,KAAc,SACd,KAAK,SAAS,KAAK,KAAK,KAAK,SAAS,MAAM,IACzC,kBACA,KAAK,SAAS,KAAK,IACjB,kBACA,KAAK,SAAS,KAAK,IACjB,kBACA,KAAK,SAAS,MAAM,IAClB,mBACA;AAAA,MAEZ,MAAM,WAAW,IAAI;AAAA,MACrB,SAAS,OAAO,QAAQ,MAAM,QAAQ;AAAA,MACtC,SAAS,OAAO,SAAS,OAAO,SAAS,CAAC;AAAA,MAC1C,IAAI,aAAa;AAAA,QACf,IAAI,OAAO,YAAY,aAAa,UAAU;AAAA,UAC5C,SAAS,OAAO,YAAY,OAAO,YAAY,QAAQ,CAAC;AAAA,QAC1D;AAAA,QACA,IAAI,OAAO,YAAY,oBAAoB,UAAU;AAAA,UACnD,SAAS,OACP,mBACA,OAAO,YAAY,eAAe,CACpC;AAAA,QACF;AAAA,QACA,IAAI,OAAO,YAAY,WAAW,UAAU;AAAA,UAC1C,SAAS,OAAO,UAAU,OAAO,YAAY,MAAM,CAAC;AAAA,QACtD;AAAA,QACA,IAAI,OAAO,YAAY,gBAAgB,UAAU;AAAA,UAC/C,SAAS,OAAO,eAAe,OAAO,YAAY,WAAW,CAAC;AAAA,QAChE;AAAA,QACA,IAAI,MAAM,QAAQ,YAAY,sBAAsB,GAAG;AAAA,UACrD,WAAW,KAAK,YAAY,wBAAwB;AAAA,YAClD,SAAS,OAAO,6BAA6B,OAAO,CAAC,CAAC;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,IAAI;AAAA,QACF,MAAM,WAAW,MAAM,MAAM,GAAG,gCAAgC;AAAA,UAC9D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,OAAO;AAAA,UAC1B;AAAA,UACA,MAAM;AAAA,QACR,CAAC;AAAA,QAED,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MACR,+BAA+B,SAAS,UAAU,SAAS,YAC7D;AAAA,QACF;AAAA,QAEA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,QAClC,OAAO,KAAK,QAAQ;AAAA,QACpB,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,wBAAwB,SAAS;AAAA,QAC9C,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,iBAAiB,OAC1B,SACA,UACG;AAAA,MAEH,MAAM,UACJ,OAAO,UAAU,WACb,EAAE,MAAM,MAAM,IACb;AAAA,MAEP,MAAM,gBACJ,QAAQ,SACP,WAAW,SAAS,oBAAoB,iBAAiB;AAAA,MAC5D,OAAO,IAAI,wCAAwC,eAAe;AAAA,MAClE,IAAI;AAAA,QACF,MAAM,eAAe,MAAM,kBAAkB,SAAS,OAAO;AAAA,QAC7D,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC
rE,OAAO,MAAM,4BAA4B,SAAS;AAAA,QAClD,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,eAAe,OACxB,SACA,WACG;AAAA,MACH,OAAO,0BACL,SACA,QACA,UAAU,cACV,aACF;AAAA;AAAA,KAED,UAAU,eAAe,OACxB,SACA,WACG;AAAA,MACH,OAAO,0BACL,SACA,QACA,UAAU,cACV,aACF;AAAA;AAAA,EAEJ;AAAA,EACA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,MAAM,UAAU,WAAW,OAAO;AAAA,YAClC,MAAM,WAAW,MAAM,MAAM,GAAG,kBAAkB;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,UAAU,OAAO;AAAA,cAC5C;AAAA,YACF,CAAC;AAAA,YACD,MAAM,OAAO,MAAM,SAAS,KAAK;AAAA,YACjC,OAAO,IACL,EAAE,MAAO,MAA+B,MAAM,UAAU,MAAM,GAC9D,kBACF;AAAA,YACA,IAAI,CAAC,SAAS,IAAI;AAAA,cAChB,MAAM,IAAI,MACR,sCAAsC,SAAS,YACjD;AAAA,YACF;AAAA;AAAA,QAEJ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,YAAY,MAAM,QAAQ,SAC9B,UAAU,gBACV;AAAA,gBACE,MAAM;AAAA,cACR,CACF;AAAA,cACA,OAAO,IAAI,EAAE,UAAU,GAAG,WAAW;AAAA,cACrC,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,iCAAiC,SAAS;AAAA,cACvD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AAAA,cACD,IAAI,KAAK,WAAW,GAAG;AAAA,gBACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AAAA,cACA,OAAO,IAAI,EAAE,KAAK,GAAG,gCAAgC;AAAA,cACrD,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,6BAA6B,SAAS;AAAA,cACnD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AAAA,cACD,IAAI,KAAK,WAAW,GAAG;AAAA,gBACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AAAA,cACA,OAAO,IAAI,EAAE,KAAK,GAAG,gCAAgC;AAAA,cACrD,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,6BAA6B,SAAS;AAAA,cACnD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,OAAO,IAAI,8BAA8B;AAAA,YACzC,IAAI;AAAA,cACF,MAAM,QAAQ,MAAM,QAAQ,SAAS,UAAU,OAAO;AAAA,gBACpD,QAAQ;AAAA,gBACR,GAAG;AAAA,gBACH,MAAM;AAAA,cACR,CAAC;AAAA,cACD,OAAO,IAAI,EAAE,MAAM,GAAG,sCAAsC;AAAA,cAC5D,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,mCAAmC,SAAS;AAAA,cACzD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,OAAO,IAAI,+BAA+B;AAAA,cAC1C,IAAI;AAAA,gBACF,MAAM,SAAS,MAAM,QAAQ,SAC3B,UAAU,mBACV,mLACF;AAAA,gBAEA,IACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AAAA,kBACA,OAAO,IAAI,EAAE,OAAO,GAAG,mBAAmB;AAAA,gBAC5C,EAAO;AAAA,kBACL,OAAO,MACL,4CACA,MACF;AAAA;AAAA,gBAEF,OAAO,GAAY;AAAA,gBACnB,MAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AAAA,gBACzD,OAAO,MAAM,oCAAoC,SAAS;AAAA;AAAA,cAE5D,OAAO,GAAY;AAAA,cACnB,MAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AAAA,cACzD,OAAO,MACL,2CAA2C,SAC7C;AAAA;AAAA;AAAA,QAGN;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,OAAO,IAAI,2BAA2B;AAAA,YACtC,IAAI;AAAA,cACF,MAAM,WAAW,MAAM,MACrB,+EACF;AAAA,cACA,MAAM,cAAc,MAAM,SAAS,YAAY;AAAA,cAC/C,MAAM,gBAAgB,MAAM,QAAQ,SAClC,UAAU,eACV,OAAO,KAAK,IAAI,WAAW,WAAW,CAAC,CACzC;AAAA,cACA,OAAO,IACL,EAAE,cAAc,GAChB,mCACF;AAAA,cACA,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,gCAAgC,SAAS;AAAA,cACtD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,MAAM,SAAS;AAAA,YACf,MAAM,SAAS,MAAM,QAAQ,SAC3B,UAAU,uBACV,EAAE,OAAO,CACX;AAAA,YACA,IAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AAAA,cACjD,MAAM,IAAI,MACR,6DACF;AAAA,YACF;AAAA,YACA,OAAO,IAAI,EAAE,OAAO,GAAG,kBAAkB;AAAA;AAAA,QAE7C;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA
,YACpC,MAAM,SAAS;AAAA,YACf,MAAM,SAAS,MAAM,QAAQ,SAC3B,UAAU,uBACV,EAAE,OAAO,CACX;AAAA,YACA,MAAM,cAAc,MAAM,QAAQ,SAChC,UAAU,uBACV,EAAE,OAAO,CACX;AAAA,YACA,IAAI,gBAAgB,QAAQ;AAAA,cAC1B,MAAM,IAAI,MACR,mDAAmD,iBAAiB,cACtE;AAAA,YACF;AAAA,YACA,OAAO,IAAI,EAAE,YAAY,GAAG,cAAc;AAAA;AAAA,QAE9C;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,WAAW,MAAM,kBAAkB,SAAS;AAAA,gBAChD,MAAM;AAAA,cACR,CAAC;AAAA,cACD,IAAI,CAAC,UAAU;AAAA,gBACb,MAAM,IAAI,MAAM,2BAA2B;AAAA,cAC7C;AAAA,cACA,OAAO,IAAI,+BAA+B;AAAA,cAC1C,OAAO,OAAgB;AAAA,cACvB,MAAM,UACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACvD,OAAO,MAAM,wCAAwC,SAAS;AAAA,cAC9D,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAe;",
- "debugId": "B1A62305054258BB64756E2164756E21",
+ "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAYA;AACA;AAAA;AAAA;AAAA;AAAA;AAOA;AA4BA,SAAS,UAAU,CACjB,SACA,KACA,cACoB;AAAA,EACpB,OAAO,QAAQ,WAAW,GAAG,KAAK,QAAQ,IAAI,QAAQ;AAAA;AAGxD,SAAS,SAAS,GAAY;AAAA,EAC5B,OAAO,OAAO,eAAe,eAAe,OAAQ,WAAmB,aAAa;AAAA;AAOtF,SAAS,WAAW,CAAC,SAAiC;AAAA,EACpD,OAAO,UAAU,KAAK,CAAC,CAAC,WAAW,SAAS,yBAAyB;AAAA;AAGvE,SAAS,aAAa,CAAC,SAAwB,eAAe,OAA+B;AAAA,EAC3F,IAAI,UAAU;AAAA,IAAG,OAAO,CAAC;AAAA,EACzB,MAAM,MAAM,eAAe,mBAAmB,OAAO,IAAI,UAAU,OAAO;AAAA,EAC1E,OAAO,MAAM,EAAE,eAAe,UAAU,MAAM,IAAI,CAAC;AAAA;AAQrD,SAAS,UAAU,CAAC,SAAgC;AAAA,EAClD,MAAM,aAAa,WAAW,SAAS,yBAAyB;AAAA,EAChE,MAAM,UACJ,UAAU,KAAK,aACX,aACA,WAAW,SAAS,mBAAmB,2BAA2B;AAAA,EAExE,OAAO,MAAM,8BAA8B,SAAS;AAAA,EACpD,OAAO;AAAA;AAQT,SAAS,mBAAmB,CAAC,SAAgC;AAAA,EAC3D,MAAM,eAAe,UAAU,IAC3B,WAAW,SAAS,8BAA8B,KAClD,WAAW,SAAS,yBAAyB,IAC7C,WAAW,SAAS,sBAAsB;AAAA,EAC9C,IAAI,cAAc;AAAA,IAChB,OAAO,MAAM,+CAA+C,cAAc;AAAA,IAC1E,OAAO;AAAA,EACT;AAAA,EACA,OAAO,MAAM,2DAA2D;AAAA,EACxE,OAAO,WAAW,OAAO;AAAA;AAS3B,SAAS,SAAS,CAAC,SAA4C;AAAA,EAC7D,OAAO,WAAW,SAAS,gBAAgB;AAAA;AAS7C,SAAS,kBAAkB,CAAC,SAA4C;AAAA,EACtE,MAAM,kBAAkB,WAAW,SAAS,0BAA0B;AAAA,EACtE,IAAI,iBAAiB;AAAA,IACnB,OAAO,MAAM,qDAAqD;AAAA,IAClE,OAAO;AAAA,EACT;AAAA,EACA,OAAO,MAAM,0DAA0D;AAAA,EACvE,OAAO,UAAU,OAAO;AAAA;AAS1B,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,OACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,YAAY;AAAA;AAUpD,SAAS,aAAa,CAAC,SAAgC;AAAA,EACrD,OACE,WAAW,SAAS,oBAAoB,KACvC,WAAW,SAAS,eAAe,YAAY;AAAA;AAUpD,SAAS,wBAAwB,CAAC,SAAgC;AAAA,EAChE,OAAO,WAAW,SAAS,kCAAkC,YAAY,KAAK;AAAA;AAShF,SAAS,wBAAwB,CAAC,SAAiC;AAAA,EACjE,MAAM,UAAU,WAAW,SAAS,iCAAiC,OAAO;AAAA,EAE5E,MAAM,oBAAoB,OAAO,OAAO,EAAE,YAAY;AAAA,EACtD,MAAM,SAAS,sBAAsB;AAAA,EACrC,OAAO,MACL,iDAAiD,mBAAmB,OAAO,yBAAyB,+BAA+B,SACrI;AAAA,EACA,OAAO;AAAA;AAST,SAAS,kBAAkB,CAAC,SAAwB;AAAA,EAClD,MAAM,UAAU,WAAW,OAAO;AAAA,EAGlC,MAAM,SAAS,UAAU,OAAO,MAAM,YAAY,OAAO,IAAI,aAAa;AAAA,EAC1E,OAAO,aAAa,EAAE,QAAS,UAAU,IAAe,QAAQ,CAAC;AAAA;AAUnE,eAAe,YAAY,CAAC,OAAsB,QAAgB;AAAA,EAChE,MAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,eAC7D,QAAQ,IAAI,eAAe;AAAA,EAClC,MAAM,SAAS,iBAAiB,SAA0B,EAAE,OAAO,MAAM;AAAA,EACzE,OAAO;AAAA;AAUT,eAAe,cAAc,CAAC,OAAsB,QAAkB;AAAA,EACpE,MAAM,YACJ,UAAU,UAAU,aACf,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe,eAC7D,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,eAAe;AAAA,EACpE,OAAO,iBAAiB,SAA0B,EAAE,OAAO,MAAM;AAAA;AAMnE,eAAe,yBAAyB,CACtC,SACA,QACA,WACA,YACoB;AAAA,EACpB,MAAM,SAAS,mBAAmB,OAAO;AAAA,EACzC,MAAM,YAAY,WAAW,OAAO;AAAA,EACpC,OAAO,IAAI,kBAAkB,oBAAoB,WAAW;AAAA,EAC5D,MAAM,cAAc,OAAO,eAAe;AAAA,EAC1C,MAAM,gBAAgB,CAAC,CAAC,OAAO;AAAA,EAE/B,IAAI,eAAe;AAAA,IACjB,OAAO,KACL,SAAS,4EACX;AAAA,EACF;AAAA,EAEA,IAAI;AAAA,IACF,QAAQ,QAAQ,UAAU,MAAM,eAAe;AAAA,MAC7C,OAAO,OAAO,cAAc,SAAS;AAAA,MACrC,QAAQ;AAAA,MACR,QAAQ,OAAO;AAAA,MACf;AAAA,MACA,yBAAyB,sBAAsB;AAAA,IACjD,CAAC;AAAA,IAED,IAAI,OAAO;AAAA,MACT,oBAAoB,SAAS,WAA4B,OAAO,QAAQ,KAAK;AAAA,IAC/E;AAAA,IACA,OAAO;AAAA,IACP,OAAO,OAAgB;AAAA,IACvB,IAAI,iBAAiB,gBAAgB;AAAA,MACnC,OAAO,MAAM,0CAA0C,MAAM,SAAS;AAAA,MAEtE,MAAM,iBAAiB,sBAAsB;AAAA,MAC7C,MAAM,qBAAqB,MAAM,eAAe;AAAA,QAC9C,MAAM,MAAM;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,MAED,IAAI,oBAAoB;AAAA,QACtB,IAAI;AAAA,UACF,MAAM,iBAAiB,KAAK,MAAM,kBAAkB;AAAA,UACpD,OAAO,KAAK,8CAA8C;AAAA,UAC1D,OAAO;AAAA,UACP,OAAO,kBAA2B;AAAA,UAClC,MAAM,UACJ,4BAA4B,QAAQ,iBAAiB,UAAU,OAAO,gBAAgB;AAAA,UACxF,OAAO,MAAM,mDAAmD,SAAS;AAAA,UACzE,MAAM;AAAA;AAAA,MAEV,EAAO;AAAA,QACL,OAAO,MAAM,sCAAsC;AAAA,QACnD,MAAM;AAAA;AAAA,IAEV,EAAO;AAAA,MACL,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MACrE,OAAO,MAAM,mCAAmC,SAAS;AAAA,MACzD,MAAM;AAAA;AAAA;AAAA;AAQZ,SAAS,qBAAqB,GAGD;AAAA,EAC3B,OAAO,SAAS,MAAM,YAA8C;AAAA,IAClE,IAAI;AAAA,MACF,IAAI,iBAAiB,gBAAgB;AAAA,QACnC,M
AAM,cAAc,KAAK,QAAQ,wBAAwB,EAAE;AAAA,QAC3D,KAAK,MAAM,WAAW;AAAA,QACtB,OAAO;AAAA,MACT;AAAA,MACA,OAAO;AAAA,MACP,OAAO,WAAoB;AAAA,MAC3B,MAAM,UAAU,qBAAqB,QAAQ,UAAU,UAAU,OAAO,SAAS;AAAA,MACjF,OAAO,KAAK,+BAA+B,SAAS;AAAA,MACpD,OAAO;AAAA;AAAA;AAAA;AAYb,SAAS,mBAAmB,CAC1B,SACA,MACA,QACA,OACA;AAAA,EACA,QAAQ,UAAU,UAAU,YAAY;AAAA,IACtC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,MACN,QAAQ,MAAM;AAAA,MACd,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,IACf;AAAA,EACF,CAAC;AAAA;AAQH,SAAS,mBAAmB,CAAC,QAAwB;AAAA,EACnD,IAAI,OAAO,SAAS,IAAI;AAAA,IACtB,OAAO;AAAA,EACT;AAAA,EAIA,IACE,OAAO,OAAO,MACd,OAAO,OAAO,MACd,OAAO,OAAO,MACd,OAAO,OAAO,MACd,OAAO,OAAO,MACd,OAAO,OAAO,MACd,OAAO,QAAQ,MACf,OAAO,QAAQ,IACf;AAAA,IACA,OAAO;AAAA,EACT;AAAA,EAGA,IACG,OAAO,OAAO,MAAQ,OAAO,OAAO,MAAQ,OAAO,OAAO,MAC1D,OAAO,OAAO,QAAS,OAAO,KAAK,SAAU,KAC9C;AAAA,IACA,OAAO;AAAA,EACT;AAAA,EAGA,IAAI,OAAO,OAAO,MAAQ,OAAO,OAAO,OAAQ,OAAO,OAAO,OAAQ,OAAO,OAAO,IAAM;AAAA,IACxF,OAAO;AAAA,EACT;AAAA,EAGA,IAAI,OAAO,OAAO,OAAQ,OAAO,OAAO,MAAQ,OAAO,OAAO,MAAQ,OAAO,OAAO,IAAM;AAAA,IACxF,OAAO;AAAA,EACT;AAAA,EAGA,IAAI,OAAO,OAAO,OAAQ,OAAO,OAAO,OAAQ,OAAO,OAAO,OAAQ,OAAO,OAAO,KAAM;AAAA,IACxF,OAAO;AAAA,EACT;AAAA,EAGA,IAAI,OAAO,OAAO,MAAQ,OAAO,OAAO,MAAQ,OAAO,OAAO,OAAQ,OAAO,OAAO,KAAM;AAAA,IACxF,OAAO;AAAA,EACT;AAAA,EAGA,OAAO,KAAK,sEAAsE;AAAA,EAClF,OAAO;AAAA;AAQT,eAAe,qBAAqB,CAAC,WAAuC;AAAA,EAC1E,IAAI;AAAA,IAEF,QAAQ,aAAa,MAAa;AAAA,IAClC,MAAM,SAAS,UAAU,UAAU;AAAA,IAEnC,OAAO,IAAI,SAAS;AAAA,WACZ,KAAI,GAAG;AAAA,QACX,IAAI;AAAA,UACF,QAAQ,MAAM,UAAU,MAAM,OAAO,KAAK;AAAA,UAC1C,IAAI,MAAM;AAAA,YACR,KAAK,KAAK,IAAI;AAAA,UAChB,EAAO;AAAA,YAEL,KAAK,KAAK,KAAK;AAAA;AAAA,UAEjB,OAAO,OAAO;AAAA,UACd,KAAK,QAAQ,KAAc;AAAA;AAAA;AAAA,MAG/B,OAAO,CAAC,OAAO,UAAU;AAAA,QACvB,OAAO,OAAO,EAAE,QAAQ,MAAM,SAAS,KAAK,CAAC;AAAA;AAAA,IAEjD,CAAC;AAAA,IACD,OAAO,OAAO;AAAA,IACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IACrE,OAAO,MAAM,sCAAsC,SAAS;AAAA,IAC5D,MAAM,IAAI,MACR,qGACF;AAAA;AAAA;AAOJ,eAAe,iBAAiB,CAAC,SAAwB,SAAmC;AAAA,EAC1F,MAAM,eAAe,WAAW,SAAS,oBAAoB,iBAAiB;AAAA,EAC9E,MAAM,eAAe,WAAW,SAAS,oBAAoB,MAAM;AAAA,EACnE,MAAM,sBAAsB,WAAW,SAAS,2BAA2B,EAAE;AAAA,EAC7E,MAAM,UAAU,WAAW,OAAO;AAAA,EAElC,MAAM,QAAQ,QAAQ,SAAU;AAAA,EAChC,MAAM,QAAQ,QAAQ,SAAU;AAAA,EAChC,MAAM,eAAe,QAAQ,gBAAiB;AAAA,EAC9C,MAAM,SAAS,QAAQ,UAAU;AAAA,EAEjC,IAAI;AAAA,IACF,MAAM,MAAM,MAAM,MAAM,GAAG,wBAAwB;AAAA,MACjD,QAAQ;AAAA,MACR,SAAS;AAAA,WACJ,cAAc,OAAO;AAAA,QACxB,gBAAgB;AAAA,WAEZ,WAAW,QAAQ,EAAE,QAAQ,aAAa,IAAI,CAAC;AAAA,MACrD;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA;AAAA,QACA,OAAO,QAAQ;AAAA,QACf;AAAA,WACI,gBAAgB,EAAE,aAAa;AAAA,MACrC,CAAC;AAAA,IACH,CAAC;AAAA,IAED,IAAI,CAAC,IAAI,IAAI;AAAA,MACX,MAAM,MAAM,MAAM,IAAI,KAAK;AAAA,MAC3B,MAAM,IAAI,MAAM,oBAAoB,IAAI,WAAW,KAAK;AAAA,IAC1D;AAAA,IAGA,IAAI,CAAC,IAAI,MAAM;AAAA,MACb,MAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAAA,IAIA,IAAI,CAAC,UAAU,GAAG;AAAA,MAChB,OAAO,MAAM,sBAAsB,IAAI,IAAI;AAAA,IAC7C;AAAA,IAEA,OAAO,IAAI;AAAA,IACX,OAAO,KAAc;AAAA,IACrB,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,IAC/D,MAAM,IAAI,MAAM,2CAA2C,SAAS;AAAA;AAAA;AAQjE,IAAM,eAAuB;AAAA,EAClC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,QAAQ;AAAA,IACN,gBAAgB,QAAQ,IAAI;AAAA,IAC5B,iBAAiB,QAAQ,IAAI;AAAA,IAC7B,oBAAoB,QAAQ,IAAI;AAAA,IAChC,oBAAoB,QAAQ,IAAI;AAAA,IAChC,aAAa,QAAQ,IAAI;AAAA,IACzB,aAAa,QAAQ,IAAI;AAAA,IACzB,wBAAwB,QAAQ,IAAI;AAAA,IACpC,0BAA0B,QAAQ,IAAI;AAAA,IACtC,sBAAsB,QAAQ,IAAI;AAAA,IAClC,6BAA6B,QAAQ,IAAI;AAAA,IACzC,gCAAgC,QAAQ,IAAI;AAAA,IAC5C,qCAAqC,QAAQ,IAAI;AAAA,IACjD,+BAA+B,QAAQ,IAAI;AAAA,EAC7C;AAAA,OACM,KAAI,CAAC,SAAS,SAAS;AAAA,IAE3B,IAAI,QAAc,OAAO,YAAY;AAAA,MACnC,QAAQ;AAAA,MACR,IAAI;AAAA,QACF,IAAI,CAAC,UAAU,OAAO,KAAK,CAAC,UAAU,GAAG;AAAA,UACvC,OAAO,KACL,iFACF;AAAA,UACA;AAAA,QACF;AAAA,QAC
A,IAAI;AAAA,UACF,MAAM,UAAU,WAAW,OAAO;AAAA,UAClC,MAAM,WAAW,MAAM,MAAM,GAAG,kBAAkB;AAAA,YAChD,SAAS,KAAK,cAAc,OAAO,EAAE;AAAA,UACvC,CAAC;AAAA,UACD,IAAI,CAAC,SAAS,IAAI;AAAA,YAChB,OAAO,KAAK,qCAAqC,SAAS,YAAY;AAAA,YACtE,OAAO,KAAK,wEAAwE;AAAA,UACtF,EAAO;AAAA,YACL,OAAO,IAAI,uCAAuC;AAAA;AAAA,UAEpD,OAAO,YAAqB;AAAA,UAC5B,MAAM,UAAU,sBAAsB,QAAQ,WAAW,UAAU,OAAO,UAAU;AAAA,UACpF,OAAO,KAAK,oCAAoC,SAAS;AAAA,UACzD,OAAO,KAAK,wEAAwE;AAAA;AAAA,QAEtF,OAAO,OAAgB;AAAA,QACvB,MAAM,UACH,OAAmD,QAChD,IAAI,CAAC,MAAM,EAAE,OAAO,EACrB,KAAK,IAAI,MAAM,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACzE,OAAO,KACL,sCAAsC,kFACxC;AAAA;AAAA,KAEH;AAAA;AAAA,EAGH,QAAQ;AAAA,KACL,UAAU,iBAAiB,OAC1B,SACA,WACsB;AAAA,MACtB,MAAM,qBAAqB,WACzB,SACA,0BACA,wBACF;AAAA,MACA,MAAM,qBAAqB,OAAO,SAChC,WAAW,SAAS,+BAA+B,MAAM,KAAK,QAC9D,EACF;AAAA,MAEA,IAAI,CAAC,OAAO,OAAO,WAAW,EAAE,SAAS,kBAAkB,GAAG;AAAA,QAC5D,MAAM,WAAW,gCAAgC,uCAAuC,OAAO,OAAO,WAAW,EAAE,KAAK,IAAI;AAAA,QAC5H,OAAO,MAAM,QAAQ;AAAA,QACrB,MAAM,IAAI,MAAM,QAAQ;AAAA,MAC1B;AAAA,MACA,IAAI,WAAW,MAAM;AAAA,QACnB,OAAO,MAAM,4CAA4C;AAAA,QACzD,MAAM,aAAa,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACnD,WAAW,KAAK;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,IAAI;AAAA,MACJ,IAAI,OAAO,WAAW,UAAU;AAAA,QAC9B,OAAO;AAAA,MACT,EAAO,SAAI,OAAO,WAAW,YAAY,OAAO,MAAM;AAAA,QACpD,OAAO,OAAO;AAAA,MAChB,EAAO;AAAA,QACL,OAAO,KAAK,oCAAoC;AAAA,QAChD,MAAM,iBAAiB,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACvD,eAAe,KAAK;AAAA,QACpB,OAAO;AAAA;AAAA,MAET,IAAI,CAAC,KAAK,KAAK,GAAG;AAAA,QAChB,OAAO,KAAK,0BAA0B;AAAA,QACtC,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACpD,YAAY,KAAK;AAAA,QACjB,OAAO;AAAA,MACT;AAAA,MAEA,MAAM,mBAAmB,oBAAoB,OAAO;AAAA,MAEpD,IAAI;AAAA,QACF,MAAM,WAAW,MAAM,MAAM,GAAG,+BAA+B;AAAA,UAC7D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,SAAS,IAAI;AAAA,YAC9B,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP,OAAO;AAAA,UACT,CAAC;AAAA,QACH,CAAC;AAAA,QAKD,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,OAAO,MAAM,qBAAqB,SAAS,YAAY,SAAS,YAAY;AAAA,UAC5E,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,UACpD,YAAY,KAAK;AAAA,UACjB,OAAO;AAAA,QACT;AAAA,QAEA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,QAKlC,IAAI,CAAC,MAAM,OAAO,IAAI,WAAW;AAAA,UAC/B,OAAO,MAAM,gCAAgC;AAAA,UAC7C,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,UACpD,YAAY,KAAK;AAAA,UACjB,OAAO;AAAA,QACT;AAAA,QAEA,MAAM,YAAY,KAAK,KAAK,GAAG;AAAA,QAE/B,IAAI,KAAK,OAAO;AAAA,UACd,MAAM,QAAQ;AAAA,YACZ,aAAa,KAAK,MAAM;AAAA,YACxB,cAAc;AAAA,YACd,aAAa,KAAK,MAAM;AAAA,UAC1B;AAAA,UAEA,oBAAoB,SAAS,UAAU,gBAAgB,MAAM,KAAK;AAAA,QACpE;AAAA,QAEA,OAAO,IAAI,mCAAmC,UAAU,QAAQ;AAAA,QAChE,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,+BAA+B,SAAS;AAAA,QACrD,MAAM,cAAc,MAAM,kBAAkB,EAAE,KAAK,CAAC;AAAA,QACpD,YAAY,KAAK;AAAA,QACjB,OAAO;AAAA;AAAA;AAAA,KAGV,UAAU,wBAAwB,OACjC,YACE,QAAQ,YAAY,UAAU,iBAC7B;AAAA,MACH,OAAO,MAAM,aAAa,aAAa,UAAU,YAAY,MAAM;AAAA;AAAA,KAEpE,UAAU,wBAAwB,OACjC,YACE,QAAQ,YAAY,UAAU,iBAC7B;AAAA,MACH,OAAO,MAAM,eAAe,aAAa,UAAU,YAAY,MAAM;AAAA;AAAA,KAEtE,UAAU,aAAa,OACtB;AAAA,MAEE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,UAEjB;AAAA,MACH,MAAM,SAAS,mBAAmB,OAAO;AAAA,MACzC,MAAM,YAAY,cAAc,OAAO;AAAA,MACvC,MAAM,wBAAwB,yBAAyB,OAAO;AAAA,MAE9D,OAAO,IAAI,oCAAoC,WAAW;AAAA,MAC1D,OAAO,IAAI,MAAM;AAAA,MAEjB,QAAQ,MAAM,gBAAgB,UAAU,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,iBAAiB;AAAA,QACjB;AAAA,QACA;AAAA,QACA;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAAA,MAED,IAAI,OAAO;AAAA,QACT,oBAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAAA,MAEA,OAAO;AAAA;AAAA,KAER,UAAU,aAAa,OACtB;AAAA,MAEE;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc;
AAAA,MACd,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,UAEjB;AAAA,MACH,MAAM,SAAS,mBAAmB,OAAO;AAAA,MACzC,MAAM,YAAY,cAAc,OAAO;AAAA,MACvC,MAAM,wBAAwB,yBAAyB,OAAO;AAAA,MAE9D,OAAO,IAAI,oCAAoC,WAAW;AAAA,MAC1D,OAAO,IAAI,MAAM;AAAA,MAEjB,QAAQ,MAAM,gBAAgB,UAAU,MAAM,aAAa;AAAA,QACzD,OAAO,OAAO,cAAc,SAAS;AAAA,QACrC;AAAA,QACA,QAAQ,QAAQ,UAAU,UAAU;AAAA,QACpC;AAAA,QACA,iBAAiB;AAAA,QACjB;AAAA,QACA;AAAA,QACA;AAAA,QACA,wBAAwB;AAAA,UACtB,WAAW;AAAA,QACb;AAAA,MACF,CAAC;AAAA,MAED,IAAI,OAAO;AAAA,QACT,oBAAoB,SAAS,UAAU,YAAY,QAAQ,KAAK;AAAA,MAClE;AAAA,MAEA,OAAO;AAAA;AAAA,KAER,UAAU,QAAQ,OACjB,SACA,WAKG;AAAA,MACH,MAAM,IAAI,OAAO,KAAK;AAAA,MACtB,MAAM,OAAO,OAAO,QAAQ;AAAA,MAC5B,MAAM,SAAS,OAAO;AAAA,MACtB,MAAM,YAAY;AAAA,MAClB,OAAO,IAAI,+BAA+B,WAAW;AAAA,MAErD,MAAM,UAAU,WAAW,OAAO;AAAA,MAElC,IAAI;AAAA,QACF,MAAM,WAAW,MAAM,MAAM,GAAG,8BAA8B;AAAA,UAC5D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,OAAO;AAAA,YACxB,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU;AAAA,YACnB,OAAO;AAAA,YACP;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAAA,QAID,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MAAM,6BAA6B,SAAS,YAAY;AAAA,QACpE;AAAA,QAEA,MAAM,OAAO,MAAM,SAAS,KAAK;AAAA,QACjC,MAAM,YAAY;AAAA,QAElB,OAAO,UAAU;AAAA,QACjB,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,oBAAoB,OAC7B,SACA,WACG;AAAA,MACH,IAAI;AAAA,MACJ,IAAI;AAAA,MACJ,MAAM,YAAY,yBAAyB,OAAO;AAAA,MAClD,OAAO,IAAI,2CAA2C,WAAW;AAAA,MACjE,MAAM,YAAY,OAAO,SACvB,WAAW,SAAS,uCAAuC,MAAM,KAAK,QACtE,EACF;AAAA,MAEA,IAAI,OAAO,WAAW,UAAU;AAAA,QAC9B,WAAW;AAAA,QACX,aAAa;AAAA,MACf,EAAO;AAAA,QACL,WAAW,OAAO;AAAA,QAClB,aACE,OAAO,UACP;AAAA;AAAA,MAGJ,MAAM,WAAW;AAAA,QACf;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,YACP,EAAE,MAAM,QAAQ,MAAM,WAAW;AAAA,YACjC,EAAE,MAAM,aAAa,WAAW,EAAE,KAAK,SAAS,EAAE;AAAA,UACpD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,UAAU,WAAW,OAAO;AAAA,MAElC,IAAI;AAAA,QACF,MAAM,cAAmC;AAAA,UACvC,OAAO;AAAA,UACP;AAAA,UACA,YAAY;AAAA,QACd;AAAA,QAEA,MAAM,WAAW,MAAM,MAAM,GAAG,4BAA4B;AAAA,UAC1D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,eACb,cAAc,OAAO;AAAA,UAC1B;AAAA,UACA,MAAM,KAAK,UAAU,WAAW;AAAA,QAClC,CAAC;AAAA,QAID,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MAAM,qBAAqB,SAAS,QAAQ;AAAA,QACxD;AAAA,QAEA,MAAM,SAAkB,MAAM,SAAS,KAAK;AAAA,QAc5C,MAAM,cAAc;AAAA,QACpB,MAAM,UAAU,YAAY,UAAU,IAAI,SAAS;AAAA,QAEnD,IAAI,YAAY,OAAO;AAAA,UACrB,oBACE,SACA,UAAU,mBACV,OAAO,WAAW,WAAW,SAAS,OAAO,UAAU,IACvD;AAAA,YACE,aAAa,YAAY,MAAM;AAAA,YAC/B,cAAc,YAAY,MAAM;AAAA,YAChC,aAAa,YAAY,MAAM;AAAA,UACjC,CACF;AAAA,QACF;AAAA,QAEA,IAAI,CAAC,SAAS;AAAA,UACZ,OAAO;AAAA,YACL,OAAO;AAAA,YACP,aAAa;AAAA,UACf;AAAA,QACF;AAAA,QAGA,MAAM,iBACJ,OAAO,WAAW,YAClB,OAAO,UACP,OAAO,WACL;AAAA,QAGJ,IAAI,gBAAgB;AAAA,UAClB,OAAO;AAAA,QACT;AAAA,QAGA,MAAM,aAAa,QAAQ,MAAM,2BAA2B;AAAA,QAC5D,MAAM,QAAQ,aAAa,IAAI,KAAK,KAAK;AAAA,QACzC,MAAM,cAAc,QAAQ,QAAQ,6BAA6B,EAAE,EAAE,KAAK;AAAA,QAE1E,MAAM,kBAAkB,EAAE,OAAO,YAAY;AAAA,QAC7C,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,0BAA0B,SAAS;AAAA,QAChD,OAAO;AAAA,UACL,OAAO;AAAA,UACP,aAAa,UAAU;AAAA,QACzB;AAAA;AAAA;AAAA,KAGH,UAAU,gBAAgB,OACzB,SACA,UACG;AAAA,MACH,IAAI,YAAY,WAAW,SAAS,8BAA8B,wBAAwB;AAAA,MAC1F,OAAO,IAAI,uCAAuC,WAAW;AAAA,MAE7D,MAAM,UAAU,WAAW,OAAO;AAAA,MAGlC,IAAI;AAAA,MACJ,IAAI,cAAgD;AAAA,MAEpD,IAAI,iBAAiB,QAAQ,iBAAiB,MAAM;AAAA,QAClD,OAAO;AAAA,MACT,EAAO,SAAI,OAAO,SAAS,KAAK,GAAG;AAAA,QAGjC,MAAM,mBAAmB,oBAAoB,KAAK;AAAA,QAClD,OAAO,MAAM,kCAAkC,kBAAkB;AAAA,QAGjE,OAAO,IAAI,KAAK,CAAC,KAAK,GAAU,EAAE,MAAM,iBAAiB,CAAC;AAAA,MAC5D,EAAO,SAAI,OAAO,UAAU,YAAY,UAAU,QAAS,MAAc,SAAS,MAAM;AAAA,QACtF,MAAM,SAAS;AAAA,QACf,IACE,EAAE,OAAO,iBAAiB,SAC1B,EAAE,OAAO,iBAAiB,SAC1B,CAAC,OAAO,SAAS,OAAO,KAAK,GAC7B;A
AAA,UACA,MAAM,IAAI,MAAM,yDAAyD;AAAA,QAC3E;AAAA,QAEA,IAAI,OAAO,SAAS,OAAO,KAAK,GAAG;AAAA,UAEjC,IAAI,WAAW,OAAO;AAAA,UACtB,IAAI,CAAC,UAAU;AAAA,YACb,WAAW,oBAAoB,OAAO,KAAK;AAAA,YAC3C,OAAO,MAAM,kCAAkC,UAAU;AAAA,UAC3D,EAAO;AAAA,YACL,OAAO,MAAM,6BAA6B,UAAU;AAAA;AAAA,UAItD,OAAO,IAAI,KAAK,CAAC,OAAO,KAAK,GAAU,EAAE,MAAM,SAAS,CAAC;AAAA,QAC3D,EAAO;AAAA,UACL,OAAO,OAAO;AAAA;AAAA,QAEhB,cAAc;AAAA,QACd,IAAI,OAAO,OAAO,UAAU,YAAY,OAAO,OAAO;AAAA,UACpD,YAAY,OAAO;AAAA,QACrB;AAAA,MACF,EAAO;AAAA,QACL,MAAM,IAAI,MACR,mLACF;AAAA;AAAA,MAGF,MAAM,OAAQ,KAAc,QAAQ;AAAA,MACpC,MAAM,WACH,KAAc,SACd,KAAK,SAAS,KAAK,KAAK,KAAK,SAAS,MAAM,IACzC,kBACA,KAAK,SAAS,KAAK,IACjB,kBACA,KAAK,SAAS,KAAK,IACjB,kBACA,KAAK,SAAS,MAAM,IAClB,mBACA;AAAA,MAEZ,MAAM,WAAW,IAAI;AAAA,MACrB,SAAS,OAAO,QAAQ,MAAM,QAAQ;AAAA,MACtC,SAAS,OAAO,SAAS,OAAO,SAAS,CAAC;AAAA,MAC1C,IAAI,aAAa;AAAA,QACf,IAAI,OAAO,YAAY,aAAa,UAAU;AAAA,UAC5C,SAAS,OAAO,YAAY,OAAO,YAAY,QAAQ,CAAC;AAAA,QAC1D;AAAA,QACA,IAAI,OAAO,YAAY,oBAAoB,UAAU;AAAA,UACnD,SAAS,OAAO,mBAAmB,OAAO,YAAY,eAAe,CAAC;AAAA,QACxE;AAAA,QACA,IAAI,OAAO,YAAY,WAAW,UAAU;AAAA,UAC1C,SAAS,OAAO,UAAU,OAAO,YAAY,MAAM,CAAC;AAAA,QACtD;AAAA,QACA,IAAI,OAAO,YAAY,gBAAgB,UAAU;AAAA,UAC/C,SAAS,OAAO,eAAe,OAAO,YAAY,WAAW,CAAC;AAAA,QAChE;AAAA,QACA,IAAI,MAAM,QAAQ,YAAY,sBAAsB,GAAG;AAAA,UACrD,WAAW,KAAK,YAAY,wBAAwB;AAAA,YAClD,SAAS,OAAO,6BAA6B,OAAO,CAAC,CAAC;AAAA,UACxD;AAAA,QACF;AAAA,MACF;AAAA,MAEA,IAAI;AAAA,QACF,MAAM,WAAW,MAAM,MAAM,GAAG,gCAAgC;AAAA,UAC9D,QAAQ;AAAA,UACR,SAAS;AAAA,eACJ,cAAc,OAAO;AAAA,UAC1B;AAAA,UACA,MAAM;AAAA,QACR,CAAC;AAAA,QAED,IAAI,CAAC,SAAS,IAAI;AAAA,UAChB,MAAM,IAAI,MAAM,+BAA+B,SAAS,UAAU,SAAS,YAAY;AAAA,QACzF;AAAA,QAEA,MAAM,OAAQ,MAAM,SAAS,KAAK;AAAA,QAClC,OAAO,KAAK,QAAQ;AAAA,QACpB,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,wBAAwB,SAAS;AAAA,QAC9C,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,iBAAiB,OAC1B,SACA,UACG;AAAA,MAEH,MAAM,UACJ,OAAO,UAAU,WAAW,EAAE,MAAM,MAAM,IAAK;AAAA,MAEjD,MAAM,gBACJ,QAAQ,SAAU,WAAW,SAAS,oBAAoB,iBAAiB;AAAA,MAC7E,OAAO,IAAI,wCAAwC,eAAe;AAAA,MAClE,IAAI;AAAA,QACF,MAAM,eAAe,MAAM,kBAAkB,SAAS,OAAO;AAAA,QAC7D,OAAO;AAAA,QACP,OAAO,OAAgB;AAAA,QACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACrE,OAAO,MAAM,4BAA4B,SAAS;AAAA,QAClD,MAAM;AAAA;AAAA;AAAA,KAGT,UAAU,eAAe,OAAO,SAAwB,WAAmC;AAAA,MAC1F,OAAO,0BAA0B,SAAS,QAAQ,UAAU,cAAc,aAAa;AAAA;AAAA,KAExF,UAAU,eAAe,OAAO,SAAwB,WAAmC;AAAA,MAC1F,OAAO,0BAA0B,SAAS,QAAQ,UAAU,cAAc,aAAa;AAAA;AAAA,EAE3F;AAAA,EACA,OAAO;AAAA,IACL;AAAA,MACE,MAAM;AAAA,MACN,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,MAAM,UAAU,WAAW,OAAO;AAAA,YAClC,MAAM,WAAW,MAAM,MAAM,GAAG,kBAAkB;AAAA,cAChD,SAAS;AAAA,gBACP,eAAe,UAAU,UAAU,OAAO;AAAA,cAC5C;AAAA,YACF,CAAC;AAAA,YACD,MAAM,OAAO,MAAM,SAAS,KAAK;AAAA,YACjC,OAAO,IACL,EAAE,MAAO,MAA+B,MAAM,UAAU,MAAM,GAC9D,kBACF;AAAA,YACA,IAAI,CAAC,SAAS,IAAI;AAAA,cAChB,MAAM,IAAI,MAAM,sCAAsC,SAAS,YAAY;AAAA,YAC7E;AAAA;AAAA,QAEJ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,YAAY,MAAM,QAAQ,SAAS,UAAU,gBAAgB;AAAA,gBACjE,MAAM;AAAA,cACR,CAAC;AAAA,cACD,OAAO,IAAI,EAAE,UAAU,GAAG,WAAW;AAAA,cACrC,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,iCAAiC,SAAS;AAAA,cACvD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AAAA,cACD,IAAI,KAAK,WAAW,GAAG;AAAA,gBACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AAAA,cACA,OAAO,IAAI,EAAE,KAAK,GAAG,gCAAgC;AAAA,cACrD,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,6BAA6B,SAAS;AAAA,cACnD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AA
AA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,OAAO,MAAM,QAAQ,SAAS,UAAU,YAAY;AAAA,gBACxD,QAAQ;AAAA,cACV,CAAC;AAAA,cACD,IAAI,KAAK,WAAW,GAAG;AAAA,gBACrB,MAAM,IAAI,MAAM,yBAAyB;AAAA,cAC3C;AAAA,cACA,OAAO,IAAI,EAAE,KAAK,GAAG,gCAAgC;AAAA,cACrD,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,6BAA6B,SAAS;AAAA,cACnD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,OAAO,IAAI,8BAA8B;AAAA,YACzC,IAAI;AAAA,cACF,MAAM,QAAQ,MAAM,QAAQ,SAAS,UAAU,OAAO;AAAA,gBACpD,QAAQ;AAAA,gBACR,GAAG;AAAA,gBACH,MAAM;AAAA,cACR,CAAC;AAAA,cACD,OAAO,IAAI,EAAE,MAAM,GAAG,sCAAsC;AAAA,cAC5D,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,mCAAmC,SAAS;AAAA,cACzD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,OAAO,IAAI,+BAA+B;AAAA,cAC1C,IAAI;AAAA,gBACF,MAAM,SAAS,MAAM,QAAQ,SAC3B,UAAU,mBACV,mLACF;AAAA,gBAEA,IACE,UACA,OAAO,WAAW,YAClB,WAAW,UACX,iBAAiB,QACjB;AAAA,kBACA,OAAO,IAAI,EAAE,OAAO,GAAG,mBAAmB;AAAA,gBAC5C,EAAO;AAAA,kBACL,OAAO,MAAM,4CAA4C,MAAM;AAAA;AAAA,gBAEjE,OAAO,GAAY;AAAA,gBACnB,MAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AAAA,gBACzD,OAAO,MAAM,oCAAoC,SAAS;AAAA;AAAA,cAE5D,OAAO,GAAY;AAAA,cACnB,MAAM,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC;AAAA,cACzD,OAAO,MAAM,2CAA2C,SAAS;AAAA;AAAA;AAAA,QAGvE;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,OAAO,IAAI,2BAA2B;AAAA,YACtC,IAAI;AAAA,cACF,MAAM,WAAW,MAAM,MACrB,+EACF;AAAA,cACA,MAAM,cAAc,MAAM,SAAS,YAAY;AAAA,cAC/C,MAAM,gBAAgB,MAAM,QAAQ,SAClC,UAAU,eACV,OAAO,KAAK,IAAI,WAAW,WAAW,CAAC,CACzC;AAAA,cACA,OAAO,IAAI,EAAE,cAAc,GAAG,mCAAmC;AAAA,cACjE,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,gCAAgC,SAAS;AAAA,cACtD,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,MAAM,SAAS;AAAA,YACf,MAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AAAA,YACjF,IAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AAAA,cACjD,MAAM,IAAI,MAAM,6DAA6D;AAAA,YAC/E;AAAA,YACA,OAAO,IAAI,EAAE,OAAO,GAAG,kBAAkB;AAAA;AAAA,QAE7C;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,MAAM,SAAS;AAAA,YACf,MAAM,SAAS,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AAAA,YACjF,MAAM,cAAc,MAAM,QAAQ,SAAS,UAAU,uBAAuB,EAAE,OAAO,CAAC;AAAA,YACtF,IAAI,gBAAgB,QAAQ;AAAA,cAC1B,MAAM,IAAI,MACR,mDAAmD,iBAAiB,cACtE;AAAA,YACF;AAAA,YACA,OAAO,IAAI,EAAE,YAAY,GAAG,cAAc;AAAA;AAAA,QAE9C;AAAA,QACA;AAAA,UACE,MAAM;AAAA,UACN,IAAI,OAAO,YAA2B;AAAA,YACpC,IAAI;AAAA,cACF,MAAM,WAAW,MAAM,kBAAkB,SAAS;AAAA,gBAChD,MAAM;AAAA,cACR,CAAC;AAAA,cACD,IAAI,CAAC,UAAU;AAAA,gBACb,MAAM,IAAI,MAAM,2BAA2B;AAAA,cAC7C;AAAA,cACA,OAAO,IAAI,+BAA+B;AAAA,cAC1C,OAAO,OAAgB;AAAA,cACvB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,cACrE,OAAO,MAAM,wCAAwC,SAAS;AAAA,cAC9D,MAAM;AAAA;AAAA;AAAA,QAGZ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAe;",
+ "debugId": "05D284FD8B26700A64756E2164756E21",
  "names": []
  }
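For orientation, the handlers embedded in the source above are reached through the runtime's model registry rather than called directly. The following is a minimal TypeScript usage sketch, assuming an IAgentRuntime that already has @elizaos/plugin-openai registered; the function name, variable names, and the voice value are illustrative and not taken from the diff.

import { ModelType, type IAgentRuntime } from "@elizaos/core";

// Sketch only: `runtime` is assumed to be an initialized agent runtime with this plugin loaded.
export async function transcribeAndSpeak(runtime: IAgentRuntime, audio: Buffer) {
  // A bare Buffer is accepted; the handler sniffs magic bytes to choose a MIME type
  // before wrapping the bytes in a Blob for the multipart upload.
  const text = await runtime.useModel(ModelType.TRANSCRIPTION, audio);

  // An options object is also accepted: the audio plus optional transcription parameters.
  const tuned = await runtime.useModel(ModelType.TRANSCRIPTION, {
    audio,
    language: "en",
    temperature: 0,
  });

  // TEXT_TO_SPEECH takes a plain string or per-call overrides and resolves to an audio stream.
  const speech = await runtime.useModel(ModelType.TEXT_TO_SPEECH, {
    text,
    voice: "alloy", // assumption: any voice supported by the configured TTS model
  });

  return { text, tuned, speech };
}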
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@elizaos/plugin-openai",
- "version": "1.5.14",
+ "version": "1.5.16",
  "type": "module",
  "main": "dist/cjs/index.node.cjs",
  "module": "dist/node/index.node.js",
@@ -44,7 +44,7 @@
  ],
  "dependencies": {
  "@ai-sdk/openai": "^2.0.32",
- "@elizaos/core": "^1.5.10",
+ "@elizaos/core": "^1.6.0-alpha.4",
  "ai": "^5.0.47",
  "js-tiktoken": "^1.0.21",
  "undici": "^7.16.0"
@@ -54,6 +54,9 @@
  "prettier": "3.6.2",
  "typescript": "^5.9.2"
  },
+ "peerDependencies": {
+ "zod": "^3.25.76 || ^4.1.8"
+ },
  "scripts": {
  "build": "bun run build.ts",
  "dev": "bun --hot build.ts",