@ai-sdk/openai 2.0.0-canary.12 → 2.0.0-canary.14
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +24 -0
- package/dist/index.d.mts +8 -26
- package/dist/index.d.ts +8 -26
- package/dist/index.js +200 -180
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +200 -180
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +31 -47
- package/dist/internal/index.d.ts +31 -47
- package/dist/internal/index.js +196 -174
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +195 -174
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
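
For orientation, a minimal usage sketch based on the createOpenAI export and the OpenAIProviderSettings fields embedded in the diffed sources below; the model IDs are illustrative and the canary API surface may differ between these releases.

import { createOpenAI } from '@ai-sdk/openai';

// Provider instance; when apiKey is omitted, loadApiKey (see openai-provider.ts)
// falls back to the OPENAI_API_KEY environment variable.
const openai = createOpenAI({
  baseURL: 'https://api.openai.com/v1', // default shown in the source
  compatibility: 'strict', // 'strict' for the official OpenAI API
});

// Model factories declared on the OpenAIProvider interface:
const chat = openai.chat('gpt-4o');
const embeddings = openai.textEmbeddingModel('text-embedding-3-small');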
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/openai-provider.ts","../src/openai-chat-language-model.ts","../src/convert-to-openai-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-finish-reason.ts","../src/openai-chat-options.ts","../src/openai-error.ts","../src/openai-prepare-tools.ts","../src/openai-completion-language-model.ts","../src/convert-to-openai-completion-prompt.ts","../src/openai-completion-options.ts","../src/openai-embedding-model.ts","../src/openai-embedding-options.ts","../src/openai-image-model.ts","../src/openai-image-settings.ts","../src/openai-tools.ts","../src/openai-transcription-model.ts","../src/responses/openai-responses-language-model.ts","../src/responses/convert-to-openai-responses-messages.ts","../src/responses/map-openai-responses-finish-reason.ts","../src/responses/openai-responses-prepare-tools.ts","../src/openai-speech-model.ts"],"sourcesContent":["export { createOpenAI, openai } from './openai-provider';\nexport type { OpenAIProvider, OpenAIProviderSettings } from './openai-provider';\nexport type { OpenAIResponsesProviderOptions } from './responses/openai-responses-language-model';\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n ProviderV2,\n TranscriptionModelV1,\n SpeechModelV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { OpenAIChatLanguageModel } from './openai-chat-language-model';\nimport { OpenAIChatModelId, OpenAIChatSettings } from './openai-chat-options';\nimport { OpenAICompletionLanguageModel } from './openai-completion-language-model';\nimport { OpenAICompletionModelId } from './openai-completion-options';\nimport { OpenAIEmbeddingModel } from './openai-embedding-model';\nimport {\n OpenAIEmbeddingModelId,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-options';\nimport { OpenAIImageModel } from './openai-image-model';\nimport {\n OpenAIImageModelId,\n OpenAIImageSettings,\n} from './openai-image-settings';\nimport { openaiTools } from './openai-tools';\nimport { OpenAITranscriptionModel } from './openai-transcription-model';\nimport { OpenAITranscriptionModelId } from './openai-transcription-settings';\nimport { OpenAIResponsesLanguageModel } from './responses/openai-responses-language-model';\nimport { OpenAIResponsesModelId } from './responses/openai-responses-settings';\nimport { OpenAISpeechModel } from './openai-speech-model';\nimport { OpenAISpeechModelId } from './openai-speech-options';\n\nexport interface OpenAIProvider extends ProviderV2 {\n (modelId: 'gpt-3.5-turbo-instruct'): OpenAICompletionLanguageModel;\n (modelId: OpenAIChatModelId, settings?: OpenAIChatSettings): LanguageModelV2;\n\n /**\nCreates an OpenAI model for text generation.\n */\n languageModel(\n modelId: 'gpt-3.5-turbo-instruct',\n ): OpenAICompletionLanguageModel;\n languageModel(\n modelId: OpenAIChatModelId,\n settings?: OpenAIChatSettings,\n ): LanguageModelV2;\n\n /**\nCreates an OpenAI chat model for text generation.\n */\n chat(\n modelId: OpenAIChatModelId,\n settings?: OpenAIChatSettings,\n ): LanguageModelV2;\n\n /**\nCreates an OpenAI responses API model for text generation.\n */\n responses(modelId: OpenAIResponsesModelId): LanguageModelV2;\n\n /**\nCreates an OpenAI completion model for text generation.\n */\n completion(modelId: OpenAICompletionModelId): LanguageModelV2;\n\n /**\nCreates a model for text embeddings.\n */\n embedding(\n modelId: OpenAIEmbeddingModelId,\n settings?: 
OpenAIEmbeddingSettings,\n ): EmbeddingModelV2<string>;\n\n /**\nCreates a model for text embeddings.\n\n@deprecated Use `textEmbeddingModel` instead.\n */\n textEmbedding(\n modelId: OpenAIEmbeddingModelId,\n settings?: OpenAIEmbeddingSettings,\n ): EmbeddingModelV2<string>;\n\n /**\nCreates a model for text embeddings.\n */\n textEmbeddingModel(\n modelId: OpenAIEmbeddingModelId,\n settings?: OpenAIEmbeddingSettings,\n ): EmbeddingModelV2<string>;\n\n /**\nCreates a model for image generation.\n */\n image(\n modelId: OpenAIImageModelId,\n settings?: OpenAIImageSettings,\n ): ImageModelV2;\n\n /**\nCreates a model for image generation.\n */\n imageModel(\n modelId: OpenAIImageModelId,\n settings?: OpenAIImageSettings,\n ): ImageModelV2;\n\n /**\nCreates a model for transcription.\n */\n transcription(modelId: OpenAITranscriptionModelId): TranscriptionModelV1;\n\n /**\nCreates a model for speech generation.\n */\n speech(modelId: OpenAISpeechModelId): SpeechModelV1;\n\n /**\nOpenAI-specific tools.\n */\n tools: typeof openaiTools;\n}\n\nexport interface OpenAIProviderSettings {\n /**\nBase URL for the OpenAI API calls.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nOpenAI Organization.\n */\n organization?: string;\n\n /**\nOpenAI project.\n */\n project?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nOpenAI compatibility mode. Should be set to `strict` when using the OpenAI API,\nand `compatible` when using 3rd party providers. In `compatible` mode, newer\ninformation such as streamOptions are not being sent. Defaults to 'compatible'.\n */\n compatibility?: 'strict' | 'compatible';\n\n /**\nProvider name. Overrides the `openai` default name for 3rd party providers.\n */\n name?: string;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAI provider instance.\n */\nexport function createOpenAI(\n options: OpenAIProviderSettings = {},\n): OpenAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.openai.com/v1';\n\n // we default to compatible, because strict breaks providers like Groq:\n const compatibility = options.compatibility ?? 'compatible';\n\n const providerName = options.name ?? 
'openai';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'OPENAI_API_KEY',\n description: 'OpenAI',\n })}`,\n 'OpenAI-Organization': options.organization,\n 'OpenAI-Project': options.project,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: OpenAIChatModelId,\n settings: OpenAIChatSettings = {},\n ) =>\n new OpenAIChatLanguageModel(modelId, settings, {\n provider: `${providerName}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n compatibility,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: OpenAICompletionModelId) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: `${providerName}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n compatibility,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings = {},\n ) =>\n new OpenAIEmbeddingModel(modelId, settings, {\n provider: `${providerName}.embedding`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createImageModel = (\n modelId: OpenAIImageModelId,\n settings: OpenAIImageSettings = {},\n ) =>\n new OpenAIImageModel(modelId, settings, {\n provider: `${providerName}.image`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: OpenAITranscriptionModelId) =>\n new OpenAITranscriptionModel(modelId, {\n provider: `${providerName}.transcription`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: OpenAISpeechModelId) =>\n new OpenAISpeechModel(modelId, {\n provider: `${providerName}.speech`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: OpenAIChatModelId | OpenAICompletionModelId,\n settings?: OpenAIChatSettings,\n ) => {\n if (new.target) {\n throw new Error(\n 'The OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n if (modelId === 'gpt-3.5-turbo-instruct') {\n return createCompletionModel(modelId);\n }\n\n return createChatModel(modelId, settings as OpenAIChatSettings);\n };\n\n const createResponsesModel = (modelId: OpenAIResponsesModelId) => {\n return new OpenAIResponsesLanguageModel(modelId, {\n provider: `${providerName}.responses`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n };\n\n const provider = function (\n modelId: OpenAIChatModelId | OpenAICompletionModelId,\n settings?: OpenAIChatSettings,\n ) {\n return createLanguageModel(modelId, settings);\n };\n\n provider.languageModel = createLanguageModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.responses = createResponsesModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n\n provider.transcription = createTranscriptionModel;\n provider.transcriptionModel = createTranscriptionModel;\n\n provider.speech = createSpeechModel;\n provider.speechModel = createSpeechModel;\n\n provider.tools = openaiTools;\n\n return provider as OpenAIProvider;\n}\n\n/**\nDefault OpenAI provider instance. 
It uses 'strict' compatibility mode.\n */\nexport const openai = createOpenAI({\n compatibility: 'strict', // strict for OpenAI API\n});\n","import {\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAIChatMessages } from './convert-to-openai-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAIChatModelId,\n OpenAIChatSettings,\n openaiProviderOptions,\n} from './openai-chat-options';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\nimport { prepareTools } from './openai-prepare-tools';\n\ntype OpenAIChatConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAIChatModelId;\n readonly settings: OpenAIChatSettings;\n\n private readonly config: OpenAIChatConfig;\n\n constructor(\n modelId: OpenAIChatModelId,\n settings: OpenAIChatSettings,\n config: OpenAIChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n async getSupportedUrls(): Promise<Record<string, RegExp[]>> {\n return {\n 'image/*': [/^https?:\\/\\/.*$/],\n };\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const openaiOptions =\n (await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.settings.structuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(\n {\n prompt,\n systemMessageMode: getSystemMessageMode(this.modelId),\n },\n );\n\n warnings.push(...messageWarnings);\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n logit_bias: openaiOptions.logitBias,\n user: openaiOptions.user,\n parallel_tool_calls: openaiOptions.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? 
// TODO convert into provider option\n this.settings.structuredOutputs && responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: true,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n stop: stopSequences,\n seed,\n\n // openai specific settings:\n // TODO remove in next major version; we auto-map maxOutputTokens now\n max_completion_tokens: openaiOptions.maxCompletionTokens,\n store: openaiOptions.store,\n metadata: openaiOptions.metadata,\n prediction: openaiOptions.prediction,\n reasoning_effort: openaiOptions.reasoningEffort,\n\n // messages:\n messages,\n };\n\n if (isReasoningModel(this.modelId)) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n if (baseArgs.frequency_penalty != null) {\n baseArgs.frequency_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n details: 'frequencyPenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.presence_penalty != null) {\n baseArgs.presence_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n details: 'presencePenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.logit_bias != null) {\n baseArgs.logit_bias = undefined;\n warnings.push({\n type: 'other',\n message: 'logitBias is not supported for reasoning models',\n });\n }\n\n // reasoning models use max_completion_tokens instead of max_tokens:\n if (baseArgs.max_tokens != null) {\n if (baseArgs.max_completion_tokens == null) {\n baseArgs.max_completion_tokens = baseArgs.max_tokens;\n }\n baseArgs.max_tokens = undefined;\n }\n } else if (\n this.modelId.startsWith('gpt-4o-search-preview') ||\n this.modelId.startsWith('gpt-4o-mini-search-preview')\n ) {\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details:\n 'temperature is not supported for the search preview models and has been removed.',\n });\n }\n }\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n structuredOutputs: this.settings.structuredOutputs ?? 
false,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n for (const toolCall of choice.message.tool_calls ?? []) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n });\n }\n\n // provider metadata:\n const completionTokenDetails = response.usage?.completion_tokens_details;\n const promptTokenDetails = response.usage?.prompt_tokens_details;\n const providerMetadata: SharedV2ProviderMetadata = { openai: {} };\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata.openai.acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata.openai.rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage?.prompt_tokens ?? undefined,\n outputTokens: response.usage?.completion_tokens ?? undefined,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n providerMetadata,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let isFirstChunk = true;\n\n const providerMetadata: SharedV2ProviderMetadata = { openai: {} };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.inputTokens = prompt_tokens ?? undefined;\n usage.outputTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n providerMetadata.openai.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n providerMetadata.openai.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n // Tool call start. 
OpenAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n }),\n openaiErrorDataSchema,\n]);\n\nfunction isReasoningModel(modelId: string) {\n return modelId.startsWith('o');\n}\n\nfunction isAudioModel(modelId: string) {\n return modelId.startsWith('gpt-4o-audio-preview');\n}\n\nfunction getSystemMessageMode(modelId: string) {\n if (!isReasoningModel(modelId)) {\n return 'system';\n }\n\n return (\n reasoningModels[modelId as keyof typeof reasoningModels]\n ?.systemMessageMode ?? 
'developer'\n );\n}\n\nconst reasoningModels = {\n 'o1-mini': {\n systemMessageMode: 'remove',\n },\n 'o1-mini-2024-09-12': {\n systemMessageMode: 'remove',\n },\n 'o1-preview': {\n systemMessageMode: 'remove',\n },\n 'o1-preview-2024-09-12': {\n systemMessageMode: 'remove',\n },\n 'o3-mini': {\n systemMessageMode: 'developer',\n },\n 'o3-mini-2025-01-31': {\n systemMessageMode: 'developer',\n },\n} as const;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIChatPrompt } from './openai-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToOpenAIChatMessages({\n prompt,\n systemMessageMode = 'system',\n}: {\n prompt: LanguageModelV2Prompt;\n systemMessageMode?: 'system' | 'developer' | 'remove';\n}): {\n messages: OpenAIChatPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIChatPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({ role: 'user', content: content[0].text });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n },\n };\n } else if (part.mediaType.startsWith('audio/')) {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'audio file parts with URLs',\n });\n }\n\n switch (part.mediaType) {\n case 'audio/wav': {\n return {\n type: 'input_audio',\n input_audio: {\n data: convertToBase64(part.data),\n format: 'wav',\n },\n };\n }\n case 'audio/mp3':\n case 'audio/mpeg': {\n return {\n type: 'input_audio',\n input_audio: {\n data: convertToBase64(part.data),\n format: 'mp3',\n },\n };\n }\n\n default: {\n throw new UnsupportedFunctionalityError({\n functionality: `audio content parts with media type ${part.mediaType}`,\n });\n }\n }\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'file',\n file: {\n filename: part.filename ?? 
`part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n },\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\n\n// https://platform.openai.com/docs/models\nexport type OpenAIChatModelId =\n | 'o1'\n | 'o1-2024-12-17'\n | 'o1-mini'\n | 'o1-mini-2024-09-12'\n | 'o1-preview'\n | 'o1-preview-2024-09-12'\n | 'o3-mini'\n | 'o3-mini-2025-01-31'\n | 'o3'\n | 'o3-2025-04-16'\n | 'o4-mini'\n | 'o4-mini-2025-04-16'\n | 'gpt-4.1'\n | 'gpt-4.1-2025-04-14'\n | 'gpt-4.1-mini'\n | 'gpt-4.1-mini-2025-04-14'\n | 'gpt-4.1-nano'\n | 'gpt-4.1-nano-2025-04-14'\n | 'gpt-4o'\n | 'gpt-4o-2024-05-13'\n | 'gpt-4o-2024-08-06'\n | 'gpt-4o-2024-11-20'\n | 'gpt-4o-audio-preview'\n | 'gpt-4o-audio-preview-2024-10-01'\n | 'gpt-4o-audio-preview-2024-12-17'\n | 'gpt-4o-search-preview'\n | 'gpt-4o-search-preview-2025-03-11'\n | 'gpt-4o-mini-search-preview'\n | 'gpt-4o-mini-search-preview-2025-03-11'\n | 'gpt-4o-mini'\n | 'gpt-4o-mini-2024-07-18'\n | 'gpt-4-turbo'\n | 'gpt-4-turbo-2024-04-09'\n | 'gpt-4-turbo-preview'\n | 'gpt-4-0125-preview'\n | 'gpt-4-1106-preview'\n | 'gpt-4'\n | 'gpt-4-0613'\n | 'gpt-4.5-preview'\n | 'gpt-4.5-preview-2025-02-27'\n | 'gpt-3.5-turbo-0125'\n | 'gpt-3.5-turbo'\n | 'gpt-3.5-turbo-1106'\n | 'chatgpt-4o-latest'\n | (string & {});\n\nexport const openaiProviderOptions = z.object({\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.coerce.number(), z.number()).optional(),\n\n /**\n * Whether to enable parallel function 
calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help OpenAI to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.enum(['low', 'medium', 'high']).optional(),\n\n /**\n * Maximum number of completion tokens to generate. Useful for reasoning models.\n */\n maxCompletionTokens: z.number().optional(),\n\n /**\n * Whether to enable persistence in responses API.\n */\n store: z.boolean().optional(),\n\n /**\n * Metadata to associate with the request.\n */\n metadata: z.record(z.string()).optional(),\n\n /**\n * Parameters for prediction mode.\n */\n prediction: z.record(z.any()).optional(),\n});\n\nexport type OpenAIProviderOptions = z.infer<typeof openaiProviderOptions>;\n\nexport interface OpenAIChatSettings {\n /**\nWhether to use structured outputs. Defaults to false.\n\nWhen enabled, tool calls and object generation will be strict and follow the provided schema.\n*/\n structuredOutputs?: boolean;\n}\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const openaiErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAIErrorData = z.infer<typeof openaiErrorDataSchema>;\n\nexport const openaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: openaiErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n JSONSchema7,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n structuredOutputs,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n structuredOutputs: boolean;\n}): {\n tools?: {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict?: boolean;\n };\n }[];\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'function'; function: { name: string } };\n\n toolWarnings: Array<LanguageModelV2CallWarning>;\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict: boolean | undefined;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: structuredOutputs ? 
true : undefined,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAICompletionModelId,\n openaiCompletionProviderOptions,\n} from './openai-completion-options';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\n\ntype OpenAICompletionConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAICompletionLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAICompletionModelId;\n\n private readonly config: OpenAICompletionConfig;\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n constructor(\n modelId: OpenAICompletionModelId,\n config: OpenAICompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n async getSupportedUrls(): Promise<Record<string, RegExp[]>> {\n return {\n // no supported urls for completion models\n };\n }\n\n private async getArgs({\n inputFormat,\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n tools,\n toolChoice,\n seed,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const openaiOptions = {\n ...(await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiCompletionProviderOptions,\n })),\n ...(await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompletionProviderOptions,\n })),\n };\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n 
}\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: openaiOptions.echo,\n logit_bias: openaiOptions.logitBias,\n suffix: openaiOptions.suffix,\n user: openaiOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n return {\n content: [{ type: 'text', text: choice.text }],\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n },\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiCompletionChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiCompletionChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text',\n text: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n openaiErrorDataSchema,\n]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the 
user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { z } from 'zod';\n\n// https://platform.openai.com/docs/models\nexport type OpenAICompletionModelId = 'gpt-3.5-turbo-instruct' | (string & {});\n\nexport const openaiCompletionProviderOptions = z.object({\n /**\nEcho back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\nModify the likelihood of specified tokens appearing in the completion.\n\nAccepts a JSON object that maps tokens (specified by their token ID in\nthe GPT tokenizer) to an associated bias value from -100 to 100. You\ncan use this tokenizer tool to convert text to token IDs. Mathematically,\nthe bias is added to the logits generated by the model prior to sampling.\nThe exact effect will vary per model, but values between -1 and 1 should\ndecrease or increase likelihood of selection; values like -100 or 100\nshould result in a ban or exclusive selection of the relevant token.\n\nAs an example, you can pass {\"50256\": -100} to prevent the <|endoftext|>\ntoken from being generated.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\nThe suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\nA unique identifier representing your end-user, which can help OpenAI to\nmonitor and detect abuse. 
Learn more.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompletionProviderOptions = z.infer<\n typeof openaiCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport {\n OpenAIEmbeddingModelId,\n openaiEmbeddingProviderOptions,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-options';\nimport { openaiFailedResponseHandler } from './openai-error';\n\nexport class OpenAIEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: OpenAIEmbeddingModelId;\n\n private readonly config: OpenAIConfig;\n private readonly settings: OpenAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.settings.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings,\n config: OpenAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n // Parse provider options\n const openaiOptions =\n (await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiEmbeddingProviderOptions,\n })) ?? {};\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: openaiOptions.dimensions,\n user: openaiOptions.user,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { z } from 'zod';\n\nexport type OpenAIEmbeddingModelId =\n | 'text-embedding-3-small'\n | 'text-embedding-3-large'\n | 'text-embedding-ada-002'\n | (string & {});\n\nexport interface OpenAIEmbeddingSettings {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n}\n\nexport const openaiEmbeddingProviderOptions = z.object({\n /**\nThe number of dimensions the resulting output embeddings should have.\nOnly supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\nA unique identifier representing your end-user, which can help OpenAI to\nmonitor and detect abuse. Learn more.\n*/\n user: z.string().optional(),\n});\n\nexport type OpenAIEmbeddingProviderOptions = z.infer<\n typeof openaiEmbeddingProviderOptions\n>;\n","import { ImageModelV2, ImageModelV2CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAIImageModelId,\n OpenAIImageSettings,\n modelMaxImagesPerCall,\n hasDefaultResponseFormat,\n} from './openai-image-settings';\n\ninterface OpenAIImageModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class OpenAIImageModel implements ImageModelV2 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return (\n this.settings.maxImagesPerCall ?? modelMaxImagesPerCall[this.modelId] ?? 1\n );\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAIImageModelId,\n private readonly settings: OpenAIImageSettings,\n private readonly config: OpenAIImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV2['doGenerate']>>\n > {\n const warnings: Array<ImageModelV2CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n ...(!hasDefaultResponseFormat.has(this.modelId)\n ? 
{ response_format: 'b64_json' }\n : {}),\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","export type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | (string & {});\n\n// https://platform.openai.com/docs/guides/images\nexport const modelMaxImagesPerCall: Record<OpenAIImageModelId, number> = {\n 'dall-e-3': 1,\n 'dall-e-2': 10,\n 'gpt-image-1': 10,\n};\n\nexport const hasDefaultResponseFormat = new Set(['gpt-image-1']);\n\nexport interface OpenAIImageSettings {\n /**\nOverride the maximum number of images per call (default is dependent on the\nmodel, or 1 for an unknown model).\n */\n maxImagesPerCall?: number;\n}\n","import { z } from 'zod';\n\nconst WebSearchPreviewParameters = z.object({});\n\nfunction webSearchPreviewTool({\n searchContextSize,\n userLocation,\n}: {\n searchContextSize?: 'low' | 'medium' | 'high';\n userLocation?: {\n type?: 'approximate';\n city?: string;\n region?: string;\n country?: string;\n timezone?: string;\n };\n} = {}): {\n type: 'provider-defined';\n id: 'openai.web_search_preview';\n args: {};\n parameters: typeof WebSearchPreviewParameters;\n} {\n return {\n type: 'provider-defined',\n id: 'openai.web_search_preview',\n args: {\n searchContextSize,\n userLocation,\n },\n parameters: WebSearchPreviewParameters,\n };\n}\n\nexport const openaiTools = {\n webSearchPreview: webSearchPreviewTool,\n};\n","import {\n TranscriptionModelV1,\n TranscriptionModelV1CallOptions,\n TranscriptionModelV1CallWarning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertBase64ToUint8Array,\n createJsonResponseHandler,\n parseProviderOptions,\n postFormDataToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAITranscriptionModelId,\n OpenAITranscriptionModelOptions,\n} from './openai-transcription-settings';\n\n// https://platform.openai.com/docs/api-reference/audio/createTranscription\nconst openAIProviderOptionsSchema = z.object({\n include: z.array(z.string()).nullish(),\n language: z.string().nullish(),\n prompt: z.string().nullish(),\n temperature: z.number().min(0).max(1).nullish().default(0),\n timestampGranularities: z\n .array(z.enum(['word', 'segment']))\n .nullish()\n .default(['segment']),\n});\n\nexport type OpenAITranscriptionCallOptions = Omit<\n TranscriptionModelV1CallOptions,\n 'providerOptions'\n> & {\n providerOptions?: {\n openai?: z.infer<typeof openAIProviderOptionsSchema>;\n };\n};\n\ninterface OpenAITranscriptionModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\n// https://platform.openai.com/docs/guides/speech-to-text#supported-languages\nconst languageMap = {\n afrikaans: 'af',\n arabic: 'ar',\n armenian: 'hy',\n azerbaijani: 'az',\n belarusian: 'be',\n bosnian: 'bs',\n bulgarian: 'bg',\n catalan: 'ca',\n chinese: 'zh',\n croatian: 'hr',\n czech: 'cs',\n danish: 
'da',\n dutch: 'nl',\n english: 'en',\n estonian: 'et',\n finnish: 'fi',\n french: 'fr',\n galician: 'gl',\n german: 'de',\n greek: 'el',\n hebrew: 'he',\n hindi: 'hi',\n hungarian: 'hu',\n icelandic: 'is',\n indonesian: 'id',\n italian: 'it',\n japanese: 'ja',\n kannada: 'kn',\n kazakh: 'kk',\n korean: 'ko',\n latvian: 'lv',\n lithuanian: 'lt',\n macedonian: 'mk',\n malay: 'ms',\n marathi: 'mr',\n maori: 'mi',\n nepali: 'ne',\n norwegian: 'no',\n persian: 'fa',\n polish: 'pl',\n portuguese: 'pt',\n romanian: 'ro',\n russian: 'ru',\n serbian: 'sr',\n slovak: 'sk',\n slovenian: 'sl',\n spanish: 'es',\n swahili: 'sw',\n swedish: 'sv',\n tagalog: 'tl',\n tamil: 'ta',\n thai: 'th',\n turkish: 'tr',\n ukrainian: 'uk',\n urdu: 'ur',\n vietnamese: 'vi',\n welsh: 'cy',\n};\n\nexport class OpenAITranscriptionModel implements TranscriptionModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAITranscriptionModelId,\n private readonly config: OpenAITranscriptionModelConfig,\n ) {}\n\n private async getArgs({\n audio,\n mediaType,\n providerOptions,\n }: OpenAITranscriptionCallOptions) {\n const warnings: TranscriptionModelV1CallWarning[] = [];\n\n // Parse provider options\n const openAIOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openAIProviderOptionsSchema,\n });\n\n // Create form data with base fields\n const formData = new FormData();\n const blob =\n audio instanceof Uint8Array\n ? new Blob([audio])\n : new Blob([convertBase64ToUint8Array(audio)]);\n\n formData.append('model', this.modelId);\n formData.append('file', new File([blob], 'audio', { type: mediaType }));\n\n // Add provider-specific options\n if (openAIOptions) {\n const transcriptionModelOptions: OpenAITranscriptionModelOptions = {\n include: openAIOptions.include ?? undefined,\n language: openAIOptions.language ?? undefined,\n prompt: openAIOptions.prompt ?? undefined,\n temperature: openAIOptions.temperature ?? undefined,\n timestamp_granularities:\n openAIOptions.timestampGranularities ?? undefined,\n };\n\n for (const key in transcriptionModelOptions) {\n const value =\n transcriptionModelOptions[\n key as keyof OpenAITranscriptionModelOptions\n ];\n if (value !== undefined) {\n formData.append(key, String(value));\n }\n }\n }\n\n return {\n formData,\n warnings,\n };\n }\n\n async doGenerate(\n options: OpenAITranscriptionCallOptions,\n ): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { formData, warnings } = await this.getArgs(options);\n\n const {\n value: response,\n responseHeaders,\n rawValue: rawResponse,\n } = await postFormDataToApi({\n url: this.config.url({\n path: '/audio/transcriptions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n formData,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTranscriptionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const language =\n response.language != null && response.language in languageMap\n ? languageMap[response.language as keyof typeof languageMap]\n : undefined;\n\n return {\n text: response.text,\n segments:\n response.words?.map(word => ({\n text: word.word,\n startSecond: word.start,\n endSecond: word.end,\n })) ?? 
[],\n language,\n durationInSeconds: response.duration ?? undefined,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n\nconst openaiTranscriptionResponseSchema = z.object({\n text: z.string(),\n language: z.string().nullish(),\n duration: z.number().nullish(),\n words: z\n .array(\n z.object({\n word: z.string(),\n start: z.number(),\n end: z.number(),\n }),\n )\n .nullish(),\n});\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from '../openai-config';\nimport { openaiFailedResponseHandler } from '../openai-error';\nimport { convertToOpenAIResponsesMessages } from './convert-to-openai-responses-messages';\nimport { mapOpenAIResponseFinishReason } from './map-openai-responses-finish-reason';\nimport { prepareResponsesTools } from './openai-responses-prepare-tools';\nimport { OpenAIResponsesModelId } from './openai-responses-settings';\n\nexport class OpenAIResponsesLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAIResponsesModelId;\n\n private readonly config: OpenAIConfig;\n\n constructor(modelId: OpenAIResponsesModelId, config: OpenAIConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async getSupportedUrls(): Promise<Record<string, RegExp[]>> {\n return {\n 'image/*': [/^https?:\\/\\/.*$/],\n };\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n maxOutputTokens,\n temperature,\n stopSequences,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n seed,\n prompt,\n providerOptions,\n tools,\n toolChoice,\n responseFormat,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const modelConfig = getResponsesModelConfig(this.modelId);\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'stopSequences' });\n }\n\n const { messages, warnings: messageWarnings } =\n convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode: modelConfig.systemMessageMode,\n });\n\n warnings.push(...messageWarnings);\n\n const openaiOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiResponsesProviderOptionsSchema,\n });\n\n const isStrict = openaiOptions?.strictSchemas ?? true;\n\n const baseArgs = {\n model: this.modelId,\n input: messages,\n temperature,\n top_p: topP,\n max_output_tokens: maxOutputTokens,\n\n ...(responseFormat?.type === 'json' && {\n text: {\n format:\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n strict: isStrict,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n schema: responseFormat.schema,\n }\n : { type: 'json_object' },\n },\n }),\n\n // provider options:\n metadata: openaiOptions?.metadata,\n parallel_tool_calls: openaiOptions?.parallelToolCalls,\n previous_response_id: openaiOptions?.previousResponseId,\n store: openaiOptions?.store,\n user: openaiOptions?.user,\n instructions: openaiOptions?.instructions,\n\n // model-specific settings:\n ...(modelConfig.isReasoningModel &&\n (openaiOptions?.reasoningEffort != null ||\n openaiOptions?.reasoningSummary != null) && {\n reasoning: {\n ...(openaiOptions?.reasoningEffort != null && {\n effort: openaiOptions.reasoningEffort,\n }),\n ...(openaiOptions?.reasoningSummary != null && {\n summary: openaiOptions.reasoningSummary,\n }),\n },\n }),\n ...(modelConfig.requiredAutoTruncation && {\n truncation: 'auto',\n }),\n };\n\n if (modelConfig.isReasoningModel) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareResponsesTools({\n tools,\n toolChoice,\n strict: isStrict,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n output: z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n role: z.literal('assistant'),\n content: z.array(\n z.object({\n type: z.literal('output_text'),\n text: z.string(),\n annotations: z.array(\n z.object({\n type: z.literal('url_citation'),\n start_index: z.number(),\n end_index: z.number(),\n url: z.string(),\n title: z.string(),\n }),\n ),\n }),\n ),\n }),\n z.object({\n type: z.literal('function_call'),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n z.object({\n type: z.literal('web_search_call'),\n }),\n z.object({\n type: z.literal('computer_call'),\n }),\n z.object({\n type: z.literal('reasoning'),\n summary: z.array(\n z.object({\n type: z.literal('summary_text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n incomplete_details: z.object({ reason: z.string() }).nullable(),\n usage: usageSchema,\n }),\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const content: Array<LanguageModelV2Content> = [];\n\n // map response content to content array\n for (const part of response.output) {\n 
switch (part.type) {\n case 'reasoning': {\n content.push({\n type: 'reasoning',\n text: part.summary.map(summary => summary.text).join(),\n });\n break;\n }\n\n case 'message': {\n for (const contentPart of part.content) {\n content.push({\n type: 'text',\n text: contentPart.text,\n });\n\n for (const annotation of contentPart.annotations) {\n content.push({\n type: 'source',\n sourceType: 'url',\n id: this.config.generateId?.() ?? generateId(),\n url: annotation.url,\n title: annotation.title,\n });\n }\n }\n break;\n }\n\n case 'function_call': {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: part.call_id,\n toolName: part.name,\n args: part.arguments,\n });\n break;\n }\n }\n }\n\n return {\n content,\n finishReason: mapOpenAIResponseFinishReason({\n finishReason: response.incomplete_details?.reason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n },\n request: { body },\n response: {\n id: response.id,\n timestamp: new Date(response.created_at * 1000),\n modelId: response.model,\n headers: responseHeaders,\n body: rawResponse,\n },\n providerMetadata: {\n openai: {\n responseId: response.id,\n cachedPromptTokens:\n response.usage.input_tokens_details?.cached_tokens ?? null,\n reasoningTokens:\n response.usage.output_tokens_details?.reasoning_tokens ?? null,\n },\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...body,\n stream: true,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiResponsesChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const self = this;\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let cachedPromptTokens: number | null = null;\n let reasoningTokens: number | null = null;\n let responseId: string | null = null;\n const ongoingToolCalls: Record<\n number,\n { toolName: string; toolCallId: string } | undefined\n > = {};\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiResponsesChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isResponseOutputItemAddedChunk(value)) {\n if (value.item.type === 'function_call') {\n ongoingToolCalls[value.output_index] = {\n toolName: value.item.name,\n toolCallId: value.item.call_id,\n };\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n argsTextDelta: value.item.arguments,\n });\n }\n } else if 
(isResponseFunctionCallArgumentsDeltaChunk(value)) {\n const toolCall = ongoingToolCalls[value.output_index];\n\n if (toolCall != null) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: value.delta,\n });\n }\n } else if (isResponseCreatedChunk(value)) {\n responseId = value.response.id;\n controller.enqueue({\n type: 'response-metadata',\n id: value.response.id,\n timestamp: new Date(value.response.created_at * 1000),\n modelId: value.response.model,\n });\n } else if (isTextDeltaChunk(value)) {\n controller.enqueue({\n type: 'text',\n text: value.delta,\n });\n } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {\n controller.enqueue({\n type: 'reasoning',\n text: value.delta,\n });\n } else if (\n isResponseOutputItemDoneChunk(value) &&\n value.item.type === 'function_call'\n ) {\n ongoingToolCalls[value.output_index] = undefined;\n hasToolCalls = true;\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n args: value.item.arguments,\n });\n } else if (isResponseFinishedChunk(value)) {\n finishReason = mapOpenAIResponseFinishReason({\n finishReason: value.response.incomplete_details?.reason,\n hasToolCalls,\n });\n usage.inputTokens = value.response.usage.input_tokens;\n usage.outputTokens = value.response.usage.output_tokens;\n cachedPromptTokens =\n value.response.usage.input_tokens_details?.cached_tokens ??\n cachedPromptTokens;\n reasoningTokens =\n value.response.usage.output_tokens_details?.reasoning_tokens ??\n reasoningTokens;\n } else if (isResponseAnnotationAddedChunk(value)) {\n controller.enqueue({\n type: 'source',\n sourceType: 'url',\n id: self.config.generateId?.() ?? 
generateId(),\n url: value.annotation.url,\n title: value.annotation.title,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n ...((cachedPromptTokens != null || reasoningTokens != null) && {\n providerMetadata: {\n openai: {\n responseId,\n cachedPromptTokens,\n reasoningTokens,\n },\n },\n }),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n input_tokens: z.number(),\n input_tokens_details: z\n .object({ cached_tokens: z.number().nullish() })\n .nullish(),\n output_tokens: z.number(),\n output_tokens_details: z\n .object({ reasoning_tokens: z.number().nullish() })\n .nullish(),\n});\n\nconst textDeltaChunkSchema = z.object({\n type: z.literal('response.output_text.delta'),\n delta: z.string(),\n});\n\nconst responseFinishedChunkSchema = z.object({\n type: z.enum(['response.completed', 'response.incomplete']),\n response: z.object({\n incomplete_details: z.object({ reason: z.string() }).nullish(),\n usage: usageSchema,\n }),\n});\n\nconst responseCreatedChunkSchema = z.object({\n type: z.literal('response.created'),\n response: z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n }),\n});\n\nconst responseOutputItemDoneSchema = z.object({\n type: z.literal('response.output_item.done'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n status: z.literal('completed'),\n }),\n ]),\n});\n\nconst responseFunctionCallArgumentsDeltaSchema = z.object({\n type: z.literal('response.function_call_arguments.delta'),\n item_id: z.string(),\n output_index: z.number(),\n delta: z.string(),\n});\n\nconst responseOutputItemAddedSchema = z.object({\n type: z.literal('response.output_item.added'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n ]),\n});\n\nconst responseAnnotationAddedSchema = z.object({\n type: z.literal('response.output_text.annotation.added'),\n annotation: z.object({\n type: z.literal('url_citation'),\n url: z.string(),\n title: z.string(),\n }),\n});\n\nconst responseReasoningSummaryTextDeltaSchema = z.object({\n type: z.literal('response.reasoning_summary_text.delta'),\n item_id: z.string(),\n output_index: z.number(),\n summary_index: z.number(),\n delta: z.string(),\n});\n\nconst openaiResponsesChunkSchema = z.union([\n textDeltaChunkSchema,\n responseFinishedChunkSchema,\n responseCreatedChunkSchema,\n responseOutputItemDoneSchema,\n responseFunctionCallArgumentsDeltaSchema,\n responseOutputItemAddedSchema,\n responseAnnotationAddedSchema,\n responseReasoningSummaryTextDeltaSchema,\n z.object({ type: z.string() }).passthrough(), // fallback for unknown chunks\n]);\n\nfunction isTextDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof textDeltaChunkSchema> {\n return chunk.type === 'response.output_text.delta';\n}\n\nfunction isResponseOutputItemDoneChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemDoneSchema> {\n return chunk.type === 'response.output_item.done';\n}\n\nfunction isResponseFinishedChunk(\n chunk: 
z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFinishedChunkSchema> {\n return (\n chunk.type === 'response.completed' || chunk.type === 'response.incomplete'\n );\n}\n\nfunction isResponseCreatedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseCreatedChunkSchema> {\n return chunk.type === 'response.created';\n}\n\nfunction isResponseFunctionCallArgumentsDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFunctionCallArgumentsDeltaSchema> {\n return chunk.type === 'response.function_call_arguments.delta';\n}\n\nfunction isResponseOutputItemAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemAddedSchema> {\n return chunk.type === 'response.output_item.added';\n}\n\nfunction isResponseAnnotationAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseAnnotationAddedSchema> {\n return chunk.type === 'response.output_text.annotation.added';\n}\n\nfunction isResponseReasoningSummaryTextDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseReasoningSummaryTextDeltaSchema> {\n return chunk.type === 'response.reasoning_summary_text.delta';\n}\n\ntype ResponsesModelConfig = {\n isReasoningModel: boolean;\n systemMessageMode: 'remove' | 'system' | 'developer';\n requiredAutoTruncation: boolean;\n};\n\nfunction getResponsesModelConfig(modelId: string): ResponsesModelConfig {\n // o series reasoning models:\n if (modelId.startsWith('o')) {\n if (modelId.startsWith('o1-mini') || modelId.startsWith('o1-preview')) {\n return {\n isReasoningModel: true,\n systemMessageMode: 'remove',\n requiredAutoTruncation: false,\n };\n }\n\n return {\n isReasoningModel: true,\n systemMessageMode: 'developer',\n requiredAutoTruncation: false,\n };\n }\n\n // gpt models:\n return {\n isReasoningModel: false,\n systemMessageMode: 'system',\n requiredAutoTruncation: false,\n };\n}\n\nconst openaiResponsesProviderOptionsSchema = z.object({\n metadata: z.any().nullish(),\n parallelToolCalls: z.boolean().nullish(),\n previousResponseId: z.string().nullish(),\n store: z.boolean().nullish(),\n user: z.string().nullish(),\n reasoningEffort: z.string().nullish(),\n strictSchemas: z.boolean().nullish(),\n instructions: z.string().nullish(),\n reasoningSummary: z.string().nullish(),\n});\n\nexport type OpenAIResponsesProviderOptions = z.infer<\n typeof openaiResponsesProviderOptionsSchema\n>;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesPrompt } from './openai-responses-api-types';\n\nexport function convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode,\n}: {\n prompt: LanguageModelV2Prompt;\n systemMessageMode: 'system' | 'developer' | 'remove';\n}): {\n messages: OpenAIResponsesPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIResponsesPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n 
}\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'input_text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'input_image',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n };\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n // The AI SDK automatically downloads files for user file parts with URLs\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'input_file',\n filename: part.filename ?? `part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n messages.push({\n role: 'assistant',\n content: [{ type: 'output_text', text: part.text }],\n });\n break;\n }\n case 'tool-call': {\n messages.push({\n type: 'function_call',\n call_id: part.toolCallId,\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n });\n break;\n }\n }\n }\n\n break;\n }\n\n case 'tool': {\n for (const part of content) {\n messages.push({\n type: 'function_call_output',\n call_id: part.toolCallId,\n output: JSON.stringify(part.result),\n });\n }\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIResponseFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case undefined:\n case null:\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'max_output_tokens':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n default:\n return hasToolCalls ? 'tool-calls' : 'unknown';\n }\n}\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesTool } from './openai-responses-api-types';\n\nexport function prepareResponsesTools({\n tools,\n toolChoice,\n strict,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n strict: boolean;\n}): {\n tools?: Array<OpenAIResponsesTool>;\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'web_search_preview' }\n | { type: 'function'; name: string };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiTools: Array<OpenAIResponsesTool> = [];\n\n for (const tool of tools) {\n switch (tool.type) {\n case 'function':\n openaiTools.push({\n type: 'function',\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: strict ? true : undefined,\n });\n break;\n case 'provider-defined':\n switch (tool.id) {\n case 'openai.web_search_preview':\n openaiTools.push({\n type: 'web_search_preview',\n search_context_size: tool.args.searchContextSize as\n | 'low'\n | 'medium'\n | 'high',\n user_location: tool.args.userLocation as {\n type: 'approximate';\n city: string;\n region: string;\n },\n });\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice:\n toolChoice.toolName === 'web_search_preview'\n ? { type: 'web_search_preview' }\n : { type: 'function', name: toolChoice.toolName },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { SpeechModelV1, SpeechModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createBinaryResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport { OpenAISpeechModelId } from './openai-speech-options';\nimport { OpenAISpeechAPITypes } from './openai-api-types';\n\n// https://platform.openai.com/docs/api-reference/audio/createSpeech\nconst OpenAIProviderOptionsSchema = z.object({\n instructions: z.string().nullish(),\n speed: z.number().min(0.25).max(4.0).default(1.0).nullish(),\n});\n\nexport type OpenAISpeechCallOptions = z.infer<\n typeof OpenAIProviderOptionsSchema\n>;\n\ninterface OpenAISpeechModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class OpenAISpeechModel implements SpeechModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAISpeechModelId,\n private readonly config: OpenAISpeechModelConfig,\n ) {}\n\n private async getArgs({\n text,\n voice = 'alloy',\n outputFormat = 'mp3',\n speed,\n instructions,\n providerOptions,\n }: Parameters<SpeechModelV1['doGenerate']>[0]) {\n const warnings: SpeechModelV1CallWarning[] = [];\n\n // Parse provider options\n const openAIOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: OpenAIProviderOptionsSchema,\n });\n\n // Create request body\n const requestBody: Record<string, unknown> = {\n model: this.modelId,\n input: text,\n voice,\n response_format: 'mp3',\n speed,\n instructions,\n };\n\n if (outputFormat) {\n if (['mp3', 'opus', 'aac', 'flac', 'wav', 'pcm'].includes(outputFormat)) {\n requestBody.response_format 
= outputFormat;\n } else {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'outputFormat',\n details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`,\n });\n }\n }\n\n // Add provider-specific options\n if (openAIOptions) {\n const speechModelOptions: OpenAISpeechAPITypes = {};\n\n for (const key in speechModelOptions) {\n const value = speechModelOptions[key as keyof OpenAISpeechAPITypes];\n if (value !== undefined) {\n requestBody[key] = value;\n }\n }\n }\n\n return {\n requestBody,\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<SpeechModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<SpeechModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { requestBody, warnings } = await this.getArgs(options);\n\n const {\n value: audio,\n responseHeaders,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/audio/speech',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: requestBody,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createBinaryResponseHandler(),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n audio,\n warnings,\n request: {\n body: JSON.stringify(requestBody),\n },\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACQA,IAAAA,0BAIO;;;ACZP,IAAAC,mBAUO;AACP,IAAAC,yBAUO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AAEP,4BAAgC;AAEzB,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA,oBAAoB;AACtB,GAME;AACA,QAAM,WAA6B,CAAC;AACpC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC;AACxD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AA1DhD;AA2DY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA;AAAA,sBAG5D,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,oBACxC;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,UAAU,WAAW,QAAQ,GAAG;AAC9C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,0BAAQ,KAAK,WAAW;AAAA,oBACtB,KAAK,aAAa;AAChB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa;AAAA,0BACX,UAAM,uCAAgB,KAAK,IAAI;AAAA,0BAC/B,QAAQ;AAAA,wBACV;AAAA,sBACF;AAAA,oBACF;AAAA,oBACA,KAAK;AAAA,oBACL,KAAK,cAAc;AACjB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa;AAAA,0BACX,UAAM,uCAAgB,KAAK,IAAI;AAAA,0BAC/B,QAAQ;AAAA,wBACV;AAAA,sBACF;AAAA,oBACF;AAAA,oBAEA,SAAS;AACP,4BAAM,IAAI,8CAA8B;AAAA,wBACtC,eAAe,uCAAuC,KAAK,SAAS;AAAA,sBACtE,CAAC;AAAA,oBACH;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB
,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,MAAM;AAAA,sBACJ,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,sBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,oBACrD;AAAA,kBACF;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;ACvMO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAAkB;AAkDX,IAAM,wBAAwB,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO5C,WAAW,aAAE,OAAO,aAAE,OAAO,OAAO,GAAG,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5D,mBAAmB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMxC,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,aAAE,KAAK,CAAC,OAAO,UAAU,MAAM,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5D,qBAAqB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKzC,OAAO,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5B,UAAU,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,YAAY,aAAE,OAAO,aAAE,IAAI,CAAC,EAAE,SAAS;AACzC,CAAC;;;AC9FD,IAAAC,cAAkB;AAClB,IAAAC,yBAA+C;AAExC,IAAM,wBAAwB,cAAE,OAAO;AAAA,EAC5C,OAAO,cAAE,OAAO;AAAA,IACd,SAAS,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,cAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ACrBD,IAAAC,mBAKO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAMC,eAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,MAAAA,aAAY,KAAK;AAAA,QACf,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,oBAAoB,OAAO;AAAA,QACrC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAOA,cAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAOA,cAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAOA;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,0BAAN,MAAyD;AAAA,EAQ9D,YACE,SACA,UACA,QACA;AAXF,SAAS,uBAAuB;AAY9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;
AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAM,mBAAsD;AAC1D,WAAO;AAAA,MACL,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAA+B;AAvFjC;AAwFI,UAAM,WAAyC,CAAC;AAGhD,UAAM,iBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,SAAS,mBACf;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB,IAAI;AAAA,MAC9C;AAAA,QACE;AAAA,QACA,mBAAmB,qBAAqB,KAAK,OAAO;AAAA,MACtD;AAAA,IACF;AAEA,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,YAAY,cAAc;AAAA,MAC1B,MAAM,cAAc;AAAA,MACpB,qBAAqB,cAAc;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS;AAAA;AAAA,QAErB,KAAK,SAAS,qBAAqB,eAAe,UAAU,OAC1D;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,QAAQ;AAAA,YACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc;AAAA,UACxB;AAAA,MACN,MAAM;AAAA,MACN;AAAA;AAAA;AAAA,MAIA,uBAAuB,cAAc;AAAA,MACrC,OAAO,cAAc;AAAA,MACrB,UAAU,cAAc;AAAA,MACxB,YAAY,cAAc;AAAA,MAC1B,kBAAkB,cAAc;AAAA;AAAA,MAGhC;AAAA,IACF;AAEA,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAGlC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,qBAAqB,MAAM;AACtC,iBAAS,oBAAoB;AAC7B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,oBAAoB,MAAM;AACrC,iBAAS,mBAAmB;AAC5B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,cAAc,MAAM;AAC/B,iBAAS,aAAa;AACtB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAGA,UAAI,SAAS,cAAc,MAAM;AAC/B,YAAI,SAAS,yBAAyB,MAAM;AAC1C,mBAAS,wBAAwB,SAAS;AAAA,QAC5C;AACA,iBAAS,aAAa;AAAA,MACxB;AAAA,IACF,WACE,KAAK,QAAQ,WAAW,uBAAuB,KAC/C,KAAK,QAAQ,WAAW,4BAA4B,GACpD;AACA,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SACE;AAAA,QACJ,CAAC;AAAA,MACH;AAAA,IACF;AACA,UAAM;AAAA,MACJ,OAAOC;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,oBAAmB,UAAK,SAAS,sBAAd,YAAmC;AAAA,IACxD,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAOA;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlQjE;AAmQI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,eAAW,aAAY,YAAO,QAAQ,eAAf,YAA6B,CAAC,GAAG;AACtD,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,cAAc;AAAA,QACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,QACtC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B,CAAC;AAAA,IACH;AAGA,UAAM,0BAAyB,cAAS,UAAT,mBAAgB;AAC/C,UAAM,sBAAqB
,cAAS,UAAT,mBAAgB;AAC3C,UAAM,mBAA6C,EAAE,QAAQ,CAAC,EAAE;AAChE,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,OAAO,kBACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,OAAO,qBACtB,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,OAAO;AAAA,QACL,cAAa,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC9C,eAAc,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACrD;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,eAAe;AAEnB,UAAM,mBAA6C,EAAE,QAAQ,CAAC,EAAE;AAEhE,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAjZvC;AAmZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,cAAc,wCAAiB;AACrC,oBAAM,eAAe,gDAAqB;AAE1C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,iCAAiB,OAAO,kBACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,iCAAiB,OAAO,qBACtB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAA
S,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,yBAAyB,cAC5B,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,EACpC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SAAO,QAAQ,WAAW,GAAG;AAC/B;AAMA,SAAS,qBAAqB,SAAiB;AA7rB/C;AA8rBE,MAAI,CAAC,iBAAiB,OAAO,GAAG;AAC9B,WAAO;AAAA,EACT;AAEA,UACE,2BAAgB,OAAuC,MAAvD,mBACI,sBADJ,YACyB;AAE7B;AAEA,IAAM,kBAAkB;AAAA,EACtB,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AAAA,EACA,cAAc;AAAA,IACZ,mBAAmB;AAAA,EACrB;AAAA,EACA,yBAAyB;AAAA,IACvB,mBAAmB;AAAA,EACrB;AAAA,EACA,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AACF;;;AOptBA,IAAAC,yBAQO;AACP,IAAAC,cAAkB;;;AChBlB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,
YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ACzGA,IAAAC,cAAkB;AAKX,IAAM,kCAAkC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItD,MAAM,cAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgB3B,WAAW,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,cAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;AFAM,IAAM,gCAAN,MAA+D;AAAA,EAWpE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAc9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAUA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAM,mBAAsD;AAC1D,WAAO;AAAA;AAAA,IAEP;AAAA,EACF;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAGhD,UAAM,gBAAgB;AAAA,MACpB,GAAI,UAAM,6CAAqB;AAAA,QAC7B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,GAAI,UAAM,6CAAqB;AAAA,QAC7B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,QAAQ,YAAY,CAAC;AAEzD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,cAAc;AAAA,QACpB,YAAY,cAAc;AAAA,QAC1B,QAAQ,cAAc;AAAA,QACtB,MAAM,cAAc;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA;AAAA,QAGA,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;
AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,MAC7C,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AAAA,YACnC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,OAAO;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AGvVD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACVlB,IAAAC,cAAkB;AAoBX,IAAM,iCAAiC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKrD,YAAY,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,cAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADbM,IAAM,uBAAN,MAA+D;AAAA,EAmBpE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AA9BrC;AA+BI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAlCvC;AAmCI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAvDJ;AAwDI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAA
S,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,iBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,cAAc;AAAA,QAC1B,MAAM,cAAc;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AEhHD,IAAAC,yBAIO;AACP,IAAAC,cAAkB;;;ACHX,IAAM,wBAA4D;AAAA,EACvE,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,eAAe;AACjB;AAEO,IAAM,2BAA2B,oBAAI,IAAI,CAAC,aAAa,CAAC;;;ADaxD,IAAM,mBAAN,MAA+C;AAAA,EAapD,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAfnB,SAAS,uBAAuB;AAAA,EAgB7B;AAAA,EAdH,IAAI,mBAA2B;AAzBjC;AA0BI,YACE,gBAAK,SAAS,qBAAd,YAAkC,sBAAsB,KAAK,OAAO,MAApE,YAAyE;AAAA,EAE7E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AApDJ;AAqDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,GAAI,CAAC,yBAAyB,IAAI,KAAK,OAAO,IAC1C,EAAE,iBAAiB,WAAW,IAC9B,CAAC;AAAA,MACP;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AE7GD,IAAAC,cAAkB;AAElB,IAAM,6BAA6B,cAAE,OAAO,CAAC,CAAC;AAE9C,SAAS,qBAAqB;AAAA,EAC5B;AAAA,EACA;AACF,IASI,CAAC,GAKH;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAAA,IACA,YAAY;AAAA,EACd;AACF;AAEO,IAAM,cAAc;AAAA,EACzB,kBAAkB;AACpB;;;AC9BA,IAAAC,yBAMO;AACP,IAAAC,eAAkB;AASlB,IAAM,8BAA8B,eAAE,OAAO;AAAA,EAC3C,SAAS,eAAE,MAAM,eAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EACrC,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,QAAQ,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC3B,aAAa,eAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ,EAAE,QAAQ,CAAC;AAAA,EACzD,wBAAwB,eACrB,MAAM,eAAE,KAAK,CAAC,QAAQ,SAAS,CAAC,CAAC,EACjC,QAAQ,EACR,QAAQ,CAAC,SAAS,CAAC;AACxB,CAAC;AAkBD,IAAM,cAAc;AAAA,EAClB,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YA
AY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,MAAM;AAAA,EACN,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,YAAY;AAAA,EACZ,OAAO;AACT;AAEO,IAAM,2BAAN,MAA+D;AAAA,EAOpE,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAmC;AA5HrC;AA6HI,UAAM,WAA8C,CAAC;AAGrD,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,WAAW,IAAI,SAAS;AAC9B,UAAM,OACJ,iBAAiB,aACb,IAAI,KAAK,CAAC,KAAK,CAAC,IAChB,IAAI,KAAK,KAAC,kDAA0B,KAAK,CAAC,CAAC;AAEjD,aAAS,OAAO,SAAS,KAAK,OAAO;AACrC,aAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,IAAI,GAAG,SAAS,EAAE,MAAM,UAAU,CAAC,CAAC;AAGtE,QAAI,eAAe;AACjB,YAAM,4BAA6D;AAAA,QACjE,UAAS,mBAAc,YAAd,YAAyB;AAAA,QAClC,WAAU,mBAAc,aAAd,YAA0B;AAAA,QACpC,SAAQ,mBAAc,WAAd,YAAwB;AAAA,QAChC,cAAa,mBAAc,gBAAd,YAA6B;AAAA,QAC1C,0BACE,mBAAc,2BAAd,YAAwC;AAAA,MAC5C;AAEA,iBAAW,OAAO,2BAA2B;AAC3C,cAAM,QACJ,0BACE,GACF;AACF,YAAI,UAAU,QAAW;AACvB,mBAAS,OAAO,KAAK,OAAO,KAAK,CAAC;AAAA,QACpC;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SACkE;AA9KtE;AA+KI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,UAAU,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAEzD,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,0CAAkB;AAAA,MAC1B,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,WACJ,SAAS,YAAY,QAAQ,SAAS,YAAY,cAC9C,YAAY,SAAS,QAAoC,IACzD;AAEN,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,WACE,oBAAS,UAAT,mBAAgB,IAAI,WAAS;AAAA,QAC3B,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,WAAW,KAAK;AAAA,MAClB,QAJA,YAIO,CAAC;AAAA,MACV;AAAA,MACA,oBAAmB,cAAS,aAAT,YAAqB;AAAA,MACxC;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,oCAAoC,eAAE,OAAO;AAAA,EACjD,MAAM,eAAE,OAAO;AAAA,EACf,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,OAAO,eACJ;AAAA,IACC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,OAAO;AAAA,MACf,OAAO,eAAE,OAAO;AAAA,MAChB,KAAK,eAAE,OAAO;AAAA,IAChB,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;;;ACpOD,IAAAC,yBAQO;AACP,IAAAC,eAAkB;;;ACjBlB,IAAAC,mBAIO;AAGA,SAAS,iCAAiC;AAAA,EAC/C;AAAA,EACA;AACF,GAME;AACA,QAAM,WAAkC,CAAC;AACzC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AApDhD;AAqDY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,cAAc,MAAM,KAAK,KAAK;AAAA,cAC/C;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,W
ACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA;AAAA,oBAG3C,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,kBACxC;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAE5B,0BAAM,IAAI,+CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,oBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,kBACrD;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,CAAC,EAAE,MAAM,eAAe,MAAM,KAAK,KAAK,CAAC;AAAA,cACpD,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,KAAK;AAAA,gBACd,MAAM,KAAK;AAAA,gBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,cACrC,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,QAAQ,SAAS;AAC1B,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,KAAK;AAAA,YACd,QAAQ,KAAK,UAAU,KAAK,MAAM;AAAA,UACpC,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;AC/IO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO,eAAe,eAAe;AAAA,EACzC;AACF;;;ACpBA,IAAAC,mBAIO;AAGA,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AACF,GAaE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAMC,eAA0C,CAAC;AAEjD,aAAW,QAAQ,OAAO;AACxB,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,QAAAA,aAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,SAAS,OAAO;AAAA,QAC1B,CAAC;AACD;AAAA,MACF,KAAK;AACH,gBAAQ,KAAK,IAAI;AAAA,UACf,KAAK;AACH,YAAAA,aAAY,KAAK;AAAA,cACf,MAAM;AAAA,cACN,qBAAqB,KAAK,KAAK;AAAA,cAI/B,eAAe,KAAK,KAAK;AAAA,YAK3B,CAAC;AACD;AAAA,UACF;AACE,yBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,QACJ;AACA;AAAA,MACF;AACE,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,IACJ;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAOA,cAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAOA,cAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAOA;AAAA,QACP,YACE,WAAW,aAAa,uBACpB,EAAE,MAAM,qBAAqB,IAC7B,EAAE,MAAM,YAAY,MAAM,WAAW,SAAS;AAAA,QACpD;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AH5EO,IAAM,+BAAN,MAA8D;AAAA,EAOnE,YAAY,SAAiC,QAAsB;AANnE,SAAS,uBAAuB;AAO9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,MAAM,mBAAsD;AAC1D,WAAO;AAAA,MACL,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,WAAyC,CAAC;AAChD,UAAM,cAAc,wBAAwB,KAAK,OAAO;AAExD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,gB
AAgB,CAAC;AAAA,IACzE;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB,IAC1C,iCAAiC;AAAA,MAC/B;AAAA,MACA,mBAAmB,YAAY;AAAA,IACjC,CAAC;AAEH,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,YAAW,oDAAe,kBAAf,YAAgC;AAEjD,UAAM,WAAW;AAAA,MACf,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MAEnB,IAAI,iDAAgB,UAAS,UAAU;AAAA,QACrC,MAAM;AAAA,UACJ,QACE,eAAe,UAAU,OACrB;AAAA,YACE,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,YAC5B,QAAQ,eAAe;AAAA,UACzB,IACA,EAAE,MAAM,cAAc;AAAA,QAC9B;AAAA,MACF;AAAA;AAAA,MAGA,UAAU,+CAAe;AAAA,MACzB,qBAAqB,+CAAe;AAAA,MACpC,sBAAsB,+CAAe;AAAA,MACrC,OAAO,+CAAe;AAAA,MACtB,MAAM,+CAAe;AAAA,MACrB,cAAc,+CAAe;AAAA;AAAA,MAG7B,GAAI,YAAY,sBACb,+CAAe,oBAAmB,SACjC,+CAAe,qBAAoB,SAAS;AAAA,QAC5C,WAAW;AAAA,UACT,IAAI,+CAAe,oBAAmB,QAAQ;AAAA,YAC5C,QAAQ,cAAc;AAAA,UACxB;AAAA,UACA,IAAI,+CAAe,qBAAoB,QAAQ;AAAA,YAC7C,SAAS,cAAc;AAAA,UACzB;AAAA,QACF;AAAA,MACF;AAAA,MACF,GAAI,YAAY,0BAA0B;AAAA,QACxC,YAAY;AAAA,MACd;AAAA,IACF;AAEA,QAAI,YAAY,kBAAkB;AAGhC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAEA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM;AAAA,MACJ,OAAOC;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,sBAAsB;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAOA;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAvMjE;AAwMI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB,eAAE,OAAO;AAAA,UACP,IAAI,eAAE,OAAO;AAAA,UACb,YAAY,eAAE,OAAO;AAAA,UACrB,OAAO,eAAE,OAAO;AAAA,UAChB,QAAQ,eAAE;AAAA,YACR,eAAE,mBAAmB,QAAQ;AAAA,cAC3B,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,SAAS;AAAA,gBACzB,MAAM,eAAE,QAAQ,WAAW;AAAA,gBAC3B,SAAS,eAAE;AAAA,kBACT,eAAE,OAAO;AAAA,oBACP,MAAM,eAAE,QAAQ,aAAa;AAAA,oBAC7B,MAAM,eAAE,OAAO;AAAA,oBACf,aAAa,eAAE;AAAA,sBACb,eAAE,OAAO;AAAA,wBACP,MAAM,eAAE,QAAQ,cAAc;AAAA,wBAC9B,aAAa,eAAE,OAAO;AAAA,wBACtB,WAAW,eAAE,OAAO;AAAA,wBACpB,KAAK,eAAE,OAAO;AAAA,wBACd,OAAO,eAAE,OAAO;AAAA,sBAClB,CAAC;AAAA,oBACH;AAAA,kBACF,CAAC;AAAA,gBACH;AAAA,cACF,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,eAAe;AAAA,gBAC/B,SAAS,eAAE,OAAO;AAAA,gBAClB,MAAM,eAAE,OAAO;AAAA,gBACf,WAAW,eAAE,OAAO;AAAA,cACtB,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,iBAAiB;AAAA,cACnC,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,eAAe;AAAA,cACjC,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,WAAW;AAAA,gBAC3B,SAAS,eAAE;AAAA,kBACT,eAAE,OAAO;AAAA,oBACP,MAAM,eAAE,QAAQ,cAAc;AAAA,oBAC9B,MAAM,eAAE,OAAO;AAAA,kBACjB,CAAC;AAAA,gBACH;AAAA,cACF,CAAC;AAAA,YACH,CAAC;AAAA,UACH;AAAA,UACA,oBAAoB,eAAE,OAAO,EAAE,QAAQ,eAAE,OAAO,EAAE,CAAC,EAAE,SAAS;AAAA,UAC9D,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,UAAyC,CAAC;AAGhD,eAAW,QAAQ,SAAS,QAAQ;AAClC,cAAQ,KAAK,MAAM;AAAA,QACjB,KAAK,aAAa;AAChB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,KAAK,QAAQ,IAAI,aAAW,QAAQ,IAAI,EAAE,KAAK;AAAA,UACvD,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,WAAW;AACd,qBAAW,eAAe,KAAK,SAAS;AACtC,oBAAQ,KAAK;AAAA,cACX,MAAM;AAAA,cACN,MAAM,YAAY;AAAA,YACpB,CAAC;AAED,uBAAW,cAAc,YAAY,aAAa;AAChD,sBAAQ,KAAK;AAAA,gBAC
X,MAAM;AAAA,gBACN,YAAY;AAAA,gBACZ,KAAI,sBAAK,QAAO,eAAZ,gDAA8B,mCAAW;AAAA,gBAC7C,KAAK,WAAW;AAAA,gBAChB,OAAO,WAAW;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,iBAAiB;AACpB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,cAAc;AAAA,YACd,YAAY,KAAK;AAAA,YACjB,UAAU,KAAK;AAAA,YACf,MAAM,KAAK;AAAA,UACb,CAAC;AACD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,8BAA8B;AAAA,QAC1C,eAAc,cAAS,uBAAT,mBAA6B;AAAA,QAC3C,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,IAAI,SAAS;AAAA,QACb,WAAW,IAAI,KAAK,SAAS,aAAa,GAAI;AAAA,QAC9C,SAAS,SAAS;AAAA,QAClB,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,qBACE,oBAAS,MAAM,yBAAf,mBAAqC,kBAArC,YAAsD;AAAA,UACxD,kBACE,oBAAS,MAAM,0BAAf,mBAAsC,qBAAtC,YAA0D;AAAA,QAC9D;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,OAAO;AAEb,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,qBAAoC;AACxC,QAAI,kBAAiC;AACrC,QAAI,aAA4B;AAChC,UAAM,mBAGF,CAAC;AACL,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AA7YvC;AA+YY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,+BAA+B,KAAK,GAAG;AACzC,kBAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,iCAAiB,MAAM,YAAY,IAAI;AAAA,kBACrC,UAAU,MAAM,KAAK;AAAA,kBACrB,YAAY,MAAM,KAAK;AAAA,gBACzB;AAEA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,MAAM,KAAK;AAAA,kBACvB,UAAU,MAAM,KAAK;AAAA,kBACrB,eAAe,MAAM,KAAK;AAAA,gBAC5B,CAAC;AAAA,cACH;AAAA,YACF,WAAW,0CAA0C,KAAK,GAAG;AAC3D,oBAAM,WAAW,iBAAiB,MAAM,YAAY;AAEpD,kBAAI,YAAY,MAAM;AACpB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,MAAM;AAAA,gBACvB,CAAC;AAAA,cACH;AAAA,YACF,WAAW,uBAAuB,KAAK,GAAG;AACxC,2BAAa,MAAM,SAAS;AAC5B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,MAAM,SAAS;AAAA,gBACnB,WAAW,IAAI,KAAK,MAAM,SAAS,aAAa,GAAI;AAAA,gBACpD,SAAS,MAAM,SAAS;AAAA,cAC1B,CAAC;AAAA,YACH,WAAW,iBAAiB,KAAK,GAAG;AAClC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH,WAAW,yCAAyC,KAAK,GAAG;AAC1D,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH,WACE,8BAA8B,KAAK,KACnC,MAAM,KAAK,SAAS,iBACpB;AACA,+BAAiB,MAAM,YAAY,IAAI;AACvC,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,MAAM,KAAK;AAAA,gBACvB,UAAU,MAAM,KAAK;AAAA,gBACrB,MAAM,MAAM,KAAK;AAAA,cACnB,CAAC;AAAA,YACH,WAAW,wBAAwB,KAAK,GAAG;AACzC,6BAAe,8BAA8B;AAAA,gBAC3C,eAAc,WAAM,SAAS,uBAAf,mBAAmC;AAAA,gBACjD;AAAA,cACF,CAAC;AACD,oBAAM,cAAc,MAAM,SAAS,MAAM;AACzC,oBAAM,eAAe,MAAM,SAAS,MAAM;AAC1C,oCACE,iBAAM,SAAS,MAAM,yBAArB,mBAA2C,kBAA3C,YACA;AACF,iCACE,iBAAM,SAAS,MAAM,0BAArB,mBAA4C,qBAA5C,YACA;AAAA,YACJ,WAAW,+BAA+B,KAAK,GAAG;AAChD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY;AAAA,gBACZ,KAAI,sBAAK,QAAO,eAAZ,gDAA8B,mCAAW;AAAA,gBAC7C,KAAK,MAAM,WAAW;AAAA,gBACtB,OAAO,MAAM,WAAW;AAAA,cAC1B,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAC
hB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,IAAK,sBAAsB,QAAQ,mBAAmB,SAAS;AAAA,gBAC7D,kBAAkB;AAAA,kBAChB,QAAQ;AAAA,oBACN;AAAA,oBACA;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,eAAE,OAAO;AAAA,EAC3B,cAAc,eAAE,OAAO;AAAA,EACvB,sBAAsB,eACnB,OAAO,EAAE,eAAe,eAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EAC9C,QAAQ;AAAA,EACX,eAAe,eAAE,OAAO;AAAA,EACxB,uBAAuB,eACpB,OAAO,EAAE,kBAAkB,eAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EACjD,QAAQ;AACb,CAAC;AAED,IAAM,uBAAuB,eAAE,OAAO;AAAA,EACpC,MAAM,eAAE,QAAQ,4BAA4B;AAAA,EAC5C,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,8BAA8B,eAAE,OAAO;AAAA,EAC3C,MAAM,eAAE,KAAK,CAAC,sBAAsB,qBAAqB,CAAC;AAAA,EAC1D,UAAU,eAAE,OAAO;AAAA,IACjB,oBAAoB,eAAE,OAAO,EAAE,QAAQ,eAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,IAC7D,OAAO;AAAA,EACT,CAAC;AACH,CAAC;AAED,IAAM,6BAA6B,eAAE,OAAO;AAAA,EAC1C,MAAM,eAAE,QAAQ,kBAAkB;AAAA,EAClC,UAAU,eAAE,OAAO;AAAA,IACjB,IAAI,eAAE,OAAO;AAAA,IACb,YAAY,eAAE,OAAO;AAAA,IACrB,OAAO,eAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,+BAA+B,eAAE,OAAO;AAAA,EAC5C,MAAM,eAAE,QAAQ,2BAA2B;AAAA,EAC3C,cAAc,eAAE,OAAO;AAAA,EACvB,MAAM,eAAE,mBAAmB,QAAQ;AAAA,IACjC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,eAAE,OAAO;AAAA,MACb,SAAS,eAAE,OAAO;AAAA,MAClB,MAAM,eAAE,OAAO;AAAA,MACf,WAAW,eAAE,OAAO;AAAA,MACpB,QAAQ,eAAE,QAAQ,WAAW;AAAA,IAC/B,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,2CAA2C,eAAE,OAAO;AAAA,EACxD,MAAM,eAAE,QAAQ,wCAAwC;AAAA,EACxD,SAAS,eAAE,OAAO;AAAA,EAClB,cAAc,eAAE,OAAO;AAAA,EACvB,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,gCAAgC,eAAE,OAAO;AAAA,EAC7C,MAAM,eAAE,QAAQ,4BAA4B;AAAA,EAC5C,cAAc,eAAE,OAAO;AAAA,EACvB,MAAM,eAAE,mBAAmB,QAAQ;AAAA,IACjC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,eAAE,OAAO;AAAA,MACb,SAAS,eAAE,OAAO;AAAA,MAClB,MAAM,eAAE,OAAO;AAAA,MACf,WAAW,eAAE,OAAO;AAAA,IACtB,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,gCAAgC,eAAE,OAAO;AAAA,EAC7C,MAAM,eAAE,QAAQ,uCAAuC;AAAA,EACvD,YAAY,eAAE,OAAO;AAAA,IACnB,MAAM,eAAE,QAAQ,cAAc;AAAA,IAC9B,KAAK,eAAE,OAAO;AAAA,IACd,OAAO,eAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,0CAA0C,eAAE,OAAO;AAAA,EACvD,MAAM,eAAE,QAAQ,uCAAuC;AAAA,EACvD,SAAS,eAAE,OAAO;AAAA,EAClB,cAAc,eAAE,OAAO;AAAA,EACvB,eAAe,eAAE,OAAO;AAAA,EACxB,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,6BAA6B,eAAE,MAAM;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAAE,OAAO,EAAE,MAAM,eAAE,OAAO,EAAE,CAAC,EAAE,YAAY;AAAA;AAC7C,CAAC;AAED,SAAS,iBACP,OAC+C;AAC/C,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,8BACP,OACuD;AACvD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,wBACP,OACsD;AACtD,SACE,MAAM,SAAS,wBAAwB,MAAM,SAAS;AAE1D;AAEA,SAAS,uBACP,OACqD;AACrD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,0CACP,OACmE;AACnE,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,yCACP,OACkE;AAClE,SAAO,MAAM,SAAS;AACxB;AAQA,SAAS,wBAAwB,SAAuC;AAEtE,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,YAAY,GAAG;AACrE,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,WAAO;AAAA,MACL,kBAAkB;AAAA,MAClB,mBAAmB;AAAA,MACnB,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAGA,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,wBAAwB;AAAA,EAC1B;AACF;AAEA,IAAM,uCAAuC,eAAE,OAAO;AAAA,EACpD,UAAU,eAAE,IAAI,EAAE,QAAQ;AAAA,EAC1B,mBAAmB,eAAE,QAAQ,EAAE,QAAQ;AAAA,EACvC,oBAAoB,eAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,OAAO,eAAE,QAAQ,EAAE,QAAQ;AAAA,EAC3B,MAAM,eAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,iBAAiB,eAA
E,OAAO,EAAE,QAAQ;AAAA,EACpC,eAAe,eAAE,QAAQ,EAAE,QAAQ;AAAA,EACnC,cAAc,eAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,kBAAkB,eAAE,OAAO,EAAE,QAAQ;AACvC,CAAC;;;AIpsBD,IAAAC,yBAKO;AACP,IAAAC,eAAkB;AAOlB,IAAM,8BAA8B,eAAE,OAAO;AAAA,EAC3C,cAAc,eAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,OAAO,eAAE,OAAO,EAAE,IAAI,IAAI,EAAE,IAAI,CAAG,EAAE,QAAQ,CAAG,EAAE,QAAQ;AAC5D,CAAC;AAYM,IAAM,oBAAN,MAAiD;AAAA,EAOtD,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA,QAAQ;AAAA,IACR,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAA+C;AAC7C,UAAM,WAAuC,CAAC;AAG9C,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,cAAuC;AAAA,MAC3C,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAEA,QAAI,cAAc;AAChB,UAAI,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,KAAK,EAAE,SAAS,YAAY,GAAG;AACvE,oBAAY,kBAAkB;AAAA,MAChC,OAAO;AACL,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS,8BAA8B,YAAY;AAAA,QACrD,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,eAAe;AACjB,YAAM,qBAA2C,CAAC;AAElD,iBAAW,OAAO,oBAAoB;AACpC,cAAM,QAAQ,mBAAmB,GAAiC;AAClE,YAAI,UAAU,QAAW;AACvB,sBAAY,GAAG,IAAI;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC2D;AApG/D;AAqGI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,aAAa,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE5D,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,oDAA4B;AAAA,MACvD,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,SAAS;AAAA,QACP,MAAM,KAAK,UAAU,WAAW;AAAA,MAClC;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;;;ArBuCO,SAAS,aACd,UAAkC,CAAC,GACnB;AAhLlB;AAiLE,QAAM,WACJ,uDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAG3C,QAAM,iBAAgB,aAAQ,kBAAR,YAAyB;AAE/C,QAAM,gBAAe,aAAQ,SAAR,YAAgB;AAErC,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,oCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,uBAAuB,QAAQ;AAAA,IAC/B,kBAAkB,QAAQ;AAAA,IAC1B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAA+B,CAAC,MAEhC,IAAI,wBAAwB,SAAS,UAAU;AAAA,IAC7C,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,8BAA8B,SAAS;AAAA,IACzC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAoC,CAAC,MAErC,IAAI,qBAAqB,SAAS,UAAU;AAAA,IAC1C,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,mBAAmB,CACvB,SACA,WAAgC,CAAC,MAEjC,IAAI,iBAAiB,SAAS,UAAU;AAAA,IACtC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,kBAAkB,SAAS;AAAA,IAC7B,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,sBAAsB,CAC1B,SACA,aACG;AACH,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,YAAY,0BAA0B;AACxC,
aAAO,sBAAsB,OAAO;AAAA,IACtC;AAEA,WAAO,gBAAgB,SAAS,QAA8B;AAAA,EAChE;AAEA,QAAM,uBAAuB,CAAC,YAAoC;AAChE,WAAO,IAAI,6BAA6B,SAAS;AAAA,MAC/C,UAAU,GAAG,YAAY;AAAA,MACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,SACf,SACA,UACA;AACA,WAAO,oBAAoB,SAAS,QAAQ;AAAA,EAC9C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,SAAS;AAClB,WAAS,cAAc;AAEvB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,SAAS,aAAa;AAAA,EACjC,eAAe;AAAA;AACjB,CAAC;","names":["import_provider_utils","import_provider","import_provider_utils","import_zod","import_zod","import_provider_utils","import_provider","openaiTools","openaiTools","toolCall","import_provider_utils","import_zod","import_provider","import_zod","import_provider","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider","import_provider","openaiTools","openaiTools","import_provider_utils","import_zod"]}
1 | +
{"version":3,"sources":["../src/index.ts","../src/openai-provider.ts","../src/openai-chat-language-model.ts","../src/convert-to-openai-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-finish-reason.ts","../src/openai-chat-options.ts","../src/openai-error.ts","../src/openai-prepare-tools.ts","../src/openai-completion-language-model.ts","../src/convert-to-openai-completion-prompt.ts","../src/openai-completion-options.ts","../src/openai-embedding-model.ts","../src/openai-embedding-options.ts","../src/openai-image-model.ts","../src/openai-image-settings.ts","../src/openai-tools.ts","../src/openai-transcription-model.ts","../src/openai-transcription-options.ts","../src/responses/openai-responses-language-model.ts","../src/responses/convert-to-openai-responses-messages.ts","../src/responses/map-openai-responses-finish-reason.ts","../src/responses/openai-responses-prepare-tools.ts","../src/openai-speech-model.ts"],"sourcesContent":["export { createOpenAI, openai } from './openai-provider';\nexport type { OpenAIProvider, OpenAIProviderSettings } from './openai-provider';\nexport type { OpenAIResponsesProviderOptions } from './responses/openai-responses-language-model';\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n ProviderV2,\n TranscriptionModelV1,\n SpeechModelV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { OpenAIChatLanguageModel } from './openai-chat-language-model';\nimport { OpenAIChatModelId } from './openai-chat-options';\nimport { OpenAICompletionLanguageModel } from './openai-completion-language-model';\nimport { OpenAICompletionModelId } from './openai-completion-options';\nimport { OpenAIEmbeddingModel } from './openai-embedding-model';\nimport { OpenAIEmbeddingModelId } from './openai-embedding-options';\nimport { OpenAIImageModel } from './openai-image-model';\nimport {\n OpenAIImageModelId,\n OpenAIImageSettings,\n} from './openai-image-settings';\nimport { openaiTools } from './openai-tools';\nimport { OpenAITranscriptionModel } from './openai-transcription-model';\nimport { OpenAITranscriptionModelId } from './openai-transcription-options';\nimport { OpenAIResponsesLanguageModel } from './responses/openai-responses-language-model';\nimport { OpenAIResponsesModelId } from './responses/openai-responses-settings';\nimport { OpenAISpeechModel } from './openai-speech-model';\nimport { OpenAISpeechModelId } from './openai-speech-options';\n\nexport interface OpenAIProvider extends ProviderV2 {\n (modelId: 'gpt-3.5-turbo-instruct'): OpenAICompletionLanguageModel;\n (modelId: OpenAIChatModelId): LanguageModelV2;\n\n /**\nCreates an OpenAI model for text generation.\n */\n languageModel(\n modelId: 'gpt-3.5-turbo-instruct',\n ): OpenAICompletionLanguageModel;\n languageModel(modelId: OpenAIChatModelId): LanguageModelV2;\n\n /**\nCreates an OpenAI chat model for text generation.\n */\n chat(modelId: OpenAIChatModelId): LanguageModelV2;\n\n /**\nCreates an OpenAI responses API model for text generation.\n */\n responses(modelId: OpenAIResponsesModelId): LanguageModelV2;\n\n /**\nCreates an OpenAI completion model for text generation.\n */\n completion(modelId: OpenAICompletionModelId): LanguageModelV2;\n\n /**\nCreates a model for text embeddings.\n */\n embedding(modelId: OpenAIEmbeddingModelId): EmbeddingModelV2<string>;\n\n /**\nCreates a model for text embeddings.\n\n@deprecated Use `textEmbeddingModel` instead.\n */\n textEmbedding(modelId: 
OpenAIEmbeddingModelId): EmbeddingModelV2<string>;\n\n /**\nCreates a model for text embeddings.\n */\n textEmbeddingModel(modelId: OpenAIEmbeddingModelId): EmbeddingModelV2<string>;\n\n /**\nCreates a model for image generation.\n */\n image(\n modelId: OpenAIImageModelId,\n settings?: OpenAIImageSettings,\n ): ImageModelV2;\n\n /**\nCreates a model for image generation.\n */\n imageModel(\n modelId: OpenAIImageModelId,\n settings?: OpenAIImageSettings,\n ): ImageModelV2;\n\n /**\nCreates a model for transcription.\n */\n transcription(modelId: OpenAITranscriptionModelId): TranscriptionModelV1;\n\n /**\nCreates a model for speech generation.\n */\n speech(modelId: OpenAISpeechModelId): SpeechModelV1;\n\n /**\nOpenAI-specific tools.\n */\n tools: typeof openaiTools;\n}\n\nexport interface OpenAIProviderSettings {\n /**\nBase URL for the OpenAI API calls.\n */\n baseURL?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nOpenAI Organization.\n */\n organization?: string;\n\n /**\nOpenAI project.\n */\n project?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nOpenAI compatibility mode. Should be set to `strict` when using the OpenAI API,\nand `compatible` when using 3rd party providers. In `compatible` mode, newer\ninformation such as streamOptions are not being sent. Defaults to 'compatible'.\n */\n compatibility?: 'strict' | 'compatible';\n\n /**\nProvider name. Overrides the `openai` default name for 3rd party providers.\n */\n name?: string;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAI provider instance.\n */\nexport function createOpenAI(\n options: OpenAIProviderSettings = {},\n): OpenAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.openai.com/v1';\n\n // we default to compatible, because strict breaks providers like Groq:\n const compatibility = options.compatibility ?? 'compatible';\n\n const providerName = options.name ?? 
'openai';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'OPENAI_API_KEY',\n description: 'OpenAI',\n })}`,\n 'OpenAI-Organization': options.organization,\n 'OpenAI-Project': options.project,\n ...options.headers,\n });\n\n const createChatModel = (modelId: OpenAIChatModelId) =>\n new OpenAIChatLanguageModel(modelId, {\n provider: `${providerName}.chat`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n compatibility,\n fetch: options.fetch,\n });\n\n const createCompletionModel = (modelId: OpenAICompletionModelId) =>\n new OpenAICompletionLanguageModel(modelId, {\n provider: `${providerName}.completion`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n compatibility,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: OpenAIEmbeddingModelId) =>\n new OpenAIEmbeddingModel(modelId, {\n provider: `${providerName}.embedding`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createImageModel = (\n modelId: OpenAIImageModelId,\n settings: OpenAIImageSettings = {},\n ) =>\n new OpenAIImageModel(modelId, settings, {\n provider: `${providerName}.image`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createTranscriptionModel = (modelId: OpenAITranscriptionModelId) =>\n new OpenAITranscriptionModel(modelId, {\n provider: `${providerName}.transcription`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createSpeechModel = (modelId: OpenAISpeechModelId) =>\n new OpenAISpeechModel(modelId, {\n provider: `${providerName}.speech`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: OpenAIChatModelId | OpenAICompletionModelId,\n ) => {\n if (new.target) {\n throw new Error(\n 'The OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n if (modelId === 'gpt-3.5-turbo-instruct') {\n return createCompletionModel(modelId);\n }\n\n return createChatModel(modelId);\n };\n\n const createResponsesModel = (modelId: OpenAIResponsesModelId) => {\n return new OpenAIResponsesLanguageModel(modelId, {\n provider: `${providerName}.responses`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n };\n\n const provider = function (\n modelId: OpenAIChatModelId | OpenAICompletionModelId,\n ) {\n return createLanguageModel(modelId);\n };\n\n provider.languageModel = createLanguageModel;\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.responses = createResponsesModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.image = createImageModel;\n provider.imageModel = createImageModel;\n\n provider.transcription = createTranscriptionModel;\n provider.transcriptionModel = createTranscriptionModel;\n\n provider.speech = createSpeechModel;\n provider.speechModel = createSpeechModel;\n\n provider.tools = openaiTools;\n\n return provider as OpenAIProvider;\n}\n\n/**\nDefault OpenAI provider instance. 
It uses 'strict' compatibility mode.\n */\nexport const openai = createOpenAI({\n compatibility: 'strict', // strict for OpenAI API\n});\n","import {\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAIChatMessages } from './convert-to-openai-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAIChatModelId,\n openaiProviderOptions,\n} from './openai-chat-options';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\nimport { prepareTools } from './openai-prepare-tools';\n\ntype OpenAIChatConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAIChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAIChatModelId;\n\n readonly supportedUrls = {\n 'image/*': [/^https?:\\/\\/.*$/],\n };\n\n private readonly config: OpenAIChatConfig;\n\n constructor(modelId: OpenAIChatModelId, config: OpenAIChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: LanguageModelV2CallOptions) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const openaiOptions =\n (await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiProviderOptions,\n })) ?? {};\n\n const structuredOutputs = openaiOptions.structuredOutputs ?? true;\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !structuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(\n {\n prompt,\n systemMessageMode: getSystemMessageMode(this.modelId),\n },\n );\n\n warnings.push(...messageWarnings);\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n logit_bias: openaiOptions.logitBias,\n user: openaiOptions.user,\n parallel_tool_calls: openaiOptions.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? // TODO convert into provider option\n structuredOutputs && responseFormat.schema != null\n ? 
{\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: true,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n stop: stopSequences,\n seed,\n\n // openai specific settings:\n // TODO remove in next major version; we auto-map maxOutputTokens now\n max_completion_tokens: openaiOptions.maxCompletionTokens,\n store: openaiOptions.store,\n metadata: openaiOptions.metadata,\n prediction: openaiOptions.prediction,\n reasoning_effort: openaiOptions.reasoningEffort,\n\n // messages:\n messages,\n };\n\n if (isReasoningModel(this.modelId)) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n if (baseArgs.frequency_penalty != null) {\n baseArgs.frequency_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n details: 'frequencyPenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.presence_penalty != null) {\n baseArgs.presence_penalty = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n details: 'presencePenalty is not supported for reasoning models',\n });\n }\n if (baseArgs.logit_bias != null) {\n baseArgs.logit_bias = undefined;\n warnings.push({\n type: 'other',\n message: 'logitBias is not supported for reasoning models',\n });\n }\n\n // reasoning models use max_completion_tokens instead of max_tokens:\n if (baseArgs.max_tokens != null) {\n if (baseArgs.max_completion_tokens == null) {\n baseArgs.max_completion_tokens = baseArgs.max_tokens;\n }\n baseArgs.max_tokens = undefined;\n }\n } else if (\n this.modelId.startsWith('gpt-4o-search-preview') ||\n this.modelId.startsWith('gpt-4o-mini-search-preview')\n ) {\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details:\n 'temperature is not supported for the search preview models and has been removed.',\n });\n }\n }\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n structuredOutputs,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const 
content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n for (const toolCall of choice.message.tool_calls ?? []) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n });\n }\n\n // provider metadata:\n const completionTokenDetails = response.usage?.completion_tokens_details;\n const promptTokenDetails = response.usage?.prompt_tokens_details;\n const providerMetadata: SharedV2ProviderMetadata = { openai: {} };\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata.openai.acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata.openai.rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage?.prompt_tokens ?? undefined,\n outputTokens: response.usage?.completion_tokens ?? undefined,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n providerMetadata,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let isFirstChunk = true;\n\n const providerMetadata: SharedV2ProviderMetadata = { openai: {} };\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.inputTokens = prompt_tokens ?? undefined;\n usage.outputTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n providerMetadata.openai.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n providerMetadata.openai.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text',\n text: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n // Tool call start. 
OpenAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n ...(providerMetadata != null ? 
{ providerMetadata } : {}),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: openaiTokenUsageSchema,\n }),\n openaiErrorDataSchema,\n]);\n\nfunction isReasoningModel(modelId: string) {\n return modelId.startsWith('o');\n}\n\nfunction isAudioModel(modelId: string) {\n return modelId.startsWith('gpt-4o-audio-preview');\n}\n\nfunction getSystemMessageMode(modelId: string) {\n if (!isReasoningModel(modelId)) {\n return 'system';\n }\n\n return (\n reasoningModels[modelId as keyof typeof reasoningModels]\n ?.systemMessageMode ?? 
'developer'\n );\n}\n\nconst reasoningModels = {\n 'o1-mini': {\n systemMessageMode: 'remove',\n },\n 'o1-mini-2024-09-12': {\n systemMessageMode: 'remove',\n },\n 'o1-preview': {\n systemMessageMode: 'remove',\n },\n 'o1-preview-2024-09-12': {\n systemMessageMode: 'remove',\n },\n o3: {\n systemMessageMode: 'developer',\n },\n 'o3-2025-04-16': {\n systemMessageMode: 'developer',\n },\n 'o3-mini': {\n systemMessageMode: 'developer',\n },\n 'o3-mini-2025-01-31': {\n systemMessageMode: 'developer',\n },\n 'o4-mini': {\n systemMessageMode: 'developer',\n },\n 'o4-mini-2025-04-16': {\n systemMessageMode: 'developer',\n },\n} as const;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIChatPrompt } from './openai-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToOpenAIChatMessages({\n prompt,\n systemMessageMode = 'system',\n}: {\n prompt: LanguageModelV2Prompt;\n systemMessageMode?: 'system' | 'developer' | 'remove';\n}): {\n messages: OpenAIChatPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIChatPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({ role: 'user', content: content[0].text });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n },\n };\n } else if (part.mediaType.startsWith('audio/')) {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'audio file parts with URLs',\n });\n }\n\n switch (part.mediaType) {\n case 'audio/wav': {\n return {\n type: 'input_audio',\n input_audio: {\n data: convertToBase64(part.data),\n format: 'wav',\n },\n };\n }\n case 'audio/mp3':\n case 'audio/mpeg': {\n return {\n type: 'input_audio',\n input_audio: {\n data: convertToBase64(part.data),\n format: 'mp3',\n },\n };\n }\n\n default: {\n throw new UnsupportedFunctionalityError({\n functionality: `audio content parts with media type ${part.mediaType}`,\n });\n }\n }\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'file',\n file: {\n filename: part.filename ?? 
`part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n },\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\n\n// https://platform.openai.com/docs/models\nexport type OpenAIChatModelId =\n | 'o1'\n | 'o1-2024-12-17'\n | 'o1-mini'\n | 'o1-mini-2024-09-12'\n | 'o1-preview'\n | 'o1-preview-2024-09-12'\n | 'o3-mini'\n | 'o3-mini-2025-01-31'\n | 'o3'\n | 'o3-2025-04-16'\n | 'o4-mini'\n | 'o4-mini-2025-04-16'\n | 'gpt-4.1'\n | 'gpt-4.1-2025-04-14'\n | 'gpt-4.1-mini'\n | 'gpt-4.1-mini-2025-04-14'\n | 'gpt-4.1-nano'\n | 'gpt-4.1-nano-2025-04-14'\n | 'gpt-4o'\n | 'gpt-4o-2024-05-13'\n | 'gpt-4o-2024-08-06'\n | 'gpt-4o-2024-11-20'\n | 'gpt-4o-audio-preview'\n | 'gpt-4o-audio-preview-2024-10-01'\n | 'gpt-4o-audio-preview-2024-12-17'\n | 'gpt-4o-search-preview'\n | 'gpt-4o-search-preview-2025-03-11'\n | 'gpt-4o-mini-search-preview'\n | 'gpt-4o-mini-search-preview-2025-03-11'\n | 'gpt-4o-mini'\n | 'gpt-4o-mini-2024-07-18'\n | 'gpt-4-turbo'\n | 'gpt-4-turbo-2024-04-09'\n | 'gpt-4-turbo-preview'\n | 'gpt-4-0125-preview'\n | 'gpt-4-1106-preview'\n | 'gpt-4'\n | 'gpt-4-0613'\n | 'gpt-4.5-preview'\n | 'gpt-4.5-preview-2025-02-27'\n | 'gpt-3.5-turbo-0125'\n | 'gpt-3.5-turbo'\n | 'gpt-3.5-turbo-1106'\n | 'chatgpt-4o-latest'\n | (string & {});\n\nexport const openaiProviderOptions = z.object({\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.coerce.number(), z.number()).optional(),\n\n /**\n * Whether to enable parallel function 
calling during tool use. Default to true.\n */\n parallelToolCalls: z.boolean().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help OpenAI to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.enum(['low', 'medium', 'high']).optional(),\n\n /**\n * Maximum number of completion tokens to generate. Useful for reasoning models.\n */\n maxCompletionTokens: z.number().optional(),\n\n /**\n * Whether to enable persistence in responses API.\n */\n store: z.boolean().optional(),\n\n /**\n * Metadata to associate with the request.\n */\n metadata: z.record(z.string()).optional(),\n\n /**\n * Parameters for prediction mode.\n */\n prediction: z.record(z.any()).optional(),\n\n /**\n * Whether to use structured outputs.\n *\n * @default true\n */\n structuredOutputs: z.boolean().optional(),\n});\n\nexport type OpenAIProviderOptions = z.infer<typeof openaiProviderOptions>;\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const openaiErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAIErrorData = z.infer<typeof openaiErrorDataSchema>;\n\nexport const openaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: openaiErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n JSONSchema7,\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n structuredOutputs,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n structuredOutputs: boolean;\n}): {\n tools?: {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict?: boolean;\n };\n }[];\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'function'; function: { name: string } };\n\n toolWarnings: Array<LanguageModelV2CallWarning>;\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict: boolean | undefined;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: structuredOutputs ? 
true : undefined,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAICompletionModelId,\n openaiCompletionProviderOptions,\n} from './openai-completion-options';\nimport {\n openaiErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\n\ntype OpenAICompletionConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAICompletionLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAICompletionModelId;\n\n private readonly config: OpenAICompletionConfig;\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n constructor(\n modelId: OpenAICompletionModelId,\n config: OpenAICompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n // No URLs are supported for completion models.\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n tools,\n toolChoice,\n seed,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const openaiOptions = {\n ...(await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiCompletionProviderOptions,\n })),\n ...(await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompletionProviderOptions,\n })),\n };\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: 
completionPrompt, stopSequences } =\n convertToOpenAICompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: openaiOptions.echo,\n logit_bias: openaiOptions.logitBias,\n suffix: openaiOptions.suffix,\n user: openaiOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n return {\n content: [{ type: 'text', text: choice.text }],\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n },\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiCompletionChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiCompletionChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text',\n text: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n openaiErrorDataSchema,\n]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if 
first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { z } from 'zod';\n\n// https://platform.openai.com/docs/models\nexport type OpenAICompletionModelId = 'gpt-3.5-turbo-instruct' | (string & {});\n\nexport const openaiCompletionProviderOptions = z.object({\n /**\nEcho back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\nModify the likelihood of specified tokens appearing in the completion.\n\nAccepts a JSON object that maps tokens (specified by their token ID in\nthe GPT tokenizer) to an associated bias value from -100 to 100. You\ncan use this tokenizer tool to convert text to token IDs. Mathematically,\nthe bias is added to the logits generated by the model prior to sampling.\nThe exact effect will vary per model, but values between -1 and 1 should\ndecrease or increase likelihood of selection; values like -100 or 100\nshould result in a ban or exclusive selection of the relevant token.\n\nAs an example, you can pass {\"50256\": -100} to prevent the <|endoftext|>\ntoken from being generated.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\nThe suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\nA unique identifier representing your end-user, which can help OpenAI to\nmonitor and detect abuse. 
Learn more.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompletionProviderOptions = z.infer<\n typeof openaiCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport {\n OpenAIEmbeddingModelId,\n openaiEmbeddingProviderOptions,\n} from './openai-embedding-options';\nimport { openaiFailedResponseHandler } from './openai-error';\n\nexport class OpenAIEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: OpenAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: OpenAIConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(modelId: OpenAIEmbeddingModelId, config: OpenAIConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n // Parse provider options\n const openaiOptions =\n (await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiEmbeddingProviderOptions,\n })) ?? {};\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: openaiOptions.dimensions,\n user: openaiOptions.user,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { z } from 'zod';\n\nexport type OpenAIEmbeddingModelId =\n | 'text-embedding-3-small'\n | 'text-embedding-3-large'\n | 'text-embedding-ada-002'\n | (string & {});\n\nexport const openaiEmbeddingProviderOptions = z.object({\n /**\nThe number of dimensions the resulting output embeddings should have.\nOnly supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\nA unique identifier representing your end-user, which can help OpenAI to\nmonitor and detect abuse. 
Learn more.\n*/\n user: z.string().optional(),\n});\n\nexport type OpenAIEmbeddingProviderOptions = z.infer<\n typeof openaiEmbeddingProviderOptions\n>;\n","import { ImageModelV2, ImageModelV2CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAIImageModelId,\n OpenAIImageSettings,\n modelMaxImagesPerCall,\n hasDefaultResponseFormat,\n} from './openai-image-settings';\n\ninterface OpenAIImageModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class OpenAIImageModel implements ImageModelV2 {\n readonly specificationVersion = 'v2';\n\n get maxImagesPerCall(): number {\n return (\n this.settings.maxImagesPerCall ?? modelMaxImagesPerCall[this.modelId] ?? 1\n );\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAIImageModelId,\n private readonly settings: OpenAIImageSettings,\n private readonly config: OpenAIImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV2['doGenerate']>>\n > {\n const warnings: Array<ImageModelV2CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n ...(!hasDefaultResponseFormat.has(this.modelId)\n ? 
{ response_format: 'b64_json' }\n : {}),\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","export type OpenAIImageModelId =\n | 'gpt-image-1'\n | 'dall-e-3'\n | 'dall-e-2'\n | (string & {});\n\n// https://platform.openai.com/docs/guides/images\nexport const modelMaxImagesPerCall: Record<OpenAIImageModelId, number> = {\n 'dall-e-3': 1,\n 'dall-e-2': 10,\n 'gpt-image-1': 10,\n};\n\nexport const hasDefaultResponseFormat = new Set(['gpt-image-1']);\n\nexport interface OpenAIImageSettings {\n /**\nOverride the maximum number of images per call (default is dependent on the\nmodel, or 1 for an unknown model).\n */\n maxImagesPerCall?: number;\n}\n","import { z } from 'zod';\n\nconst WebSearchPreviewParameters = z.object({});\n\nfunction webSearchPreviewTool({\n searchContextSize,\n userLocation,\n}: {\n searchContextSize?: 'low' | 'medium' | 'high';\n userLocation?: {\n type?: 'approximate';\n city?: string;\n region?: string;\n country?: string;\n timezone?: string;\n };\n} = {}): {\n type: 'provider-defined';\n id: 'openai.web_search_preview';\n args: {};\n parameters: typeof WebSearchPreviewParameters;\n} {\n return {\n type: 'provider-defined',\n id: 'openai.web_search_preview',\n args: {\n searchContextSize,\n userLocation,\n },\n parameters: WebSearchPreviewParameters,\n };\n}\n\nexport const openaiTools = {\n webSearchPreview: webSearchPreviewTool,\n};\n","import {\n TranscriptionModelV1,\n TranscriptionModelV1CallOptions,\n TranscriptionModelV1CallWarning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertBase64ToUint8Array,\n createJsonResponseHandler,\n parseProviderOptions,\n postFormDataToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport {\n OpenAITranscriptionModelId,\n openAITranscriptionProviderOptions,\n OpenAITranscriptionProviderOptions,\n} from './openai-transcription-options';\n\nexport type OpenAITranscriptionCallOptions = Omit<\n TranscriptionModelV1CallOptions,\n 'providerOptions'\n> & {\n providerOptions?: {\n openai?: OpenAITranscriptionProviderOptions;\n };\n};\n\ninterface OpenAITranscriptionModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\n// https://platform.openai.com/docs/guides/speech-to-text#supported-languages\nconst languageMap = {\n afrikaans: 'af',\n arabic: 'ar',\n armenian: 'hy',\n azerbaijani: 'az',\n belarusian: 'be',\n bosnian: 'bs',\n bulgarian: 'bg',\n catalan: 'ca',\n chinese: 'zh',\n croatian: 'hr',\n czech: 'cs',\n danish: 'da',\n dutch: 'nl',\n english: 'en',\n estonian: 'et',\n finnish: 'fi',\n french: 'fr',\n galician: 'gl',\n german: 'de',\n greek: 'el',\n hebrew: 'he',\n hindi: 'hi',\n hungarian: 'hu',\n icelandic: 'is',\n indonesian: 'id',\n italian: 'it',\n japanese: 'ja',\n kannada: 'kn',\n kazakh: 'kk',\n korean: 'ko',\n latvian: 'lv',\n lithuanian: 
'lt',\n macedonian: 'mk',\n malay: 'ms',\n marathi: 'mr',\n maori: 'mi',\n nepali: 'ne',\n norwegian: 'no',\n persian: 'fa',\n polish: 'pl',\n portuguese: 'pt',\n romanian: 'ro',\n russian: 'ru',\n serbian: 'sr',\n slovak: 'sk',\n slovenian: 'sl',\n spanish: 'es',\n swahili: 'sw',\n swedish: 'sv',\n tagalog: 'tl',\n tamil: 'ta',\n thai: 'th',\n turkish: 'tr',\n ukrainian: 'uk',\n urdu: 'ur',\n vietnamese: 'vi',\n welsh: 'cy',\n};\n\nexport class OpenAITranscriptionModel implements TranscriptionModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAITranscriptionModelId,\n private readonly config: OpenAITranscriptionModelConfig,\n ) {}\n\n private async getArgs({\n audio,\n mediaType,\n providerOptions,\n }: OpenAITranscriptionCallOptions) {\n const warnings: TranscriptionModelV1CallWarning[] = [];\n\n // Parse provider options\n const openAIOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openAITranscriptionProviderOptions,\n });\n\n // Create form data with base fields\n const formData = new FormData();\n const blob =\n audio instanceof Uint8Array\n ? new Blob([audio])\n : new Blob([convertBase64ToUint8Array(audio)]);\n\n formData.append('model', this.modelId);\n formData.append('file', new File([blob], 'audio', { type: mediaType }));\n\n // Add provider-specific options\n if (openAIOptions) {\n const transcriptionModelOptions = {\n include: openAIOptions.include,\n language: openAIOptions.language,\n prompt: openAIOptions.prompt,\n temperature: openAIOptions.temperature,\n timestamp_granularities: openAIOptions.timestampGranularities,\n };\n\n for (const [key, value] of Object.entries(transcriptionModelOptions)) {\n if (value != null) {\n formData.append(key, String(value));\n }\n }\n }\n\n return {\n formData,\n warnings,\n };\n }\n\n async doGenerate(\n options: OpenAITranscriptionCallOptions,\n ): Promise<Awaited<ReturnType<TranscriptionModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { formData, warnings } = await this.getArgs(options);\n\n const {\n value: response,\n responseHeaders,\n rawValue: rawResponse,\n } = await postFormDataToApi({\n url: this.config.url({\n path: '/audio/transcriptions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n formData,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTranscriptionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const language =\n response.language != null && response.language in languageMap\n ? languageMap[response.language as keyof typeof languageMap]\n : undefined;\n\n return {\n text: response.text,\n segments:\n response.words?.map(word => ({\n text: word.word,\n startSecond: word.start,\n endSecond: word.end,\n })) ?? [],\n language,\n durationInSeconds: response.duration ?? 
undefined,\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n\nconst openaiTranscriptionResponseSchema = z.object({\n text: z.string(),\n language: z.string().nullish(),\n duration: z.number().nullish(),\n words: z\n .array(\n z.object({\n word: z.string(),\n start: z.number(),\n end: z.number(),\n }),\n )\n .nullish(),\n});\n","import { z } from 'zod';\n\nexport type OpenAITranscriptionModelId =\n | 'whisper-1'\n | 'gpt-4o-mini-transcribe'\n | 'gpt-4o-transcribe'\n | (string & {});\n\n// https://platform.openai.com/docs/api-reference/audio/createTranscription\nexport const openAITranscriptionProviderOptions = z.object({\n /**\n * Additional information to include in the transcription response.\n */\n\n include: z.array(z.string()).nullish(),\n\n /**\n * The language of the input audio in ISO-639-1 format.\n */\n language: z.string().nullish(),\n\n /**\n * An optional text to guide the model's style or continue a previous audio segment.\n */\n prompt: z.string().nullish(),\n\n /**\n * The sampling temperature, between 0 and 1.\n * @default 0\n */\n temperature: z.number().min(0).max(1).default(0).nullish(),\n\n /**\n * The timestamp granularities to populate for this transcription.\n * @default ['segment']\n */\n timestampGranularities: z\n .array(z.enum(['word', 'segment']))\n .default(['segment'])\n .nullish(),\n});\n\nexport type OpenAITranscriptionProviderOptions = z.infer<\n typeof openAITranscriptionProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from '../openai-config';\nimport { openaiFailedResponseHandler } from '../openai-error';\nimport { convertToOpenAIResponsesMessages } from './convert-to-openai-responses-messages';\nimport { mapOpenAIResponseFinishReason } from './map-openai-responses-finish-reason';\nimport { prepareResponsesTools } from './openai-responses-prepare-tools';\nimport { OpenAIResponsesModelId } from './openai-responses-settings';\n\nexport class OpenAIResponsesLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAIResponsesModelId;\n\n private readonly config: OpenAIConfig;\n\n constructor(modelId: OpenAIResponsesModelId, config: OpenAIConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'image/*': [/^https?:\\/\\/.*$/],\n };\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n maxOutputTokens,\n temperature,\n stopSequences,\n topP,\n topK,\n presencePenalty,\n frequencyPenalty,\n seed,\n prompt,\n providerOptions,\n tools,\n toolChoice,\n responseFormat,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n const modelConfig = getResponsesModelConfig(this.modelId);\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n 
setting: 'presencePenalty',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'stopSequences' });\n }\n\n const { messages, warnings: messageWarnings } =\n convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode: modelConfig.systemMessageMode,\n });\n\n warnings.push(...messageWarnings);\n\n const openaiOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: openaiResponsesProviderOptionsSchema,\n });\n\n const isStrict = openaiOptions?.strictSchemas ?? true;\n\n const baseArgs = {\n model: this.modelId,\n input: messages,\n temperature,\n top_p: topP,\n max_output_tokens: maxOutputTokens,\n\n ...(responseFormat?.type === 'json' && {\n text: {\n format:\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n strict: isStrict,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n schema: responseFormat.schema,\n }\n : { type: 'json_object' },\n },\n }),\n\n // provider options:\n metadata: openaiOptions?.metadata,\n parallel_tool_calls: openaiOptions?.parallelToolCalls,\n previous_response_id: openaiOptions?.previousResponseId,\n store: openaiOptions?.store,\n user: openaiOptions?.user,\n instructions: openaiOptions?.instructions,\n\n // model-specific settings:\n ...(modelConfig.isReasoningModel &&\n (openaiOptions?.reasoningEffort != null ||\n openaiOptions?.reasoningSummary != null) && {\n reasoning: {\n ...(openaiOptions?.reasoningEffort != null && {\n effort: openaiOptions.reasoningEffort,\n }),\n ...(openaiOptions?.reasoningSummary != null && {\n summary: openaiOptions.reasoningSummary,\n }),\n },\n }),\n ...(modelConfig.requiredAutoTruncation && {\n truncation: 'auto',\n }),\n };\n\n if (modelConfig.isReasoningModel) {\n // remove unsupported settings for reasoning models\n // see https://platform.openai.com/docs/guides/reasoning#limitations\n if (baseArgs.temperature != null) {\n baseArgs.temperature = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'temperature',\n details: 'temperature is not supported for reasoning models',\n });\n }\n\n if (baseArgs.top_p != null) {\n baseArgs.top_p = undefined;\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topP',\n details: 'topP is not supported for reasoning models',\n });\n }\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareResponsesTools({\n tools,\n toolChoice,\n strict: isStrict,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n output: z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n role: 
z.literal('assistant'),\n content: z.array(\n z.object({\n type: z.literal('output_text'),\n text: z.string(),\n annotations: z.array(\n z.object({\n type: z.literal('url_citation'),\n start_index: z.number(),\n end_index: z.number(),\n url: z.string(),\n title: z.string(),\n }),\n ),\n }),\n ),\n }),\n z.object({\n type: z.literal('function_call'),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n z.object({\n type: z.literal('web_search_call'),\n }),\n z.object({\n type: z.literal('computer_call'),\n }),\n z.object({\n type: z.literal('reasoning'),\n summary: z.array(\n z.object({\n type: z.literal('summary_text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n incomplete_details: z.object({ reason: z.string() }).nullable(),\n usage: usageSchema,\n }),\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const content: Array<LanguageModelV2Content> = [];\n\n // map response content to content array\n for (const part of response.output) {\n switch (part.type) {\n case 'reasoning': {\n content.push({\n type: 'reasoning',\n text: part.summary.map(summary => summary.text).join(),\n });\n break;\n }\n\n case 'message': {\n for (const contentPart of part.content) {\n content.push({\n type: 'text',\n text: contentPart.text,\n });\n\n for (const annotation of contentPart.annotations) {\n content.push({\n type: 'source',\n sourceType: 'url',\n id: this.config.generateId?.() ?? generateId(),\n url: annotation.url,\n title: annotation.title,\n });\n }\n }\n break;\n }\n\n case 'function_call': {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: part.call_id,\n toolName: part.name,\n args: part.arguments,\n });\n break;\n }\n }\n }\n\n return {\n content,\n finishReason: mapOpenAIResponseFinishReason({\n finishReason: response.incomplete_details?.reason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens,\n },\n request: { body },\n response: {\n id: response.id,\n timestamp: new Date(response.created_at * 1000),\n modelId: response.model,\n headers: responseHeaders,\n body: rawResponse,\n },\n providerMetadata: {\n openai: {\n responseId: response.id,\n cachedPromptTokens:\n response.usage.input_tokens_details?.cached_tokens ?? null,\n reasoningTokens:\n response.usage.output_tokens_details?.reasoning_tokens ?? 
null,\n },\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/responses',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...body,\n stream: true,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiResponsesChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const self = this;\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let cachedPromptTokens: number | null = null;\n let reasoningTokens: number | null = null;\n let responseId: string | null = null;\n const ongoingToolCalls: Record<\n number,\n { toolName: string; toolCallId: string } | undefined\n > = {};\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiResponsesChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isResponseOutputItemAddedChunk(value)) {\n if (value.item.type === 'function_call') {\n ongoingToolCalls[value.output_index] = {\n toolName: value.item.name,\n toolCallId: value.item.call_id,\n };\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n argsTextDelta: value.item.arguments,\n });\n }\n } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {\n const toolCall = ongoingToolCalls[value.output_index];\n\n if (toolCall != null) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: value.delta,\n });\n }\n } else if (isResponseCreatedChunk(value)) {\n responseId = value.response.id;\n controller.enqueue({\n type: 'response-metadata',\n id: value.response.id,\n timestamp: new Date(value.response.created_at * 1000),\n modelId: value.response.model,\n });\n } else if (isTextDeltaChunk(value)) {\n controller.enqueue({\n type: 'text',\n text: value.delta,\n });\n } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {\n controller.enqueue({\n type: 'reasoning',\n text: value.delta,\n });\n } else if (\n isResponseOutputItemDoneChunk(value) &&\n value.item.type === 'function_call'\n ) {\n ongoingToolCalls[value.output_index] = undefined;\n hasToolCalls = true;\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: value.item.call_id,\n toolName: value.item.name,\n args: value.item.arguments,\n });\n } else if (isResponseFinishedChunk(value)) {\n finishReason = mapOpenAIResponseFinishReason({\n finishReason: value.response.incomplete_details?.reason,\n hasToolCalls,\n });\n usage.inputTokens = value.response.usage.input_tokens;\n usage.outputTokens = value.response.usage.output_tokens;\n cachedPromptTokens =\n 
value.response.usage.input_tokens_details?.cached_tokens ??\n cachedPromptTokens;\n reasoningTokens =\n value.response.usage.output_tokens_details?.reasoning_tokens ??\n reasoningTokens;\n } else if (isResponseAnnotationAddedChunk(value)) {\n controller.enqueue({\n type: 'source',\n sourceType: 'url',\n id: self.config.generateId?.() ?? generateId(),\n url: value.annotation.url,\n title: value.annotation.title,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n ...((cachedPromptTokens != null || reasoningTokens != null) && {\n providerMetadata: {\n openai: {\n responseId,\n cachedPromptTokens,\n reasoningTokens,\n },\n },\n }),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n input_tokens: z.number(),\n input_tokens_details: z\n .object({ cached_tokens: z.number().nullish() })\n .nullish(),\n output_tokens: z.number(),\n output_tokens_details: z\n .object({ reasoning_tokens: z.number().nullish() })\n .nullish(),\n});\n\nconst textDeltaChunkSchema = z.object({\n type: z.literal('response.output_text.delta'),\n delta: z.string(),\n});\n\nconst responseFinishedChunkSchema = z.object({\n type: z.enum(['response.completed', 'response.incomplete']),\n response: z.object({\n incomplete_details: z.object({ reason: z.string() }).nullish(),\n usage: usageSchema,\n }),\n});\n\nconst responseCreatedChunkSchema = z.object({\n type: z.literal('response.created'),\n response: z.object({\n id: z.string(),\n created_at: z.number(),\n model: z.string(),\n }),\n});\n\nconst responseOutputItemDoneSchema = z.object({\n type: z.literal('response.output_item.done'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n status: z.literal('completed'),\n }),\n ]),\n});\n\nconst responseFunctionCallArgumentsDeltaSchema = z.object({\n type: z.literal('response.function_call_arguments.delta'),\n item_id: z.string(),\n output_index: z.number(),\n delta: z.string(),\n});\n\nconst responseOutputItemAddedSchema = z.object({\n type: z.literal('response.output_item.added'),\n output_index: z.number(),\n item: z.discriminatedUnion('type', [\n z.object({\n type: z.literal('message'),\n }),\n z.object({\n type: z.literal('function_call'),\n id: z.string(),\n call_id: z.string(),\n name: z.string(),\n arguments: z.string(),\n }),\n ]),\n});\n\nconst responseAnnotationAddedSchema = z.object({\n type: z.literal('response.output_text.annotation.added'),\n annotation: z.object({\n type: z.literal('url_citation'),\n url: z.string(),\n title: z.string(),\n }),\n});\n\nconst responseReasoningSummaryTextDeltaSchema = z.object({\n type: z.literal('response.reasoning_summary_text.delta'),\n item_id: z.string(),\n output_index: z.number(),\n summary_index: z.number(),\n delta: z.string(),\n});\n\nconst openaiResponsesChunkSchema = z.union([\n textDeltaChunkSchema,\n responseFinishedChunkSchema,\n responseCreatedChunkSchema,\n responseOutputItemDoneSchema,\n responseFunctionCallArgumentsDeltaSchema,\n responseOutputItemAddedSchema,\n responseAnnotationAddedSchema,\n responseReasoningSummaryTextDeltaSchema,\n z.object({ type: z.string() }).passthrough(), // fallback for unknown chunks\n]);\n\nfunction isTextDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof 
textDeltaChunkSchema> {\n return chunk.type === 'response.output_text.delta';\n}\n\nfunction isResponseOutputItemDoneChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemDoneSchema> {\n return chunk.type === 'response.output_item.done';\n}\n\nfunction isResponseFinishedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFinishedChunkSchema> {\n return (\n chunk.type === 'response.completed' || chunk.type === 'response.incomplete'\n );\n}\n\nfunction isResponseCreatedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseCreatedChunkSchema> {\n return chunk.type === 'response.created';\n}\n\nfunction isResponseFunctionCallArgumentsDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseFunctionCallArgumentsDeltaSchema> {\n return chunk.type === 'response.function_call_arguments.delta';\n}\n\nfunction isResponseOutputItemAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseOutputItemAddedSchema> {\n return chunk.type === 'response.output_item.added';\n}\n\nfunction isResponseAnnotationAddedChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseAnnotationAddedSchema> {\n return chunk.type === 'response.output_text.annotation.added';\n}\n\nfunction isResponseReasoningSummaryTextDeltaChunk(\n chunk: z.infer<typeof openaiResponsesChunkSchema>,\n): chunk is z.infer<typeof responseReasoningSummaryTextDeltaSchema> {\n return chunk.type === 'response.reasoning_summary_text.delta';\n}\n\ntype ResponsesModelConfig = {\n isReasoningModel: boolean;\n systemMessageMode: 'remove' | 'system' | 'developer';\n requiredAutoTruncation: boolean;\n};\n\nfunction getResponsesModelConfig(modelId: string): ResponsesModelConfig {\n // o series reasoning models:\n if (modelId.startsWith('o')) {\n if (modelId.startsWith('o1-mini') || modelId.startsWith('o1-preview')) {\n return {\n isReasoningModel: true,\n systemMessageMode: 'remove',\n requiredAutoTruncation: false,\n };\n }\n\n return {\n isReasoningModel: true,\n systemMessageMode: 'developer',\n requiredAutoTruncation: false,\n };\n }\n\n // gpt models:\n return {\n isReasoningModel: false,\n systemMessageMode: 'system',\n requiredAutoTruncation: false,\n };\n}\n\nconst openaiResponsesProviderOptionsSchema = z.object({\n metadata: z.any().nullish(),\n parallelToolCalls: z.boolean().nullish(),\n previousResponseId: z.string().nullish(),\n store: z.boolean().nullish(),\n user: z.string().nullish(),\n reasoningEffort: z.string().nullish(),\n strictSchemas: z.boolean().nullish(),\n instructions: z.string().nullish(),\n reasoningSummary: z.string().nullish(),\n});\n\nexport type OpenAIResponsesProviderOptions = z.infer<\n typeof openaiResponsesProviderOptionsSchema\n>;\n","import {\n LanguageModelV2CallWarning,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesPrompt } from './openai-responses-api-types';\n\nexport function convertToOpenAIResponsesMessages({\n prompt,\n systemMessageMode,\n}: {\n prompt: LanguageModelV2Prompt;\n systemMessageMode: 'system' | 'developer' | 'remove';\n}): {\n messages: OpenAIResponsesPrompt;\n warnings: Array<LanguageModelV2CallWarning>;\n} {\n const messages: OpenAIResponsesPrompt = [];\n const warnings: Array<LanguageModelV2CallWarning> = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n 
case 'system': {\n switch (systemMessageMode) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n case 'developer': {\n messages.push({ role: 'developer', content });\n break;\n }\n case 'remove': {\n warnings.push({\n type: 'other',\n message: 'system messages are removed for this model',\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = systemMessageMode;\n throw new Error(\n `Unsupported system message mode: ${_exhaustiveCheck}`,\n );\n }\n }\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map((part, index) => {\n switch (part.type) {\n case 'text': {\n return { type: 'input_text', text: part.text };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'input_image',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerOptions?.openai?.imageDetail,\n };\n } else if (part.mediaType === 'application/pdf') {\n if (part.data instanceof URL) {\n // The AI SDK automatically downloads files for user file parts with URLs\n throw new UnsupportedFunctionalityError({\n functionality: 'PDF file parts with URLs',\n });\n }\n\n return {\n type: 'input_file',\n filename: part.filename ?? `part-${index}.pdf`,\n file_data: `data:application/pdf;base64,${part.data}`,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n messages.push({\n role: 'assistant',\n content: [{ type: 'output_text', text: part.text }],\n });\n break;\n }\n case 'tool-call': {\n messages.push({\n type: 'function_call',\n call_id: part.toolCallId,\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n });\n break;\n }\n }\n }\n\n break;\n }\n\n case 'tool': {\n for (const part of content) {\n messages.push({\n type: 'function_call_output',\n call_id: part.toolCallId,\n output: JSON.stringify(part.result),\n });\n }\n\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return { messages, warnings };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIResponseFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case undefined:\n case null:\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'max_output_tokens':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n default:\n return hasToolCalls ? 
'tool-calls' : 'unknown';\n }\n}\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAIResponsesTool } from './openai-responses-api-types';\n\nexport function prepareResponsesTools({\n tools,\n toolChoice,\n strict,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n strict: boolean;\n}): {\n tools?: Array<OpenAIResponsesTool>;\n toolChoice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'web_search_preview' }\n | { type: 'function'; name: string };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiTools: Array<OpenAIResponsesTool> = [];\n\n for (const tool of tools) {\n switch (tool.type) {\n case 'function':\n openaiTools.push({\n type: 'function',\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: strict ? true : undefined,\n });\n break;\n case 'provider-defined':\n switch (tool.id) {\n case 'openai.web_search_preview':\n openaiTools.push({\n type: 'web_search_preview',\n search_context_size: tool.args.searchContextSize as\n | 'low'\n | 'medium'\n | 'high',\n user_location: tool.args.userLocation as {\n type: 'approximate';\n city: string;\n region: string;\n },\n });\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n break;\n default:\n toolWarnings.push({ type: 'unsupported-tool', tool });\n break;\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n toolChoice:\n toolChoice.toolName === 'web_search_preview'\n ? 
{ type: 'web_search_preview' }\n : { type: 'function', name: toolChoice.toolName },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { SpeechModelV1, SpeechModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createBinaryResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAIConfig } from './openai-config';\nimport { openaiFailedResponseHandler } from './openai-error';\nimport { OpenAISpeechModelId } from './openai-speech-options';\nimport { OpenAISpeechAPITypes } from './openai-api-types';\n\n// https://platform.openai.com/docs/api-reference/audio/createSpeech\nconst OpenAIProviderOptionsSchema = z.object({\n instructions: z.string().nullish(),\n speed: z.number().min(0.25).max(4.0).default(1.0).nullish(),\n});\n\nexport type OpenAISpeechCallOptions = z.infer<\n typeof OpenAIProviderOptionsSchema\n>;\n\ninterface OpenAISpeechModelConfig extends OpenAIConfig {\n _internal?: {\n currentDate?: () => Date;\n };\n}\n\nexport class OpenAISpeechModel implements SpeechModelV1 {\n readonly specificationVersion = 'v1';\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAISpeechModelId,\n private readonly config: OpenAISpeechModelConfig,\n ) {}\n\n private async getArgs({\n text,\n voice = 'alloy',\n outputFormat = 'mp3',\n speed,\n instructions,\n providerOptions,\n }: Parameters<SpeechModelV1['doGenerate']>[0]) {\n const warnings: SpeechModelV1CallWarning[] = [];\n\n // Parse provider options\n const openAIOptions = await parseProviderOptions({\n provider: 'openai',\n providerOptions,\n schema: OpenAIProviderOptionsSchema,\n });\n\n // Create request body\n const requestBody: Record<string, unknown> = {\n model: this.modelId,\n input: text,\n voice,\n response_format: 'mp3',\n speed,\n instructions,\n };\n\n if (outputFormat) {\n if (['mp3', 'opus', 'aac', 'flac', 'wav', 'pcm'].includes(outputFormat)) {\n requestBody.response_format = outputFormat;\n } else {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'outputFormat',\n details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`,\n });\n }\n }\n\n // Add provider-specific options\n if (openAIOptions) {\n const speechModelOptions: OpenAISpeechAPITypes = {};\n\n for (const key in speechModelOptions) {\n const value = speechModelOptions[key as keyof OpenAISpeechAPITypes];\n if (value !== undefined) {\n requestBody[key] = value;\n }\n }\n }\n\n return {\n requestBody,\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<SpeechModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<SpeechModelV1['doGenerate']>>> {\n const currentDate = this.config._internal?.currentDate?.() ?? 
new Date();\n const { requestBody, warnings } = await this.getArgs(options);\n\n const {\n value: audio,\n responseHeaders,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/audio/speech',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: requestBody,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createBinaryResponseHandler(),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n audio,\n warnings,\n request: {\n body: JSON.stringify(requestBody),\n },\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACQA,IAAAA,0BAIO;;;ACZP,IAAAC,mBAUO;AACP,IAAAC,yBAUO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AAEP,4BAAgC;AAEzB,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA,oBAAoB;AACtB,GAME;AACA,QAAM,WAA6B,CAAC;AACpC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC;AACxD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AA1DhD;AA2DY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA;AAAA,sBAG5D,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,oBACxC;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,UAAU,WAAW,QAAQ,GAAG;AAC9C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,0BAAQ,KAAK,WAAW;AAAA,oBACtB,KAAK,aAAa;AAChB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa;AAAA,0BACX,UAAM,uCAAgB,KAAK,IAAI;AAAA,0BAC/B,QAAQ;AAAA,wBACV;AAAA,sBACF;AAAA,oBACF;AAAA,oBACA,KAAK;AAAA,oBACL,KAAK,cAAc;AACjB,6BAAO;AAAA,wBACL,MAAM;AAAA,wBACN,aAAa;AAAA,0BACX,UAAM,uCAAgB,KAAK,IAAI;AAAA,0BAC/B,QAAQ;AAAA,wBACV;AAAA,sBACF;AAAA,oBACF;AAAA,oBAEA,SAAS;AACP,4BAAM,IAAI,8CAA8B;AAAA,wBACtC,eAAe,uCAAuC,KAAK,SAAS;AAAA,sBACtE,CAAC;AAAA,oBACH;AAAA,kBACF;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAC5B,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,MAAM;AAAA,sBACJ,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,sBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,oBACrD;AAAA,kBACF;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UA
AU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;ACvMO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAAkB;AAkDX,IAAM,wBAAwB,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO5C,WAAW,aAAE,OAAO,aAAE,OAAO,OAAO,GAAG,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5D,mBAAmB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMxC,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,aAAE,KAAK,CAAC,OAAO,UAAU,MAAM,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5D,qBAAqB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKzC,OAAO,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK5B,UAAU,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,YAAY,aAAE,OAAO,aAAE,IAAI,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvC,mBAAmB,aAAE,QAAQ,EAAE,SAAS;AAC1C,CAAC;;;ACrGD,IAAAC,cAAkB;AAClB,IAAAC,yBAA+C;AAExC,IAAM,wBAAwB,cAAE,OAAO;AAAA,EAC5C,OAAO,cAAE,OAAO;AAAA,IACd,SAAS,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,cAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,cAAE,MAAM,CAAC,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ACrBD,IAAAC,mBAKO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAMC,eAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,MAAAA,aAAY,KAAK;AAAA,QACf,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,oBAAoB,OAAO;AAAA,QACrC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAOA,cAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAOA,cAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAOA;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANrDO,IAAM,0BAAN,MAAyD;AAAA,EAW9D,YAAY,SAA4B,QAA0B;AAVlE,SAAS,uBAAuB;AAIhC,SAAS,gBAAgB;AAAA,MACvB,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AAKE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAA+B;AA9EjC;AA+EI,UAAM,WAAyC,CAAC;AAGhD,UAAM,iBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,UAAM,qBAAoB,mBAAc,sBAAd,YAAmC;AAE7D,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,mBACD;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB
,IAAI;AAAA,MAC9C;AAAA,QACE;AAAA,QACA,mBAAmB,qBAAqB,KAAK,OAAO;AAAA,MACtD;AAAA,IACF;AAEA,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,YAAY,cAAc;AAAA,MAC1B,MAAM,cAAc;AAAA,MACpB,qBAAqB,cAAc;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS;AAAA;AAAA,QAErB,qBAAqB,eAAe,UAAU,OAC5C;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,QAAQ;AAAA,YACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc;AAAA,UACxB;AAAA,MACN,MAAM;AAAA,MACN;AAAA;AAAA;AAAA,MAIA,uBAAuB,cAAc;AAAA,MACrC,OAAO,cAAc;AAAA,MACrB,UAAU,cAAc;AAAA,MACxB,YAAY,cAAc;AAAA,MAC1B,kBAAkB,cAAc;AAAA;AAAA,MAGhC;AAAA,IACF;AAEA,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAGlC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,qBAAqB,MAAM;AACtC,iBAAS,oBAAoB;AAC7B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,oBAAoB,MAAM;AACrC,iBAAS,mBAAmB;AAC5B,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AACA,UAAI,SAAS,cAAc,MAAM;AAC/B,iBAAS,aAAa;AACtB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAGA,UAAI,SAAS,cAAc,MAAM;AAC/B,YAAI,SAAS,yBAAyB,MAAM;AAC1C,mBAAS,wBAAwB,SAAS;AAAA,QAC5C;AACA,iBAAS,aAAa;AAAA,MACxB;AAAA,IACF,WACE,KAAK,QAAQ,WAAW,uBAAuB,KAC/C,KAAK,QAAQ,WAAW,4BAA4B,GACpD;AACA,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SACE;AAAA,QACJ,CAAC;AAAA,MACH;AAAA,IACF;AACA,UAAM;AAAA,MACJ,OAAOC;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAOA;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA3PjE;AA4PI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,eAAW,aAAY,YAAO,QAAQ,eAAf,YAA6B,CAAC,GAAG;AACtD,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,cAAc;AAAA,QACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,QACtC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B,CAAC;AAAA,IACH;AAGA,UAAM,0BAAyB,cAAS,UAAT,mBAAgB;AAC/C,UAAM,sBAAqB,cAAS,UAAT,mBAAgB;AAC3C,UAAM,mBAA6C,EAAE,QAAQ,CAAC,EAAE;AAChE,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,OAAO,kBACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,OAAO,2BACtB,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,OAAO,qBACtB,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,OAAO;AAAA,QACL,cAAa,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC9C,eAAc,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACrD;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO
;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,eAAe;AAEnB,UAAM,mBAA6C,EAAE,QAAQ,CAAC,EAAE;AAEhE,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AA1YvC;AA4YY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,cAAc,wCAAiB;AACrC,oBAAM,eAAe,gDAAqB;AAE1C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,iCAAiB,OAAO,kBACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,iCAAiB,OAAO,2BACtB,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,iCAAiB,OAAO,qBACtB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AA
AA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,yBAAyB,cAC5B,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,EACpC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SAAO,QAAQ,WAAW,GAAG;AAC/B;AAMA,SAAS,qBAAqB,SAAiB;AAtrB/C;AAurBE,MAAI,CAAC,iBAAiB,OAAO,GAAG;AAC9B,WAAO;AAAA,EACT;AAEA,UACE,2BAAgB,OAAuC,MAAvD,mBACI,sBADJ,YACyB;AAE7B;AAEA,IAAM,kBAAkB;AAAA,EACtB,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AAAA,EACA,cAAc;AAAA,IACZ,mBAAmB;AAAA,EACrB;AAAA,EACA,yBAAyB;AAAA,IACvB,mBAAmB;AAAA,EACrB;AAAA,EACA,IAAI;AAAA,IACF,mBAAmB;AAAA,EACrB;AAAA,EACA,iBAAiB;AAAA,IACf,mBAAmB;AAAA,EACrB;AAAA,EACA,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AAAA,EACA,WAAW;AAAA,IACT,mBAAmB;AAAA,EACrB;AAAA,EACA,sBAAsB;AAAA,IACpB,mBAAmB;AAAA,EACrB;AACF;;;AOztBA,IAAAC,yBAQO;AACP,IAAAC,cAAkB;;;AChBlB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA
,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FA,IAAAC,cAAkB;AAKX,IAAM,kCAAkC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItD,MAAM,cAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgB3B,WAAW,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,cAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;AFAM,IAAM,gCAAN,MAA+D;AAAA,EAWpE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAsBhC,SAAS,gBAA0C;AAAA;AAAA,IAEnD;AAVE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAUA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAGhD,UAAM,gBAAgB;AAAA,MACpB,GAAI,UAAM,6CAAqB;AAAA,QAC7B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,MACD,GAAI,UAAM,6CAAqB;AAAA,QAC7B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,OAAO,CAAC;AAE5C,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,cAAc;AAAA,QACpB,YAAY,cAAc;AAAA,QAC1B,QAAQ,cAAc;AAAA,QACtB,MAAM,cAAc;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA;AAAA,QAGA,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,MAC7C,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBA
AuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AAAA,YACnC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,OAAO;AAAA,cACf,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AGpVD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACVlB,IAAAC,cAAkB;AAQX,IAAM,iCAAiC,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKrD,YAAY,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,cAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADFM,IAAM,uBAAN,MAA+D;AAAA,EAYpE,YAAY,SAAiC,QAAsB;AAXnE,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAS/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAPA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA1CJ;AA2CI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAGA,UAAM,iBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,cAAc;AAAA,QAC1B,MAAM,cAAc;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ
;AACzD,CAAC;;;AEnGD,IAAAC,yBAIO;AACP,IAAAC,cAAkB;;;ACCX,IAAM,wBAA4D;AAAA,EACvE,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,eAAe;AACjB;AAEO,IAAM,2BAA2B,oBAAI,IAAI,CAAC,aAAa,CAAC;;;ADSxD,IAAM,mBAAN,MAA+C;AAAA,EAapD,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAfnB,SAAS,uBAAuB;AAAA,EAgB7B;AAAA,EAdH,IAAI,mBAA2B;AAzBjC;AA0BI,YACE,gBAAK,SAAS,qBAAd,YAAkC,sBAAsB,KAAK,OAAO,MAApE,YAAyE;AAAA,EAE7E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AApDJ;AAqDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,GAAI,CAAC,yBAAyB,IAAI,KAAK,OAAO,IAC1C,EAAE,iBAAiB,WAAW,IAC9B,CAAC;AAAA,MACP;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AE7GD,IAAAC,cAAkB;AAElB,IAAM,6BAA6B,cAAE,OAAO,CAAC,CAAC;AAE9C,SAAS,qBAAqB;AAAA,EAC5B;AAAA,EACA;AACF,IASI,CAAC,GAKH;AACA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,MAAM;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAAA,IACA,YAAY;AAAA,EACd;AACF;AAEO,IAAM,cAAc;AAAA,EACzB,kBAAkB;AACpB;;;AC9BA,IAAAC,yBAMO;AACP,IAAAC,eAAkB;;;ACZlB,IAAAC,eAAkB;AASX,IAAM,qCAAqC,eAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAKzD,SAAS,eAAE,MAAM,eAAE,OAAO,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAKrC,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA,EAK7B,QAAQ,eAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAM3B,aAAa,eAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ,CAAC,EAAE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMzD,wBAAwB,eACrB,MAAM,eAAE,KAAK,CAAC,QAAQ,SAAS,CAAC,CAAC,EACjC,QAAQ,CAAC,SAAS,CAAC,EACnB,QAAQ;AACb,CAAC;;;ADHD,IAAM,cAAc;AAAA,EAClB,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,WAAW;AAAA,EACX,WAAW;AAAA,EACX,YAAY;AAAA,EACZ,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,OAAO;AAAA,EACP,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,SAAS;AAAA,EACT,OAAO;AAAA,EACP,MAAM;AAAA,EACN,SAAS;AAAA,EACT,WAAW;AAAA,EACX,MAAM;AAAA,EACN,YAAY;AAAA,EACZ,OAAO;AACT;AAEO,IAAM,2BAAN,MAA+D;AAAA,EAOpE,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAmC;AACjC,UAAM,WAA8C,CAAC;AAGrD,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,WAAW,IAAI,SAAS;AAC9B,UAAM,OACJ,
iBAAiB,aACb,IAAI,KAAK,CAAC,KAAK,CAAC,IAChB,IAAI,KAAK,KAAC,kDAA0B,KAAK,CAAC,CAAC;AAEjD,aAAS,OAAO,SAAS,KAAK,OAAO;AACrC,aAAS,OAAO,QAAQ,IAAI,KAAK,CAAC,IAAI,GAAG,SAAS,EAAE,MAAM,UAAU,CAAC,CAAC;AAGtE,QAAI,eAAe;AACjB,YAAM,4BAA4B;AAAA,QAChC,SAAS,cAAc;AAAA,QACvB,UAAU,cAAc;AAAA,QACxB,QAAQ,cAAc;AAAA,QACtB,aAAa,cAAc;AAAA,QAC3B,yBAAyB,cAAc;AAAA,MACzC;AAEA,iBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,yBAAyB,GAAG;AACpE,YAAI,SAAS,MAAM;AACjB,mBAAS,OAAO,KAAK,OAAO,KAAK,CAAC;AAAA,QACpC;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SACkE;AA9JtE;AA+JI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,UAAU,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAEzD,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,0CAAkB;AAAA,MAC1B,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,WACJ,SAAS,YAAY,QAAQ,SAAS,YAAY,cAC9C,YAAY,SAAS,QAAoC,IACzD;AAEN,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,WACE,oBAAS,UAAT,mBAAgB,IAAI,WAAS;AAAA,QAC3B,MAAM,KAAK;AAAA,QACX,aAAa,KAAK;AAAA,QAClB,WAAW,KAAK;AAAA,MAClB,QAJA,YAIO,CAAC;AAAA,MACV;AAAA,MACA,oBAAmB,cAAS,aAAT,YAAqB;AAAA,MACxC;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,oCAAoC,eAAE,OAAO;AAAA,EACjD,MAAM,eAAE,OAAO;AAAA,EACf,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,UAAU,eAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,OAAO,eACJ;AAAA,IACC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,OAAO;AAAA,MACf,OAAO,eAAE,OAAO;AAAA,MAChB,KAAK,eAAE,OAAO;AAAA,IAChB,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;;;AEpND,IAAAC,yBAQO;AACP,IAAAC,eAAkB;;;ACjBlB,IAAAC,mBAIO;AAGA,SAAS,iCAAiC;AAAA,EAC/C;AAAA,EACA;AACF,GAME;AACA,QAAM,WAAkC,CAAC;AACzC,QAAM,WAA8C,CAAC;AAErD,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,gBAAQ,mBAAmB;AAAA,UACzB,KAAK,UAAU;AACb,qBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,UACF;AAAA,UACA,KAAK,aAAa;AAChB,qBAAS,KAAK,EAAE,MAAM,aAAa,QAAQ,CAAC;AAC5C;AAAA,UACF;AAAA,UACA,KAAK,UAAU;AACb,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AACD;AAAA,UACF;AAAA,UACA,SAAS;AACP,kBAAM,mBAA0B;AAChC,kBAAM,IAAI;AAAA,cACR,oCAAoC,gBAAgB;AAAA,YACtD;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,CAAC,MAAM,UAAU;AApDhD;AAqDY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,cAAc,MAAM,KAAK,KAAK;AAAA,cAC/C;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA;AAAA,oBAG3C,SAAQ,gBAAK,oBAAL,mBAAsB,WAAtB,mBAA8B;AAAA,kBACxC;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,sBAAI,KAAK,gBAAgB,KAAK;AAE5B,0BAAM,IAAI,+CAA8B;AAAA,sBACtC,eAAe;AAAA,oBACjB,CAAC;AAAA,kBACH;AAEA,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAU,UAAK,aAAL,YAAiB,QAAQ,KAAK;AAAA,oBACxC,WAAW,+BAA+B,KAAK,IAAI;AAAA,kBACrD;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,CAAC,EAAE,MAAM,eAAe,MAAM,KAAK,KAAK,CAAC;AAAA,cACpD,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,uBAAS,KAAK;AAAA,gBACZ,MAAM;AAAA,gBACN,SAAS,KAAK;AAAA,gBACd,MAAM,KAAK;AAAA,gBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,cACrC,CAAC;AACD;AAAA,YA
CF;AAAA,UACF;AAAA,QACF;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,QAAQ,SAAS;AAC1B,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,KAAK;AAAA,YACd,QAAQ,KAAK,UAAU,KAAK,MAAM;AAAA,UACpC,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS;AAC9B;;;AC/IO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AAAA,IACL,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO,eAAe,eAAe;AAAA,EACzC;AACF;;;ACpBA,IAAAC,mBAIO;AAGA,SAAS,sBAAsB;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AACF,GAaE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAMC,eAA0C,CAAC;AAEjD,aAAW,QAAQ,OAAO;AACxB,YAAQ,KAAK,MAAM;AAAA,MACjB,KAAK;AACH,QAAAA,aAAY,KAAK;AAAA,UACf,MAAM;AAAA,UACN,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,SAAS,OAAO;AAAA,QAC1B,CAAC;AACD;AAAA,MACF,KAAK;AACH,gBAAQ,KAAK,IAAI;AAAA,UACf,KAAK;AACH,YAAAA,aAAY,KAAK;AAAA,cACf,MAAM;AAAA,cACN,qBAAqB,KAAK,KAAK;AAAA,cAI/B,eAAe,KAAK,KAAK;AAAA,YAK3B,CAAC;AACD;AAAA,UACF;AACE,yBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,QACJ;AACA;AAAA,MACF;AACE,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AACpD;AAAA,IACJ;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAOA,cAAa,YAAY,QAAW,aAAa;AAAA,EACnE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAOA,cAAa,YAAY,MAAM,aAAa;AAAA,IAC9D,KAAK;AACH,aAAO;AAAA,QACL,OAAOA;AAAA,QACP,YACE,WAAW,aAAa,uBACpB,EAAE,MAAM,qBAAqB,IAC7B,EAAE,MAAM,YAAY,MAAM,WAAW,SAAS;AAAA,QACpD;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AH5EO,IAAM,+BAAN,MAA8D;AAAA,EAOnE,YAAY,SAAiC,QAAsB;AANnE,SAAS,uBAAuB;AAWhC,SAAS,gBAA0C;AAAA,MACjD,WAAW,CAAC,iBAAiB;AAAA,IAC/B;AANE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAMA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA3DnD;AA4DI,UAAM,WAAyC,CAAC;AAChD,UAAM,cAAc,wBAAwB,KAAK,OAAO;AAExD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,gBAAgB,CAAC;AAAA,IACzE;AAEA,UAAM,EAAE,UAAU,UAAU,gBAAgB,IAC1C,iCAAiC;AAAA,MAC/B;AAAA,MACA,mBAAmB,YAAY;AAAA,IACjC,CAAC;AAEH,aAAS,KAAK,GAAG,eAAe;AAEhC,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,YAAW,oDAAe,kBAAf,YAAgC;AAEjD,UAAM,WAAW;AAAA,MACf,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MAEnB,IAAI,iDAAgB,UAAS,UAAU;AAAA,QACrC,MAAM;AAAA,UACJ,QACE,eAAe,UAAU,OACrB;AAAA,YACE,MAAM;AAAA,YACN,QAAQ;AAAA,YACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,YAC5B,QAAQ,eAAe;AAAA,UACzB,IACA,EAAE,MAAM,cAAc;AAAA,QAC9B;AAAA,MACF;AAAA;AAAA,MAGA,UAAU,+CAAe;AAAA,MACzB,qBAAqB,+CAAe;AAAA,MACpC,sBAAsB,+CAAe;AAAA,MACrC,OAAO,+CAAe;AAAA,MACtB,MAAM,+CAAe;AAAA,MACrB,cAAc,+CAAe;AAAA;AAAA,MAG7B,GAAI,YAAY,sBACb,+CAAe,oBAAmB,SACjC,+CAAe,qBAAoB,SAAS;AAAA,QAC5C,WAAW;AAAA,UACT,IAAI,+CAAe,oBAAmB,QAAQ;AAAA,YAC5C,QAAQ,cAAc;AAAA,UACxB;AAAA,UACA,IAAI,+CAAe,qBAAoB,QAAQ;AAAA,YAC7C,SAAS,cAAc;AA
AA,UACzB;AAAA,QACF;AAAA,MACF;AAAA,MACF,GAAI,YAAY,0BAA0B;AAAA,QACxC,YAAY;AAAA,MACd;AAAA,IACF;AAEA,QAAI,YAAY,kBAAkB;AAGhC,UAAI,SAAS,eAAe,MAAM;AAChC,iBAAS,cAAc;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAEA,UAAI,SAAS,SAAS,MAAM;AAC1B,iBAAS,QAAQ;AACjB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM;AAAA,MACJ,OAAOC;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,sBAAsB;AAAA,MACxB;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAOA;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AArMjE;AAsMI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB,eAAE,OAAO;AAAA,UACP,IAAI,eAAE,OAAO;AAAA,UACb,YAAY,eAAE,OAAO;AAAA,UACrB,OAAO,eAAE,OAAO;AAAA,UAChB,QAAQ,eAAE;AAAA,YACR,eAAE,mBAAmB,QAAQ;AAAA,cAC3B,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,SAAS;AAAA,gBACzB,MAAM,eAAE,QAAQ,WAAW;AAAA,gBAC3B,SAAS,eAAE;AAAA,kBACT,eAAE,OAAO;AAAA,oBACP,MAAM,eAAE,QAAQ,aAAa;AAAA,oBAC7B,MAAM,eAAE,OAAO;AAAA,oBACf,aAAa,eAAE;AAAA,sBACb,eAAE,OAAO;AAAA,wBACP,MAAM,eAAE,QAAQ,cAAc;AAAA,wBAC9B,aAAa,eAAE,OAAO;AAAA,wBACtB,WAAW,eAAE,OAAO;AAAA,wBACpB,KAAK,eAAE,OAAO;AAAA,wBACd,OAAO,eAAE,OAAO;AAAA,sBAClB,CAAC;AAAA,oBACH;AAAA,kBACF,CAAC;AAAA,gBACH;AAAA,cACF,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,eAAe;AAAA,gBAC/B,SAAS,eAAE,OAAO;AAAA,gBAClB,MAAM,eAAE,OAAO;AAAA,gBACf,WAAW,eAAE,OAAO;AAAA,cACtB,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,iBAAiB;AAAA,cACnC,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,eAAe;AAAA,cACjC,CAAC;AAAA,cACD,eAAE,OAAO;AAAA,gBACP,MAAM,eAAE,QAAQ,WAAW;AAAA,gBAC3B,SAAS,eAAE;AAAA,kBACT,eAAE,OAAO;AAAA,oBACP,MAAM,eAAE,QAAQ,cAAc;AAAA,oBAC9B,MAAM,eAAE,OAAO;AAAA,kBACjB,CAAC;AAAA,gBACH;AAAA,cACF,CAAC;AAAA,YACH,CAAC;AAAA,UACH;AAAA,UACA,oBAAoB,eAAE,OAAO,EAAE,QAAQ,eAAE,OAAO,EAAE,CAAC,EAAE,SAAS;AAAA,UAC9D,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,UAAyC,CAAC;AAGhD,eAAW,QAAQ,SAAS,QAAQ;AAClC,cAAQ,KAAK,MAAM;AAAA,QACjB,KAAK,aAAa;AAChB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,MAAM,KAAK,QAAQ,IAAI,aAAW,QAAQ,IAAI,EAAE,KAAK;AAAA,UACvD,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,WAAW;AACd,qBAAW,eAAe,KAAK,SAAS;AACtC,oBAAQ,KAAK;AAAA,cACX,MAAM;AAAA,cACN,MAAM,YAAY;AAAA,YACpB,CAAC;AAED,uBAAW,cAAc,YAAY,aAAa;AAChD,sBAAQ,KAAK;AAAA,gBACX,MAAM;AAAA,gBACN,YAAY;AAAA,gBACZ,KAAI,sBAAK,QAAO,eAAZ,gDAA8B,mCAAW;AAAA,gBAC7C,KAAK,WAAW;AAAA,gBAChB,OAAO,WAAW;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,iBAAiB;AACpB,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN,cAAc;AAAA,YACd,YAAY,KAAK;AAAA,YACjB,UAAU,KAAK;AAAA,YACf,MAAM,KAAK;AAAA,UACb,CAAC;AACD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,8BAA8B;AAAA,QAC1C,eAAc,cAAS,uBAAT,mBAA6B;AAAA,QAC3C,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,IAAI,SAAS;AAAA,QACb,WAAW,IAAI,KAAK,SAAS,aAAa,GAAI;AAAA,QAC9C,SAAS,SAAS;AAAA,QAClB,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,qBACE,oBAAS,MAAM,yBAAf,mBAAqC,kBAArC,YAAsD;AAAA,UACxD,kBACE,oBAAS,MAAM,0BAAf,mBAAsC,qBAAtC,YAA0D;AAAA,QAC9D;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;A
AAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,OAAO;AAEb,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,qBAAoC;AACxC,QAAI,kBAAiC;AACrC,QAAI,aAA4B;AAChC,UAAM,mBAGF,CAAC;AACL,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AA3YvC;AA6YY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,+BAA+B,KAAK,GAAG;AACzC,kBAAI,MAAM,KAAK,SAAS,iBAAiB;AACvC,iCAAiB,MAAM,YAAY,IAAI;AAAA,kBACrC,UAAU,MAAM,KAAK;AAAA,kBACrB,YAAY,MAAM,KAAK;AAAA,gBACzB;AAEA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,MAAM,KAAK;AAAA,kBACvB,UAAU,MAAM,KAAK;AAAA,kBACrB,eAAe,MAAM,KAAK;AAAA,gBAC5B,CAAC;AAAA,cACH;AAAA,YACF,WAAW,0CAA0C,KAAK,GAAG;AAC3D,oBAAM,WAAW,iBAAiB,MAAM,YAAY;AAEpD,kBAAI,YAAY,MAAM;AACpB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS;AAAA,kBACnB,eAAe,MAAM;AAAA,gBACvB,CAAC;AAAA,cACH;AAAA,YACF,WAAW,uBAAuB,KAAK,GAAG;AACxC,2BAAa,MAAM,SAAS;AAC5B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,MAAM,SAAS;AAAA,gBACnB,WAAW,IAAI,KAAK,MAAM,SAAS,aAAa,GAAI;AAAA,gBACpD,SAAS,MAAM,SAAS;AAAA,cAC1B,CAAC;AAAA,YACH,WAAW,iBAAiB,KAAK,GAAG;AAClC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH,WAAW,yCAAyC,KAAK,GAAG;AAC1D,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH,WACE,8BAA8B,KAAK,KACnC,MAAM,KAAK,SAAS,iBACpB;AACA,+BAAiB,MAAM,YAAY,IAAI;AACvC,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,cAAc;AAAA,gBACd,YAAY,MAAM,KAAK;AAAA,gBACvB,UAAU,MAAM,KAAK;AAAA,gBACrB,MAAM,MAAM,KAAK;AAAA,cACnB,CAAC;AAAA,YACH,WAAW,wBAAwB,KAAK,GAAG;AACzC,6BAAe,8BAA8B;AAAA,gBAC3C,eAAc,WAAM,SAAS,uBAAf,mBAAmC;AAAA,gBACjD;AAAA,cACF,CAAC;AACD,oBAAM,cAAc,MAAM,SAAS,MAAM;AACzC,oBAAM,eAAe,MAAM,SAAS,MAAM;AAC1C,oCACE,iBAAM,SAAS,MAAM,yBAArB,mBAA2C,kBAA3C,YACA;AACF,iCACE,iBAAM,SAAS,MAAM,0BAArB,mBAA4C,qBAA5C,YACA;AAAA,YACJ,WAAW,+BAA+B,KAAK,GAAG;AAChD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,YAAY;AAAA,gBACZ,KAAI,sBAAK,QAAO,eAAZ,gDAA8B,mCAAW;AAAA,gBAC7C,KAAK,MAAM,WAAW;AAAA,gBACtB,OAAO,MAAM,WAAW;AAAA,cAC1B,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,IAAK,sBAAsB,QAAQ,mBAAmB,SAAS;AAAA,gBAC7D,kBAAkB;AAAA,kBAChB,QAAQ;AAAA,oBACN;AAAA,oBACA;AAAA,oBACA;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,eAAE,OAAO;AAAA,EAC3B,cAAc,eAAE,OAAO;AAAA,EACvB,sBAAsB,eACnB,OAAO,EAAE,eAAe,eAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EAC9C,QAAQ;AAAA,EACX,eAAe,eAAE,OAAO;AAAA,EACxB,uBAAuB,eACpB,OAAO,EAAE,kBAAkB,eAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,EACjD,QAAQ;AACb,CAAC;AAED,IAAM,uBAAuB,eAAE,OAAO;AAAA,EACpC,MAAM,eAAE,QAAQ,4BAA4B;AAAA,EAC5C,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,8BAA8B,eAAE,OAAO;AAAA,EAC3C,MAAM,eAAE,KAAK,CAAC,sBAAsB,qBAAqB,CAAC;AAAA,EAC1D,UAAU,eAAE,OAAO;AAAA,IACjB,oBAAoB,eAAE,OAAO,EAAE,QAAQ,eAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,IAC7D,OAAO;AAAA,EACT,CAAC;AACH,CAAC;AAED,IAAM,6BAA6B,eAAE,OAAO;AAAA,EAC1C,MAAM,eAAE,QAAQ,
kBAAkB;AAAA,EAClC,UAAU,eAAE,OAAO;AAAA,IACjB,IAAI,eAAE,OAAO;AAAA,IACb,YAAY,eAAE,OAAO;AAAA,IACrB,OAAO,eAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,+BAA+B,eAAE,OAAO;AAAA,EAC5C,MAAM,eAAE,QAAQ,2BAA2B;AAAA,EAC3C,cAAc,eAAE,OAAO;AAAA,EACvB,MAAM,eAAE,mBAAmB,QAAQ;AAAA,IACjC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,eAAE,OAAO;AAAA,MACb,SAAS,eAAE,OAAO;AAAA,MAClB,MAAM,eAAE,OAAO;AAAA,MACf,WAAW,eAAE,OAAO;AAAA,MACpB,QAAQ,eAAE,QAAQ,WAAW;AAAA,IAC/B,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,2CAA2C,eAAE,OAAO;AAAA,EACxD,MAAM,eAAE,QAAQ,wCAAwC;AAAA,EACxD,SAAS,eAAE,OAAO;AAAA,EAClB,cAAc,eAAE,OAAO;AAAA,EACvB,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,gCAAgC,eAAE,OAAO;AAAA,EAC7C,MAAM,eAAE,QAAQ,4BAA4B;AAAA,EAC5C,cAAc,eAAE,OAAO;AAAA,EACvB,MAAM,eAAE,mBAAmB,QAAQ;AAAA,IACjC,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,SAAS;AAAA,IAC3B,CAAC;AAAA,IACD,eAAE,OAAO;AAAA,MACP,MAAM,eAAE,QAAQ,eAAe;AAAA,MAC/B,IAAI,eAAE,OAAO;AAAA,MACb,SAAS,eAAE,OAAO;AAAA,MAClB,MAAM,eAAE,OAAO;AAAA,MACf,WAAW,eAAE,OAAO;AAAA,IACtB,CAAC;AAAA,EACH,CAAC;AACH,CAAC;AAED,IAAM,gCAAgC,eAAE,OAAO;AAAA,EAC7C,MAAM,eAAE,QAAQ,uCAAuC;AAAA,EACvD,YAAY,eAAE,OAAO;AAAA,IACnB,MAAM,eAAE,QAAQ,cAAc;AAAA,IAC9B,KAAK,eAAE,OAAO;AAAA,IACd,OAAO,eAAE,OAAO;AAAA,EAClB,CAAC;AACH,CAAC;AAED,IAAM,0CAA0C,eAAE,OAAO;AAAA,EACvD,MAAM,eAAE,QAAQ,uCAAuC;AAAA,EACvD,SAAS,eAAE,OAAO;AAAA,EAClB,cAAc,eAAE,OAAO;AAAA,EACvB,eAAe,eAAE,OAAO;AAAA,EACxB,OAAO,eAAE,OAAO;AAClB,CAAC;AAED,IAAM,6BAA6B,eAAE,MAAM;AAAA,EACzC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,eAAE,OAAO,EAAE,MAAM,eAAE,OAAO,EAAE,CAAC,EAAE,YAAY;AAAA;AAC7C,CAAC;AAED,SAAS,iBACP,OAC+C;AAC/C,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,8BACP,OACuD;AACvD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,wBACP,OACsD;AACtD,SACE,MAAM,SAAS,wBAAwB,MAAM,SAAS;AAE1D;AAEA,SAAS,uBACP,OACqD;AACrD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,0CACP,OACmE;AACnE,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,+BACP,OACwD;AACxD,SAAO,MAAM,SAAS;AACxB;AAEA,SAAS,yCACP,OACkE;AAClE,SAAO,MAAM,SAAS;AACxB;AAQA,SAAS,wBAAwB,SAAuC;AAEtE,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,QAAI,QAAQ,WAAW,SAAS,KAAK,QAAQ,WAAW,YAAY,GAAG;AACrE,aAAO;AAAA,QACL,kBAAkB;AAAA,QAClB,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,WAAO;AAAA,MACL,kBAAkB;AAAA,MAClB,mBAAmB;AAAA,MACnB,wBAAwB;AAAA,IAC1B;AAAA,EACF;AAGA,SAAO;AAAA,IACL,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,wBAAwB;AAAA,EAC1B;AACF;AAEA,IAAM,uCAAuC,eAAE,OAAO;AAAA,EACpD,UAAU,eAAE,IAAI,EAAE,QAAQ;AAAA,EAC1B,mBAAmB,eAAE,QAAQ,EAAE,QAAQ;AAAA,EACvC,oBAAoB,eAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,OAAO,eAAE,QAAQ,EAAE,QAAQ;AAAA,EAC3B,MAAM,eAAE,OAAO,EAAE,QAAQ;AAAA,EACzB,iBAAiB,eAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,eAAe,eAAE,QAAQ,EAAE,QAAQ;AAAA,EACnC,cAAc,eAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,kBAAkB,eAAE,OAAO,EAAE,QAAQ;AACvC,CAAC;;;AIlsBD,IAAAC,yBAKO;AACP,IAAAC,eAAkB;AAOlB,IAAM,8BAA8B,eAAE,OAAO;AAAA,EAC3C,cAAc,eAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,OAAO,eAAE,OAAO,EAAE,IAAI,IAAI,EAAE,IAAI,CAAG,EAAE,QAAQ,CAAG,EAAE,QAAQ;AAC5D,CAAC;AAYM,IAAM,oBAAN,MAAiD;AAAA,EAOtD,YACW,SACQ,QACjB;AAFS;AACQ;AARnB,SAAS,uBAAuB;AAAA,EAS7B;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA,QAAQ;AAAA,IACR,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAA+C;AAC7C,UAAM,WAAuC,CAAC;AAG9C,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UAAM,cAAuC;AAAA,MAC3C,OAAO,KAAK;AAAA,MACZ,OAAO;AAAA,MACP;AAAA,MACA,iBAAiB;AAAA,MACjB;AAAA,MACA;AAAA,IACF;AAEA,QAAI,cAAc;AAChB,UAAI,CAAC,OAAO,QAAQ,OAAO,QAAQ,OAAO,KAAK,EAAE,SAAS,YAAY,GAAG;AACvE,oBAAY,kBAAkB;AAAA,MAChC,OAAO;AACL,iBAAS,KAAK;AAAA
,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,SAAS,8BAA8B,YAAY;AAAA,QACrD,CAAC;AAAA,MACH;AAAA,IACF;AAGA,QAAI,eAAe;AACjB,YAAM,qBAA2C,CAAC;AAElD,iBAAW,OAAO,oBAAoB;AACpC,cAAM,QAAQ,mBAAmB,GAAiC;AAClE,YAAI,UAAU,QAAW;AACvB,sBAAY,GAAG,IAAI;AAAA,QACrB;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC2D;AApG/D;AAqGI,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,aAAa,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE5D,UAAM;AAAA,MACJ,OAAO;AAAA,MACP;AAAA,MACA,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,oDAA4B;AAAA,MACvD,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,SAAS;AAAA,QACP,MAAM,KAAK,UAAU,WAAW;AAAA,MAClC;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;;;AtBqBO,SAAS,aACd,UAAkC,CAAC,GACnB;AA9JlB;AA+JE,QAAM,WACJ,uDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAG3C,QAAM,iBAAgB,aAAQ,kBAAR,YAAyB;AAE/C,QAAM,gBAAe,aAAQ,SAAR,YAAgB;AAErC,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,oCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,uBAAuB,QAAQ;AAAA,IAC/B,kBAAkB,QAAQ;AAAA,IAC1B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,wBAAwB,SAAS;AAAA,IACnC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,8BAA8B,SAAS;AAAA,IACzC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT;AAAA,IACA,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,qBAAqB,SAAS;AAAA,IAChC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,mBAAmB,CACvB,SACA,WAAgC,CAAC,MAEjC,IAAI,iBAAiB,SAAS,UAAU;AAAA,IACtC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,2BAA2B,CAAC,YAChC,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,oBAAoB,CAAC,YACzB,IAAI,kBAAkB,SAAS;AAAA,IAC7B,UAAU,GAAG,YAAY;AAAA,IACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,sBAAsB,CAC1B,YACG;AACH,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,YAAY,0BAA0B;AACxC,aAAO,sBAAsB,OAAO;AAAA,IACtC;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,QAAM,uBAAuB,CAAC,YAAoC;AAChE,WAAO,IAAI,6BAA6B,SAAS;AAAA,MAC/C,UAAU,GAAG,YAAY;AAAA,MACzB,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,MACpC,SAAS;AAAA,MACT,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA,EACH;AAEA,QAAM,WAAW,SACf,SACA;AACA,WAAO,oBAAoB,OAAO;AAAA,EACpC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AACrB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,QAAQ;AACjB,WAAS,aAAa;AAEtB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,SAAS;AAClB,WAAS,cAAc;AAEvB,WAAS,QAAQ;AAEjB,SAAO;AACT;AAKO,IAAM,SAAS,aAAa;AAAA,EACjC,eAAe;AAAA;AACjB,CAAC;","names":["import_provider_utils","import_provider","import_provider_utils","import_zod","import_zod","import_provider_utils","import_provider","openaiTools","openaiTools","toolCall","import_provider_utils","import_zod","import_provider","import_zod","import_provider","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","impor
t_zod","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","import_provider","import_provider","openaiTools","openaiTools","import_provider_utils","import_zod"]}
|