@ai-sdk/openai-compatible 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-chat-options.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-completion-options.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-embedding-options.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type {\n OpenAICompatibleChatModelId,\n OpenAICompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n openaiCompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\nimport { prepareTools } from './openai-compatible-prepare-tools';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n 
supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n );\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning =\n choice.message.reasoning_content ?? choice.message.reasoning;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n const providerMetadata: SharedV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...(await this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n })),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n totalTokens: number | undefined;\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n const providerOptionsName = this.providerOptionsName;\n let isActiveReasoning = false;\n let isActiveText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n // Usage may be at the top level per spec or nested under choices[0]\n // for some providers (e.g. moonshotai).\n const effectiveUsage = value.usage ?? 
value.choices?.[0]?.usage;\n if (effectiveUsage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = effectiveUsage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n usage.totalTokens = total_tokens ?? undefined;\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n const reasoningContent = delta.reasoning_content ?? delta.reasoning;\n if (reasoningContent) {\n if (!isActiveReasoning) {\n controller.enqueue({\n type: 'reasoning-start',\n id: 'reasoning-0',\n });\n isActiveReasoning = true;\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: 'reasoning-0',\n delta: reasoningContent,\n });\n }\n\n if (delta.content) {\n if (!isActiveText) {\n controller.enqueue({ type: 'text-start', id: 'txt-0' });\n isActiveText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: 'txt-0',\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallDelta.id,\n toolName: toolCallDelta.function.name,\n });\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCall.id,\n toolName: toolCall.function.name,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n if (isActiveReasoning) {\n controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });\n }\n\n if (isActiveText) {\n controller.enqueue({ type: 'text-end', id: 'txt-0' });\n }\n\n // go through all tool calls and send the ones that are not finished\n for (const toolCall of toolCalls.filter(\n toolCall => !toolCall.hasFinished,\n )) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n }\n\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n // Most openai-compatible models set `reasoning_content`, but some\n // providers serving `gpt-oss` set `reasoning`. See #7866\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n // Some providers report usage within each choice in streaming chunks\n usage: openaiCompatibleTokenUsageSchema,\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n SharedV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: SharedV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleChatModelId = string;\n\nexport const openaiCompatibleProviderOptions = z.object({\n /**\n * A unique identifier representing your end-user, which can help the provider to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.string().optional(),\n});\n\nexport type OpenAICompatibleProviderOptions = z.infer<\n typeof openaiCompatibleProviderOptions\n>;\n","import { z, ZodType } from 'zod/v4';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodType<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n openaiCompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const completionOptions =\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleCompletionProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: completionOptions.echo,\n logit_bias: completionOptions.logitBias,\n suffix: completionOptions.suffix,\n user: completionOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n if (choice.text != null && choice.text.length > 0) {\n content.push({ type: 'text', text: choice.text });\n }\n\n return {\n content,\n usage: {\n inputTokens: response.usage?.prompt_tokens ?? undefined,\n outputTokens: response.usage?.completion_tokens ?? undefined,\n totalTokens: response.usage?.total_tokens ?? 
undefined,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n\n controller.enqueue({\n type: 'text-start',\n id: '0',\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens ?? undefined;\n usage.outputTokens = value.usage.completion_tokens ?? undefined;\n usage.totalTokens = value.usage.total_tokens ?? 
undefined;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: choice.text,\n });\n }\n },\n\n flush(controller) {\n if (!isFirstChunk) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: usageSchema.nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { z } from 'zod/v4';\n\nexport type 
OpenAICompatibleCompletionModelId = string;\n\nexport const openaiCompatibleCompletionProviderOptions = z.object({\n /**\n * Echo back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\n * The suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleCompletionProviderOptions = z.infer<\n typeof openaiCompatibleCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n OpenAICompatibleEmbeddingModelId,\n openaiCompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? 
{},\n );\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: compatibleOptions.dimensions,\n user: compatibleOptions.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n providerMetadata: response.providerMetadata,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n providerMetadata: z\n .record(z.string(), z.record(z.string(), z.any()))\n .optional(),\n});\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleEmbeddingModelId = string;\n\nexport const openaiCompatibleEmbeddingProviderOptions = z.object({\n /**\n * The number of dimensions the resulting output embeddings should have.\n * Only supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleEmbeddingProviderOptions = z.infer<\n typeof openaiCompatibleEmbeddingProviderOptions\n>;\n","import { ImageModelV2, ImageModelV2CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV2 {\n readonly specificationVersion = 'v2';\n readonly maxImagesPerCall = 10;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<\n 
Awaited<ReturnType<ImageModelV2['doGenerate']>>\n > {\n const warnings: Array<ImageModelV2CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n languageModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV2;\n\n textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV2<string>;\n\n imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV2;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. 
You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nInclude usage information in streaming responses.\n */\n includeUsage?: boolean;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (modelId: CHAT_MODEL_IDS) =>\n createChatModel(modelId);\n\n const createChatModel = (modelId: CHAT_MODEL_IDS) =>\n new OpenAICompatibleChatLanguageModel(modelId, {\n ...getCommonModelConfig('chat'),\n includeUsage: options.includeUsage,\n });\n\n const createCompletionModel = (modelId: COMPLETION_MODEL_IDS) =>\n new OpenAICompatibleCompletionLanguageModel(modelId, {\n ...getCommonModelConfig('completion'),\n includeUsage: options.includeUsage,\n });\n\n const createEmbeddingModel = (modelId: EMBEDDING_MODEL_IDS) =>\n new OpenAICompatibleEmbeddingModel(modelId, {\n ...getCommonModelConfig('embedding'),\n });\n\n const createImageModel = (modelId: IMAGE_MODEL_IDS) =>\n new OpenAICompatibleImageModel(modelId, getCommonModelConfig('image'));\n\n const provider = (modelId: CHAT_MODEL_IDS) => createLanguageModel(modelId);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n 
>;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,4BAYO;AACP,IAAAC,aAAkB;;;ACvBlB,sBAIO;AAGP,SAAS,kBAAkB,SAExB;AATH;AAUE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,oBAC7C;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,YACT,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtJO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,gBAAkB;AAIX,IAAM,kCAAkC,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAM,YAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,YAAE,OAAO,EAAE,SAAS;AACvC,CAAC;;;ACfD,IAAAC,aAA2B;AAEpB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;A
ACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA3DlC;AAwEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,sDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA9FtB;AA+FI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAhHnD;AAiHI,UAAM,WAAyC,CAAC;AAGhD,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,4CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,4CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA,QAE1B,kBAAkB,kBAAkB;AAAA;AAAA,QAGpC,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA3MjE;AA4MI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,qCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,kCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,
mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA,MACA;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,qCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAjYvC,gBAAAC,KAAA;AAmYY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAIA,kBAAM,kBAAiB,WAAM,UAAN,aAAe,MAAAA,MAAA,MAAM,YAAN,gBAAAA,IAAgB,OAAhB,mBAAoB;AAC1D,gBAAI,kBAAkB,MAAM;AAC1B,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI;AAEJ,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmB,WAAM,sBAAN,YAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE
5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,UAAUA,UAAS,SAAS;AAAA,sBAC9B,CAAC;AAAA,oBACH;AAIA,4BAAI,sCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,kCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,sCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,kCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AApmB5B,gBAAAD,KAAA;AAqmBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,UAAe,kCAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAa,WAAM,iBAAN,YAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,aACtC,OAAO;AAAA,EACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,aACpB,OAAO;AAAA,IACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,aACxB,OAAO;AAAA,IACN,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE
,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO;AAAA,cACf,WAAW,aAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,OAAO,aACJ,OAAO;AAAA,UACN,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,UAG5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,UAC9B,YAAY,aACT;AAAA,YACC,aAAE,OAAO;AAAA,cACP,OAAO,aAAE,OAAO;AAAA,cAChB,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,UAAU,aAAE,OAAO;AAAA,gBACjB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA;AAAA,QAElC,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AO5vBH,IAAAE,yBAUO;AACP,IAAAC,aAAkB;;;ACpBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FA,IAAAC,aAAkB;AAIX,IAAM,4CAA4C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAM,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAW,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;AFmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAAyC,CAAC;AAGhD,UAAM,qBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,C
AAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAvKjE;AAwKI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,cAAa,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC9C,eAAc,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,QACnD,cAAa,oBAAS,UAAT,mBAAgB,iBAAhB,YAAgC;AAAA,MAC/C;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAvQvC;AAwQY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AACjD,oBAAM,gBAAe,WAAM,MAAM,sBAAZ,YAAiC;AACtD,oBAAM,eAAc,WAAM,MAAM,iBAAZ,YAA4B;AAAA,YAClD;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY
,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2C,aAAE,OAAO;AAAA,EACxD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,MAAM,aAAE,OAAO;AAAA,MACf,eAAe,aAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO;AAAA,QACf,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,aAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AG/XH,IAAAC,mBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,IAAAC,aAAkB;AAIX,IAAM,2CAA2C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAEP;AAAA,EAkBE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA/EJ;AAgFI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACjD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACvD,kBAAkB,aACf,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AEnJD,IAAAC,yBAMO;AACP,IAAAC,aAAkB;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAlDJ;AAmDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAA
O,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,aAAE,OAAO;AAAA,EACnD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,UAAU,aAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;ACrGD,IAAAC,yBAAoD;AAqE7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_v4","import_v4","import_provider","_a","toolCall","import_provider_utils","import_v4","import_provider","import_v4","import_provider","import_provider_utils","import_v4","import_v4","import_provider_utils","import_v4","import_provider_utils"]}
+ {"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-chat-options.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-completion-options.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-embedding-options.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type {\n OpenAICompatibleChatModelId,\n OpenAICompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n openaiCompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\nimport { prepareTools } from './openai-compatible-prepare-tools';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n 
supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n );\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n reasoning_effort: compatibleOptions.reasoningEffort,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning =\n choice.message.reasoning_content ?? choice.message.reasoning;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n const providerMetadata: SharedV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...(await this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n })),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n inputTokens: responseBody.usage?.prompt_tokens ?? undefined,\n outputTokens: responseBody.usage?.completion_tokens ?? undefined,\n totalTokens: responseBody.usage?.total_tokens ?? undefined,\n reasoningTokens:\n responseBody.usage?.completion_tokens_details?.reasoning_tokens ??\n undefined,\n cachedInputTokens:\n responseBody.usage?.prompt_tokens_details?.cached_tokens ?? 
undefined,\n },\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n totalTokens: number | undefined;\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n const providerOptionsName = this.providerOptionsName;\n let isActiveReasoning = false;\n let isActiveText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n total_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n usage.totalTokens = total_tokens ?? 
undefined;\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n const reasoningContent = delta.reasoning_content ?? delta.reasoning;\n if (reasoningContent) {\n if (!isActiveReasoning) {\n controller.enqueue({\n type: 'reasoning-start',\n id: 'reasoning-0',\n });\n isActiveReasoning = true;\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: 'reasoning-0',\n delta: reasoningContent,\n });\n }\n\n if (delta.content) {\n if (!isActiveText) {\n controller.enqueue({ type: 'text-start', id: 'txt-0' });\n isActiveText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: 'txt-0',\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallDelta.id,\n toolName: toolCallDelta.function.name,\n });\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCall.id,\n toolName: toolCall.function.name,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? 
'';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n if (isActiveReasoning) {\n controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });\n }\n\n if (isActiveText) {\n controller.enqueue({ type: 'text-end', id: 'txt-0' });\n }\n\n // go through all tool calls and send the ones that are not finished\n for (const toolCall of toolCalls.filter(\n toolCall => !toolCall.hasFinished,\n )) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n }\n\n const providerMetadata: SharedV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n inputTokens: usage.promptTokens ?? undefined,\n outputTokens: usage.completionTokens ?? undefined,\n totalTokens: usage.totalTokens ?? undefined,\n reasoningTokens:\n usage.completionTokensDetails.reasoningTokens ?? undefined,\n cachedInputTokens:\n usage.promptTokensDetails.cachedTokens ?? 
undefined,\n },\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n // Most openai-compatible models set `reasoning_content`, but some\n // providers serving `gpt-oss` set `reasoning`. See #7866\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n SharedV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: SharedV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleChatModelId = string;\n\nexport const openaiCompatibleProviderOptions = z.object({\n /**\n * A unique identifier representing your end-user, which can help the provider to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.string().optional(),\n});\n\nexport type OpenAICompatibleProviderOptions = z.infer<\n typeof openaiCompatibleProviderOptions\n>;\n","import { z, ZodType } from 'zod/v4';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodType<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n openaiCompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n // Parse provider options\n const completionOptions =\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleCompletionProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: completionOptions.echo,\n logit_bias: completionOptions.logitBias,\n suffix: completionOptions.suffix,\n user: completionOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n if (choice.text != null && choice.text.length > 0) {\n content.push({ type: 'text', text: choice.text });\n }\n\n return {\n content,\n usage: {\n inputTokens: response.usage?.prompt_tokens ?? undefined,\n outputTokens: response.usage?.completion_tokens ?? undefined,\n totalTokens: response.usage?.total_tokens ?? 
undefined,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n\n controller.enqueue({\n type: 'text-start',\n id: '0',\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens ?? undefined;\n usage.outputTokens = value.usage.completion_tokens ?? undefined;\n usage.totalTokens = value.usage.total_tokens ?? 
undefined;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: choice.text,\n });\n }\n },\n\n flush(controller) {\n if (!isFirstChunk) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: usageSchema.nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { z } from 'zod/v4';\n\nexport type 
OpenAICompatibleCompletionModelId = string;\n\nexport const openaiCompatibleCompletionProviderOptions = z.object({\n /**\n * Echo back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\n * The suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleCompletionProviderOptions = z.infer<\n typeof openaiCompatibleCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n OpenAICompatibleEmbeddingModelId,\n openaiCompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? 
{},\n );\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: compatibleOptions.dimensions,\n user: compatibleOptions.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n providerMetadata: response.providerMetadata,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n providerMetadata: z\n .record(z.string(), z.record(z.string(), z.any()))\n .optional(),\n});\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleEmbeddingModelId = string;\n\nexport const openaiCompatibleEmbeddingProviderOptions = z.object({\n /**\n * The number of dimensions the resulting output embeddings should have.\n * Only supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleEmbeddingProviderOptions = z.infer<\n typeof openaiCompatibleEmbeddingProviderOptions\n>;\n","import { ImageModelV2, ImageModelV2CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV2 {\n readonly specificationVersion = 'v2';\n readonly maxImagesPerCall = 10;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV2['doGenerate']>[0]): Promise<\n 
Awaited<ReturnType<ImageModelV2['doGenerate']>>\n > {\n const warnings: Array<ImageModelV2CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV2,\n ImageModelV2,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n languageModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV2;\n\n completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV2;\n\n textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV2<string>;\n\n imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV2;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. 
You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nInclude usage information in streaming responses.\n */\n includeUsage?: boolean;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (modelId: CHAT_MODEL_IDS) =>\n createChatModel(modelId);\n\n const createChatModel = (modelId: CHAT_MODEL_IDS) =>\n new OpenAICompatibleChatLanguageModel(modelId, {\n ...getCommonModelConfig('chat'),\n includeUsage: options.includeUsage,\n });\n\n const createCompletionModel = (modelId: COMPLETION_MODEL_IDS) =>\n new OpenAICompatibleCompletionLanguageModel(modelId, {\n ...getCommonModelConfig('completion'),\n includeUsage: options.includeUsage,\n });\n\n const createEmbeddingModel = (modelId: EMBEDDING_MODEL_IDS) =>\n new OpenAICompatibleEmbeddingModel(modelId, {\n ...getCommonModelConfig('embedding'),\n });\n\n const createImageModel = (modelId: IMAGE_MODEL_IDS) =>\n new OpenAICompatibleImageModel(modelId, getCommonModelConfig('image'));\n\n const provider = (modelId: CHAT_MODEL_IDS) => createLanguageModel(modelId);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n 
>;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBAYO;AACP,IAAAC,aAAkB;;;ACvBlB,sBAIO;AAEP,4BAAgC;AAEhC,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,oBAC9D;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,YACT,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvJO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,gBAAkB;AAIX,IAAM,kCAAkC,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAM,YAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,YAAE,OAAO,EAAE,SAAS;AACvC,CAAC;;;ACfD,IAAAC,aAA2B;AAEpB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBA
AoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA3DlC;AAwEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA9FtB;AA+FI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAhHnD;AAiHI,UAAM,WAAyC,CAAC;AAGhD,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA,QAE1B,kBAAkB,kBAAkB;AAAA;AAAA,QAGpC,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA3MjE;AA4MI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MA
C7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,cAAa,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QAClD,eAAc,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,QACvD,cAAa,wBAAa,UAAb,mBAAoB,iBAApB,YAAoC;AAAA,QACjD,kBACE,8BAAa,UAAb,mBAAoB,8BAApB,mBAA+C,qBAA/C,YACA;AAAA,QACF,oBACE,8BAAa,UAAb,mBAAoB,0BAApB,mBAA2C,kBAA3C,YAA4D;AAAA,MAChE;AAAA,MACA;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,UAAM,QAYF;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,MACA,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA;AAAA,UAGA,UAAU,OAAO,YAAY;AAjYvC,gBAAAC,KAAA;AAmYY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAC9C,oBAAM,cAAc,sCAAgB;AACpC,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmBA,MAAA,MAAM,sBAAN,OAAAA,MAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;A
AC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,UAAUA,UAAS,SAAS;AAAA,sBAC9B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAjmB5B,gBAAAD,KAAA;AAkmBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,cAAa,WAAM,iBAAN,YAAsB;AAAA,gBACnC,eAAc,WAAM,qBAAN,YAA0B;AAAA,gBACxC,cAAa,WAAM,gBAAN,YAAqB;AAAA,gBAClC,kBACE,WAAM,wBAAwB,oBAA9B,YAAiD;AAAA,gBACnD,oBACE,WAAM,oBAAoB,iBAA1B,YAA0C;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,aACtC,OAAO;AAAA,EACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,aACpB,OAAO;AAAA,IACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,aACxB,OAAO;AAAA,IACN,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACr
C,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO;AAAA,cACf,WAAW,aAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,OAAO,aACJ,OAAO;AAAA,UACN,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,UAG5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,UAC9B,YAAY,aACT;AAAA,YACC,aAAE,OAAO;AAAA,cACP,OAAO,aAAE,OAAO;AAAA,cAChB,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,UAAU,aAAE,OAAO;AAAA,gBACjB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AOvvBH,IAAAE,yBAUO;AACP,IAAAC,aAAkB;;;ACpBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FA,IAAAC,aAAkB;AAIX,IAAM,4CAA4C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAM,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAW,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;AFmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAAyC,CAAC;AAGhD,UAAM,qBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,
uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAvKjE;AAwKI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO;AAAA,QACL,cAAa,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC9C,eAAc,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,QACnD,cAAa,oBAAS,UAAT,mBAAgB,iBAAhB,YAAgC;AAAA,MAC/C;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAvQvC;AAwQY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AACjD,oBAAM,gBAAe,WAAM,MAAM,sBAAZ,YAAiC;AACtD,oBAAM,eAAc,WAAM,MAAM,iBAAZ,YAA4B;AAAA,YAClD;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cA
CjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2C,aAAE,OAAO;AAAA,EACxD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,MAAM,aAAE,OAAO;AAAA,MACf,eAAe,aAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO;AAAA,QACf,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,aAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AG/XH,IAAAC,mBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,IAAAC,aAAkB;AAIX,IAAM,2CAA2C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAEP;AAAA,EAkBE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA/EJ;AAgFI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACjD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACvD,kBAAkB,aACf,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AEnJD,IAAAC,yBAMO;AACP,IAAAC,aAAkB;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAlDJ;AAmDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,m
BAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,aAAE,OAAO;AAAA,EACnD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,UAAU,aAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;ACrGD,IAAAC,yBAAoD;AAqE7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_provider_utils","import_v4","import_v4","import_provider","_a","toolCall","import_provider_utils","import_v4","import_provider","import_v4","import_provider","import_provider_utils","import_v4","import_v4","import_provider_utils","import_v4","import_provider_utils"]}
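
Note: the embedded TypeScript sources above include the provider factory `createOpenAICompatible` and its `OpenAICompatibleProviderSettings`. For orientation, a minimal usage sketch based only on the fields visible in those sources; the endpoint, environment variable, and model IDs below are placeholders, not part of the package:

    import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

    // Placeholder endpoint and model IDs, for illustration only.
    const provider = createOpenAICompatible({
      name: 'example',
      baseURL: 'https://api.example.com/v1',
      apiKey: process.env.EXAMPLE_API_KEY,
      includeUsage: true, // requests stream_options: { include_usage: true } on streaming calls
    });

    const chatModel = provider.chatModel('example-chat-model');
    const embeddingModel = provider.textEmbeddingModel('example-embedding-model');
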
package/dist/index.mjs CHANGED
@@ -18,6 +18,7 @@ import { z as z3 } from "zod/v4";
  import {
  UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
+ import { convertToBase64 } from "@ai-sdk/provider-utils";
  function getOpenAIMetadata(message) {
  var _a, _b;
  return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
@@ -54,7 +55,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
  return {
  type: "image_url",
  image_url: {
- url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase64(part.data)}`
  },
  ...partMetadata
  };
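
Note: together with the new `convertToBase64` import above, this is the functional change in this file: binary image parts are now base64-encoded before being placed in the data URL instead of being interpolated directly (a raw `Uint8Array` would previously have stringified as comma-separated byte values). A rough standalone sketch of the new behavior, assuming `convertToBase64` accepts the `Uint8Array | string` values that `part.data` may hold and returns a base64 string:

    import { convertToBase64 } from '@ai-sdk/provider-utils';

    // Illustrative helper mirroring the changed expression; not part of the package API.
    function toImageUrl(data: Uint8Array | string | URL, mediaType: string): string {
      return data instanceof URL
        ? data.toString()
        : `data:${mediaType};base64,${convertToBase64(data)}`;
    }
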
@@ -487,7 +488,7 @@ var OpenAICompatibleChatLanguageModel = class {
  },
  // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX
  transform(chunk, controller) {
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+ var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -510,15 +511,14 @@ var OpenAICompatibleChatLanguageModel = class {
  ...getResponseMetadata(value)
  });
  }
- const effectiveUsage = (_c = value.usage) != null ? _c : (_b = (_a2 = value.choices) == null ? void 0 : _a2[0]) == null ? void 0 : _b.usage;
- if (effectiveUsage != null) {
+ if (value.usage != null) {
  const {
  prompt_tokens,
  completion_tokens,
  total_tokens,
  prompt_tokens_details,
  completion_tokens_details
- } = effectiveUsage;
+ } = value.usage;
  usage.promptTokens = prompt_tokens != null ? prompt_tokens : void 0;
  usage.completionTokens = completion_tokens != null ? completion_tokens : void 0;
  usage.totalTokens = total_tokens != null ? total_tokens : void 0;
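
Note: this hunk removes the per-choice usage fallback; streamed token usage is now read only from the top-level `usage` field of a chunk. Sketch of the chunk shape the code expects, using the token fields named in the hunk above (the `*_details` sub-fields follow OpenAI's usage format and are illustrative):

    // Illustrative streaming chunk; only the top-level `usage` object is consumed.
    const chunk = {
      choices: [{ delta: { content: 'Hello' }, finish_reason: null }],
      usage: {
        prompt_tokens: 12,
        completion_tokens: 3,
        total_tokens: 15,
        prompt_tokens_details: { cached_tokens: 0 },
        completion_tokens_details: { reasoning_tokens: 0 },
      },
    };
    // A usage object nested inside chunk.choices[0] is no longer consulted after this change.
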
@@ -545,7 +545,7 @@ var OpenAICompatibleChatLanguageModel = class {
  return;
  }
  const delta = choice.delta;
- const reasoningContent = (_d = delta.reasoning_content) != null ? _d : delta.reasoning;
+ const reasoningContent = (_a2 = delta.reasoning_content) != null ? _a2 : delta.reasoning;
  if (reasoningContent) {
  if (!isActiveReasoning) {
  controller.enqueue({
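
Note: only the minifier temporary changes here (`_d` becomes `_a2` after the removal above); the logic still coalesces the two field names providers use for streamed reasoning, equivalent to:

    // Illustrative, de-minified form of the changed line:
    const reasoningContent = delta.reasoning_content ?? delta.reasoning;
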
@@ -581,7 +581,7 @@ var OpenAICompatibleChatLanguageModel = class {
  message: `Expected 'id' to be a string.`
  });
  }
- if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
+ if (((_b = toolCallDelta.function) == null ? void 0 : _b.name) == null) {
  throw new InvalidResponseDataError({
  data: toolCallDelta,
  message: `Expected 'function.name' to be a string.`
@@ -597,12 +597,12 @@ var OpenAICompatibleChatLanguageModel = class {
  type: "function",
  function: {
  name: toolCallDelta.function.name,
- arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
+ arguments: (_c = toolCallDelta.function.arguments) != null ? _c : ""
  },
  hasFinished: false
  };
  const toolCall2 = toolCalls[index];
- if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
+ if (((_d = toolCall2.function) == null ? void 0 : _d.name) != null && ((_e = toolCall2.function) == null ? void 0 : _e.arguments) != null) {
  if (toolCall2.function.arguments.length > 0) {
  controller.enqueue({
  type: "tool-input-start",
@@ -617,7 +617,7 @@ var OpenAICompatibleChatLanguageModel = class {
  });
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_i = toolCall2.id) != null ? _i : generateId(),
+ toolCallId: (_f = toolCall2.id) != null ? _f : generateId(),
  toolName: toolCall2.function.name,
  input: toolCall2.function.arguments
  });
@@ -630,22 +630,22 @@ var OpenAICompatibleChatLanguageModel = class {
  if (toolCall.hasFinished) {
  continue;
  }
- if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
- toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
+ if (((_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null) {
+ toolCall.function.arguments += (_i = (_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null ? _i : "";
  }
  controller.enqueue({
  type: "tool-input-delta",
  id: toolCall.id,
- delta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
+ delta: (_j = toolCallDelta.function.arguments) != null ? _j : ""
  });
- if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && isParsableJson(toolCall.function.arguments)) {
+ if (((_k = toolCall.function) == null ? void 0 : _k.name) != null && ((_l = toolCall.function) == null ? void 0 : _l.arguments) != null && isParsableJson(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-input-end",
  id: toolCall.id
  });
  controller.enqueue({
  type: "tool-call",
- toolCallId: (_p = toolCall.id) != null ? _p : generateId(),
+ toolCallId: (_m = toolCall.id) != null ? _m : generateId(),
  toolName: toolCall.function.name,
  input: toolCall.function.arguments
  });
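
Note: the tool-call hunks above only renumber minifier temporaries (a knock-on effect of removing the usage fallback); the emitted stream parts are unchanged. For reference, a streamed tool call still yields this sequence of parts once its JSON arguments become parsable (part types taken from the code above; ids and payloads are illustrative):

    controller.enqueue({ type: 'tool-input-start', id: 'call_1', toolName: 'weather' });
    controller.enqueue({ type: 'tool-input-delta', id: 'call_1', delta: '{"city":"Berlin"}' });
    controller.enqueue({ type: 'tool-input-end', id: 'call_1' });
    controller.enqueue({
      type: 'tool-call',
      toolCallId: 'call_1',
      toolName: 'weather',
      input: '{"city":"Berlin"}',
    });
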
@@ -770,9 +770,7 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z3.union([
  })
  ).nullish()
  }).nullish(),
- finish_reason: z3.string().nullish(),
- // Some providers report usage within each choice in streaming chunks
- usage: openaiCompatibleTokenUsageSchema
+ finish_reason: z3.string().nullish()
  })
  ),
  usage: openaiCompatibleTokenUsageSchema
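
Note: dropping the per-choice `usage` property (and its comment) from the chunk schema matches the runtime change above, so usage is only validated and read at the top level of a chunk. A simplified zod sketch of the affected slice of the schema after this change (the real schema also covers delta content, reasoning and tool calls, and is unioned with the provider error schema):

    import { z } from 'zod/v4';

    // Simplified stand-in for openaiCompatibleTokenUsageSchema, for illustration only.
    const tokenUsageSketch = z
      .object({
        prompt_tokens: z.number().nullish(),
        completion_tokens: z.number().nullish(),
        total_tokens: z.number().nullish(),
      })
      .nullish();

    const chunkSketch = z.object({
      choices: z.array(
        z.object({
          delta: z.object({}).nullish(), // content, reasoning, tool_calls, ...
          finish_reason: z.string().nullish(),
          // no per-choice `usage` field anymore
        }),
      ),
      usage: tokenUsageSketch,
    });
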