@ai-sdk/openai-compatible 1.0.0-canary.4 → 1.0.0-canary.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/index.js +3 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +3 -9
- package/dist/index.mjs.map +1 -1
- package/package.json +11 -12
- /package/{internal/dist → dist/internal}/index.d.mts +0 -0
- /package/{internal/dist → dist/internal}/index.d.ts +0 -0
- /package/{internal/dist → dist/internal}/index.js +0 -0
- /package/{internal/dist → dist/internal}/index.js.map +0 -0
- /package/{internal/dist → dist/internal}/index.mjs +0 -0
- /package/{internal/dist → dist/internal}/index.mjs.map +0 -0
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
@@ -395,7 +395,6 @@ var OpenAICompatibleChatLanguageModel = class {
         completionTokens: (_k = (_j = responseBody.usage) == null ? void 0 : _j.completion_tokens) != null ? _k : NaN
       },
       providerMetadata,
-      rawCall: { rawPrompt, rawSettings },
       request: { body },
       response: {
         ...getResponseMetadata(responseBody),
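The hunk above drops `rawCall: { rawPrompt, rawSettings }` from the value the chat model returns from `doGenerate`; the serialized request remains available under `request.body`. A minimal consumer-side sketch of the shape change, with hypothetical type names (the real types live in `@ai-sdk/provider` and are not reproduced here):

```ts
// Illustrative only: not the package's own type declarations.
type DoGenerateResultBefore = {
  rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> }; // removed in canary.5
  request: { body?: string };
};

type DoGenerateResultAfter = {
  request: { body?: string }; // still carries the serialized chat request
};

// Code that inspected rawCall can read request.body instead.
function describeRequest(result: DoGenerateResultAfter): string {
  return result.request.body ?? "<no request body captured>";
}
```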
@@ -455,7 +454,7 @@ var OpenAICompatibleChatLanguageModel = class {
       });
       return {
         stream: simulatedStream,
-        rawCall: result.rawCall,
+        request: result.request,
         response: result.response,
         warnings: result.warnings
       };
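With `simulateStreaming` enabled, `doStream` now forwards the `request` metadata from the underlying `doGenerate` call instead of the removed `rawCall`. A hedged sketch of what a caller sees; the optional field shapes are assumptions drawn from the surrounding diff:

```ts
// Illustrative only: the forwarded fields as they appear after this change.
type SimulatedStreamResult = {
  stream: ReadableStream<unknown>;
  request?: { body?: string };                     // was: rawCall: result.rawCall
  response?: { headers?: Record<string, string> };
  warnings?: unknown[];
};

function logSimulatedStreamRequest(result: SimulatedStreamResult): void {
  console.log("request body:", result.request?.body);
}
```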
@@ -678,7 +677,6 @@ var OpenAICompatibleChatLanguageModel = class {
           }
         })
       ),
-      rawCall: { rawPrompt, rawSettings },
       request: { body },
       response: { headers: responseHeaders },
       warnings
@@ -940,7 +938,6 @@ var OpenAICompatibleCompletionLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { prompt: rawPrompt, ...rawSettings } = args;
     const choice = response.choices[0];
     return {
       text: choice.text,
@@ -949,8 +946,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
         completionTokens: (_d = (_c = response.usage) == null ? void 0 : _c.completion_tokens) != null ? _d : NaN
       },
       finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
-      rawCall: { rawPrompt, rawSettings },
-      request: { body: JSON.stringify(args) },
+      request: { body: args },
       response: {
         ...getResponseMetadata(response),
         headers: responseHeaders,
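Here the completion model's `doGenerate` stops pre-serializing the reported request: `request.body` was previously `JSON.stringify(args)` and is now the `args` object itself. A small hypothetical helper for downstream code that expected a string:

```ts
// Works with both the canary.4 (string) and canary.5 (object) shapes.
function requestBodyAsString(body: unknown): string {
  return typeof body === "string" ? body : JSON.stringify(body);
}

// e.g. requestBodyAsString({ model: "my-model", prompt: "Hi" })
//   -> '{"model":"my-model","prompt":"Hi"}'   (values are illustrative)
```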
@@ -979,7 +975,6 @@ var OpenAICompatibleCompletionLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { prompt: rawPrompt, ...rawSettings } = args;
     let finishReason = "unknown";
     let usage = {
       promptTokens: Number.NaN,
@@ -1036,8 +1031,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
           }
         })
       ),
-      rawCall: { rawPrompt, rawSettings },
-      request: { body: JSON.stringify(body) },
+      request: { body },
       response: { headers: responseHeaders },
       warnings
     };
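The completion model's streaming path makes the same switch: `request.body` is now the request object (`{ ...args, stream: true }`) rather than its `JSON.stringify` form. A rough sketch of the reported value, with illustrative argument values only:

```ts
// Illustrative values; `args` stands in for the prepared completion call arguments.
const args = { model: "my-model", prompt: "Hello" };
const body = { ...args, stream: true };

const reported = {
  request: { body },                          // canary.5: plain object
  // request: { body: JSON.stringify(body) }  // canary.4: JSON string
};

console.log(reported.request.body.stream); // true
```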
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2ObjectGenerationMode,\n LanguageModelV2ProviderMetadata,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV2ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV2StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n response: result.response,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n LanguageModelV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: LanguageModelV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n request: { body: JSON.stringify(args) },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n request: { body: JSON.stringify(body) },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: 
z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n 
private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport {\n OpenAICompatibleImageModelId,\n OpenAICompatibleImageSettings,\n} from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV2;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV2;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,4BAWO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AAGP,SAAS,kBAAkB,SAExB;AATH;AAUE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,oBAC7C;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAA6B;AAEtB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAA
C;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,sDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/LjE;AAgMI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,qCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAA
O,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAxPzD,YAAAC;AAwP6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,kCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAjR/D;AAkRI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,qCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA9YvC,gBAAAA,KAAA;AAgZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBA
CjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,sCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,kCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,sCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,kCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AArlB5B,gBAAAD,KAAA;AAslBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,c
AAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMptBH,IAAAE,yBASO;AACP,IAAAC,cAAkB;;;ACjBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADnEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA1CzC;AAwDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0
CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA,QACpB,YAAY,KAAK,SAAS;AAAA,QAC1B,QAAQ,KAAK,SAAS;AAAA,QACtB,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxJjE;AAyJI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,2CAA2C,cAAE,OAAO;AAAA,EACxD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM
,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AE5VH,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAqBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AAhCjC;AAiCI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAzDJ;AA0DI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,cAAE,OAAO;AAAA,EACnD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC7GD,IAAAC,yBAAoD;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MAC
jE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_zod","import_provider","_a","toolCall","import_provider_utils","import_zod","import_provider","import_provider","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils"]}
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2ObjectGenerationMode,\n LanguageModelV2ProviderMetadata,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV2ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV2StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n request: result.request,\n response: result.response,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n LanguageModelV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: LanguageModelV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: 
z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n 
}\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport {\n OpenAICompatibleImageModelId,\n OpenAICompatibleImageSettings,\n} from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV2;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV2;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,4BAWO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AAGP,SAAS,kBAAkB,SAExB;AATH;AAUE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,oBAC7C;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAA6B;AAEtB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAA
C;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,sDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/LjE;AAgMI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,qCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAA
O,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAxPzD,YAAAC;AAwP6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,kCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAhR/D;AAiRI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,qCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,sCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA7YvC,gBAAAA,KAAA;AA+YY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;A
AAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,sCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,kCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,sCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,kCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAplB5B,gBAAAD,KAAA;AAqlBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cA
AE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMltBH,IAAAE,yBASO;AACP,IAAAC,cAAkB;;;ACjBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADnEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA1CzC;AAwDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAA
C,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA,QACpB,YAAY,KAAK,SAAS;AAAA,QAC1B,QAAQ,KAAK,SAAS;AAAA,QACtB,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxJjE;AAyJI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,2CAA2C,cAAE,OAAO;AAAA,EACxD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AE
vVH,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAqBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AAhCjC;AAiCI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAzDJ;AA0DI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,cAAE,OAAO;AAAA,EACnD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC7GD,IAAAC,yBAAoD;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAA
A,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_zod","import_provider","_a","toolCall","import_provider_utils","import_zod","import_provider","import_provider","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils"]}
|
package/dist/index.mjs
CHANGED
@@ -379,7 +379,6 @@ var OpenAICompatibleChatLanguageModel = class {
 completionTokens: (_k = (_j = responseBody.usage) == null ? void 0 : _j.completion_tokens) != null ? _k : NaN
 },
 providerMetadata,
-rawCall: { rawPrompt, rawSettings },
 request: { body },
 response: {
 ...getResponseMetadata(responseBody),
@@ -439,7 +438,7 @@ var OpenAICompatibleChatLanguageModel = class {
 });
 return {
 stream: simulatedStream,
-
+request: result.request,
 response: result.response,
 warnings: result.warnings
 };
@@ -662,7 +661,6 @@ var OpenAICompatibleChatLanguageModel = class {
 }
 })
 ),
-rawCall: { rawPrompt, rawSettings },
 request: { body },
 response: { headers: responseHeaders },
 warnings
@@ -933,7 +931,6 @@ var OpenAICompatibleCompletionLanguageModel = class {
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
-const { prompt: rawPrompt, ...rawSettings } = args;
 const choice = response.choices[0];
 return {
 text: choice.text,
@@ -942,8 +939,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
 completionTokens: (_d = (_c = response.usage) == null ? void 0 : _c.completion_tokens) != null ? _d : NaN
 },
 finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),
-
-request: { body: JSON.stringify(args) },
+request: { body: args },
 response: {
 ...getResponseMetadata(response),
 headers: responseHeaders,
@@ -972,7 +968,6 @@ var OpenAICompatibleCompletionLanguageModel = class {
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
-const { prompt: rawPrompt, ...rawSettings } = args;
 let finishReason = "unknown";
 let usage = {
 promptTokens: Number.NaN,
@@ -1029,8 +1024,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
 }
 })
 ),
-
-request: { body: JSON.stringify(body) },
+request: { body },
 response: { headers: responseHeaders },
 warnings
 };
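
The hunks above drop the legacy rawCall: { rawPrompt, rawSettings } field from generate/stream results and return the request body as the raw argument object instead of a pre-serialized JSON string. A minimal TypeScript sketch of the observable difference for callers that inspect request.body; the helper name and shapes below are illustrative, not part of the package:

// 1.0.0-canary.4: request: { body: JSON.stringify(args) }  -> body is a string
// 1.0.0-canary.5: request: { body: args }                  -> body is the args object
// Hypothetical helper for code that logs the outgoing payload across both versions:
function serializeRequestBody(request: { body: unknown }): string {
  return typeof request.body === 'string'
    ? request.body
    : JSON.stringify(request.body);
}
// e.g. const payloadText = serializeRequestBody(result.request); // result from doGenerate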
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
|
|
1
|
-
{"version":3,"sources":["../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2ObjectGenerationMode,\n LanguageModelV2ProviderMetadata,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. `undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV2ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? 
false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n 
providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV2StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n response: result.response,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: {\n completionTokens: number | 
undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: 
`Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n LanguageModelV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: LanguageModelV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n request: { body: JSON.stringify(args) },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n request: { body: JSON.stringify(body) },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: 
z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n 
private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport {\n OpenAICompatibleImageModelId,\n OpenAICompatibleImageSettings,\n} from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV2;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV2;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACtBlB;AAAA,EAGE;AAAA,OACK;AAGP,SAAS,kBAAkB,SAExB;AATH;AAUE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,oBAC7C;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,SAAoB;AAEtB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF
,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/LjE;AAgMI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAA
A,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAxPzD,YAAAC;AAwP6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAjR/D;AAkRI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA9YvC,gBAAAA,KAAA;AAgZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB
,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AArlB5B,gBAAAD,KAAA;AAslBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5
B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMptBH;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAGA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;ACjBlB;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADnEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA1CzC;AAwDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,
EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA,QACpB,YAAY,KAAK,SAAS;AAAA,QAC1B,QAAQ,KAAK,SAAS;AAAA,QACtB,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxJjE;AAyJI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMF,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BE;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,2CAA2CC,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;A
AID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO;AAAA,MACxB,mBAAmBA,GAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AE5VH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAqBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AAhCjC;AAiCI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAzDJ;AA0DI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCC,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC7GD,SAAwB,4BAA4B;AAsF7C,SAAS,uB
AMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","UnsupportedFunctionalityError","_a","toolCall","z","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","UnsupportedFunctionalityError","createJsonErrorResponseHandler","postJsonToApi","combineHeaders","createJsonResponseHandler","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z"]}
|
1
|
+
{"version":3,"sources":["../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2ObjectGenerationMode,\n LanguageModelV2ProviderMetadata,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. `undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV2ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? 
false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n 
providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? NaN,\n },\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV2StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n request: result.request,\n response: result.response,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n 
reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a 
string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV2ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV2Prompt,\n LanguageModelV2ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: LanguageModelV2ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV2Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV2\n{\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported-setting', setting: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: 
z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV2Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n 
}\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport {\n OpenAICompatibleImageModelId,\n OpenAICompatibleImageSettings,\n} from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV2,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV2, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV2;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV2;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV2;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACtBlB;AAAA,EAGE;AAAA,OACK;AAGP,SAAS,kBAAkB,SAExB;AATH;AAUE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,oBAC7C;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACvIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,SAAoB;AAEtB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF
,GAqBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALjCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/LjE;AAgMI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAA
A,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAxPzD,YAAAC;AAwP6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAhR/D;AAiRI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,UAAU,OAAO;AAAA,QACjB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA7YvC,gBAAAA,KAAA;AA+YY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,y
BAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAplB5B,gBAAAD,KAAA;AAqlBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA
,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMltBH;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAGA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;ACjBlB;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADnEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA1CzC;AAwDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,QAAQ,CAAC;AAAA,IACjE;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,aAAa,CAAC;AAAA,IACtE;AAEA,QAAI,kBAAkB,QA
AQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,KAAK,SAAS;AAAA,QACpB,YAAY,KAAK,SAAS;AAAA,QAC1B,QAAQ,KAAK,SAAS;AAAA,QACtB,MAAM,KAAK,SAAS;AAAA;AAAA,QAGpB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxJjE;AAyJI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMF,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BE;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,2CAA2CC,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AA
AA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO;AAAA,MACxB,mBAAmBA,GAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AEvVH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAqBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AAhCjC;AAiCI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAzDJ;AA0DI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCC,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC7GD,SAAwB,4BAA4B;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IA
CtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","UnsupportedFunctionalityError","_a","toolCall","z","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","UnsupportedFunctionalityError","createJsonErrorResponseHandler","postJsonToApi","combineHeaders","createJsonResponseHandler","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z"]}
|
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@ai-sdk/openai-compatible",
-  "version": "1.0.0-canary.4",
+  "version": "1.0.0-canary.5",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -8,7 +8,6 @@
   "types": "./dist/index.d.ts",
   "files": [
     "dist/**/*",
-    "internal/dist/**/*",
     "CHANGELOG.md"
   ],
   "exports": {
@@ -19,15 +18,15 @@
       "require": "./dist/index.js"
     },
     "./internal": {
-      "types": "./internal/dist/index.d.ts",
-      "import": "./internal/dist/index.mjs",
-      "module": "./internal/dist/index.mjs",
-      "require": "./internal/dist/index.js"
+      "types": "./dist/internal/index.d.ts",
+      "import": "./dist/internal/index.mjs",
+      "module": "./dist/internal/index.mjs",
+      "require": "./dist/internal/index.js"
     }
   },
   "dependencies": {
-    "@ai-sdk/provider": "2.0.0-canary.
-    "@ai-sdk/provider-utils": "3.0.0-canary.
+    "@ai-sdk/provider": "2.0.0-canary.4",
+    "@ai-sdk/provider-utils": "3.0.0-canary.5"
   },
   "devDependencies": {
     "@types/node": "20.17.24",
@@ -57,11 +56,11 @@
     "ai"
   ],
   "scripts": {
-    "build": "tsup",
-    "build:watch": "tsup --watch",
-    "clean": "rm -rf dist
+    "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
+    "build:watch": "pnpm clean && tsup --watch",
+    "clean": "rm -rf dist *.tsbuildinfo",
     "lint": "eslint \"./**/*.ts*\"",
-    "type-check": "tsc --
+    "type-check": "tsc --build",
     "prettier-check": "prettier --check \"./**/*.ts*\"",
     "test": "pnpm test:node && pnpm test:edge",
     "test:edge": "vitest --config vitest.edge.config.js --run",
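The exports remap above only relocates the compiled files inside the published package; the specifiers that consumers import are unchanged. Below is a minimal TypeScript sketch of how the two entry points resolve after this release. The namespace import is illustrative (no particular named export from the internal entry point is assumed), and the provider name and baseURL are placeholder values.

// Root entry point: "." still resolves to ./dist/index.mjs / ./dist/index.js
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
// Subpath entry point: "./internal" now resolves to ./dist/internal/index.mjs / .js
// (previously built under internal/dist/); the import specifier itself is unchanged.
import * as internal from '@ai-sdk/openai-compatible/internal';

// Placeholder provider setup; 'example' and the baseURL are hypothetical values.
const provider = createOpenAICompatible({
  name: 'example',
  baseURL: 'https://api.example.com/v1',
});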
File without changes (×6: the renamed files listed above)