@ai-sdk/openai-compatible 0.2.10 → 0.2.12
This diff shows the changes between these two published package versions as they appear in the public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +3 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.js +12 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +12 -8
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +1 -0
- package/internal/dist/index.d.ts +1 -0
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/openai-compatible
 
+## 0.2.12
+
+### Patch Changes
+
+- 13492fe: fix(providers/xai): return actual usage when streaming instead of NaN
+
+## 0.2.11
+
+### Patch Changes
+
+- b5c9cd4: fix (provider/openai-compatible): change tool_call type schema to nullish
+
 ## 0.2.10
 
 ### Patch Changes
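The 13492fe entry corresponds to the new `includeUsage` / `stream_options` plumbing shown in the dist diffs below; the b5c9cd4 entry loosens the streamed tool_call `type` field in the chunk schema. A minimal sketch of that schema change, using zod directly — the shape is copied from the compiled chunk schema further down (where `type` was previously `.optional()`), and the sample delta is hypothetical:

```ts
import { z } from 'zod';

// Streamed tool_call delta shape, mirroring the chunk schema in dist/index.js.
// With `.nullish()`, a delta that sends `type: null` (or omits the field) still
// parses instead of failing stream validation.
const toolCallDeltaSchema = z.object({
  index: z.number(),
  id: z.string().nullish(),
  type: z.literal('function').nullish(), // was `.optional()` (not nullable) before 0.2.11
  function: z.object({
    name: z.string().nullish(),
    arguments: z.string().nullish(),
  }),
});

// Hypothetical chunk fragment from a provider that sends `type: null`:
const delta = { index: 0, id: 'call_1', type: null, function: { arguments: '{}' } };
console.log(toolCallDeltaSchema.safeParse(delta).success); // true
```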
package/dist/index.d.mts
CHANGED
@@ -1,6 +1,6 @@
 import { LanguageModelV1ProviderMetadata, LanguageModelV1, LanguageModelV1ObjectGenerationMode, EmbeddingModelV1, ImageModelV1, ProviderV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import {
+import { ZodSchema, z } from 'zod';
 
 type OpenAICompatibleChatModelId = string;
 interface OpenAICompatibleChatSettings {
@@ -109,6 +109,7 @@ type OpenAICompatibleChatConfig = {
 path: string;
 }) => string;
 fetch?: FetchFunction;
+includeUsage?: boolean;
 errorStructure?: ProviderErrorStructure<any>;
 metadataExtractor?: MetadataExtractor;
 /**
@@ -173,6 +174,7 @@ interface OpenAICompatibleCompletionSettings {
 
 type OpenAICompatibleCompletionConfig = {
 provider: string;
+includeUsage?: boolean;
 headers: () => Record<string, string | undefined>;
 url: (options: {
 modelId: string;
package/dist/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { LanguageModelV1ProviderMetadata, LanguageModelV1, LanguageModelV1ObjectGenerationMode, EmbeddingModelV1, ImageModelV1, ProviderV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
-import {
+import { ZodSchema, z } from 'zod';
 
 type OpenAICompatibleChatModelId = string;
 interface OpenAICompatibleChatSettings {
@@ -109,6 +109,7 @@ type OpenAICompatibleChatConfig = {
 path: string;
 }) => string;
 fetch?: FetchFunction;
+includeUsage?: boolean;
 errorStructure?: ProviderErrorStructure<any>;
 metadataExtractor?: MetadataExtractor;
 /**
@@ -173,6 +174,7 @@ interface OpenAICompatibleCompletionSettings {
 
 type OpenAICompatibleCompletionConfig = {
 provider: string;
+includeUsage?: boolean;
 headers: () => Record<string, string | undefined>;
 url: (options: {
 modelId: string;
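Both declaration files gain the same optional flag: `includeUsage?: boolean` on `OpenAICompatibleChatConfig` and `OpenAICompatibleCompletionConfig`. A rough sketch of how a provider built on this package might opt in, assuming direct construction of the exported `OpenAICompatibleChatLanguageModel`; the model id, endpoint, provider name, and env variable below are placeholders, while the config fields themselves come from the declaration above:

```ts
import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';

// Hypothetical provider wiring. `includeUsage: true` is the new 0.2.12 option;
// it makes streaming requests ask the API for a final usage chunk via
// `stream_options: { include_usage: true }` (see the dist/index.js diff below).
const model = new OpenAICompatibleChatLanguageModel(
  'example-model-id',
  {}, // OpenAICompatibleChatSettings
  {
    provider: 'example.chat',
    headers: () => ({ Authorization: `Bearer ${process.env.EXAMPLE_API_KEY}` }),
    url: ({ path }) => `https://api.example.com/v1${path}`,
    includeUsage: true,
  },
);
```

The flag defaults to off, so the new streaming behavior is opt-in per provider.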
package/dist/index.js
CHANGED
@@ -505,7 +505,12 @@ var OpenAICompatibleChatLanguageModel = class {
 };
 }
 const { args, warnings } = this.getArgs({ ...options });
-const body =
+const body = {
+...args,
+stream: true,
+// only include stream_options when in strict compatibility mode:
+stream_options: this.config.includeUsage ? { include_usage: true } : void 0
+};
 const metadataExtractor = (_a = this.config.metadataExtractor) == null ? void 0 : _a.createStreamExtractor();
 const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
 url: this.config.url({
@@ -513,10 +518,7 @@ var OpenAICompatibleChatLanguageModel = class {
 modelId: this.modelId
 }),
 headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
-body
-...args,
-stream: true
-},
+body,
 failedResponseHandler: this.failedResponseHandler,
 successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
 this.chunkSchema
@@ -725,7 +727,7 @@ var OpenAICompatibleChatLanguageModel = class {
 rawCall: { rawPrompt, rawSettings },
 rawResponse: { headers: responseHeaders },
 warnings,
-request: { body }
+request: { body: JSON.stringify(body) }
 };
 }
 };
@@ -782,7 +784,7 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_zod2.z.union
 import_zod2.z.object({
 index: import_zod2.z.number(),
 id: import_zod2.z.string().nullish(),
-type: import_zod2.z.literal("function").
+type: import_zod2.z.literal("function").nullish(),
 function: import_zod2.z.object({
 name: import_zod2.z.string().nullish(),
 arguments: import_zod2.z.string().nullish()
@@ -1034,7 +1036,9 @@ var OpenAICompatibleCompletionLanguageModel = class {
 const { args, warnings } = this.getArgs(options);
 const body = {
 ...args,
-stream: true
+stream: true,
+// only include stream_options when in strict compatibility mode:
+stream_options: this.config.includeUsage ? { include_usage: true } : void 0
 };
 const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
 url: this.config.url({
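Both streaming paths now build the request body the same way: `stream_options` is set only when `config.includeUsage` is true, and the chat path additionally reports `request.body` as a JSON string rather than the raw object. A small sketch of why the `void 0` branch keeps the wire format unchanged for providers that do not opt in — `args` here is a stand-in for the generated completion arguments, not code from the package:

```ts
// Stand-in for the generated chat-completion arguments:
const args = { model: 'example-model-id', messages: [{ role: 'user', content: 'Hi' }] };

function buildStreamBody(includeUsage: boolean | undefined) {
  return {
    ...args,
    stream: true,
    // `undefined` properties are dropped by JSON.stringify, so the payload
    // only gains a `stream_options` key when a provider explicitly opts in:
    stream_options: includeUsage ? { include_usage: true } : undefined,
  };
}

console.log(JSON.stringify(buildStreamBody(true)));
// ...,"stream":true,"stream_options":{"include_usage":true}}
console.log(JSON.stringify(buildStreamBody(undefined)));
// ...,"stream":true}   <- no stream_options key at all
```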
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: 
z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly 
specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV1, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV1;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV1;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBAWO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AACP,4BAA0C;AAG1C,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,qBAAT,mBAA2B,qBAA3B,YAA+C,CAAC;AACzD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAtCvC;AAuCY,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA,kBACxD;AAAA,kBACA,GAAG;AAAA,gBACL;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACpIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAA6B;AAEtB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAuBE;AAhCF;AAkCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MA
AI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,aAAa,QAAW,aAAa;AAAA,EAC1E;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,aAAa,MAAM,aAAa;AAAA,IACrE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnHnD;AAoHI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,mBAAmB,KAAK;AAAA,QAC1B,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,OAAM,UAAK,SAAL,YAAa;AAAA,gBACnB,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAjPjE;AAkPI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9
D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AA1SzD,YAAAC;AA0S6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,YAAY;AAAA,MAC1C;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAhU/D;AAiUI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,QACpB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA7bvC,gBAAAA,KAAA;AA+bY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AA
AA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AApoB5B,gBAAAD,KAAA;AAqoBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAA
a,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AM1wBH,IAAAE,mBAOO;AACP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;AClBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADtEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA3CzC;AAyDI,SAA
K,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA5FnD;AA6FI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AArLjE;AAsLI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA
,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,2CAA2C,cAAE,OAAO;AAAA,EACxD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AEtXH,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAmBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AA9BjC;AA+BI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAvDJ;AAwDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,o
BAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,cAAE,OAAO;AAAA,EACnD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC3GD,IAAAC,yBAAoD;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_provider_utils","import_zod","import_provider","_a","toolCall","import_provider","import_provider_utils","import_zod","import_provider","import_provider","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils"]}
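The embedded `openai-compatible-provider.ts` source in the map above shows the `createOpenAICompatible` factory and its settings (`baseURL`, `name`, `apiKey`, `headers`, `queryParams`, `fetch`), and that the returned provider is both callable and exposes per-model-type factories. A minimal usage sketch based on those types — the endpoint, model ids, header names, and env var below are placeholders, not part of this package:

```ts
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

// All endpoint URLs, model ids, and env vars here are hypothetical placeholders.
const example = createOpenAICompatible({
  name: 'example',
  baseURL: 'https://api.example.com/v1',
  apiKey: process.env.EXAMPLE_API_KEY,          // adds `Authorization: Bearer <apiKey>`
  headers: { 'x-custom-header': 'value' },      // merged in after the Authorization header
  queryParams: { 'api-version': '2024-01-01' }, // appended to every request URL
});

// The provider is callable (chat models by default) and exposes typed factories:
const chatModel = example('my-chat-model');
const completionModel = example.completionModel('my-completion-model');
const embeddingModel = example.textEmbeddingModel('my-embedding-model');
const imageModel = example.imageModel('my-image-model');
```

Per the factory source, calling the provider directly delegates to `chatModel`, so `example('my-chat-model')` and `example.chatModel('my-chat-model')` are equivalent.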
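The `includeUsage` flag added to `OpenAICompatibleChatConfig` and `OpenAICompatibleCompletionConfig` in this release is what lets streaming calls report real token counts instead of `NaN`: when set, the model classes send `stream_options: { include_usage: true }` with streaming requests. A sketch for provider authors who construct the model class directly — the provider name, URL, and header wiring below are assumptions for illustration only:

```ts
import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';

// Placeholder wiring; a real provider package supplies its own url/headers/fetch.
const chatModel = new OpenAICompatibleChatLanguageModel(
  'my-chat-model',
  {}, // settings
  {
    provider: 'example.chat',
    url: ({ path }) => `https://api.example.com/v1${path}`,
    headers: () => ({ Authorization: `Bearer ${process.env.EXAMPLE_API_KEY}` }),
    defaultObjectGenerationMode: 'tool',
    // New in this release: request usage on the final streamed chunk.
    // doStream forwards this as `stream_options: { include_usage: true }`.
    includeUsage: true,
  },
);
```

With the flag set and an upstream API that honors `stream_options`, the `finish` stream part carries the reported `promptTokens` / `completionTokens`; if the API reports no usage, those fields still fall back to `NaN` as before.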
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nexport type { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nexport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nexport type { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nexport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nexport type { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nexport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\nexport type { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. 
`undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? 
{\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends 
z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n 
errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV1, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV1;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV1;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBAWO;AACP,IAAAC,cAAkB;;;ACtBlB,sBAIO;AACP,4BAA0C;AAG1C,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,qBAAT,mBAA2B,qBAA3B,YAA+C,CAAC;AACzD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAtCvC;AAuCY,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA,kBACxD;AAAA,kBACA,GAAG;AAAA,gBACL;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACpIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAA6B;AAEtB,IAAM,kCAAkC,aAAE,OAAO;AAAA,EACtD,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAuBE;AAhCF;AAkCE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MA
AI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,aAAa,QAAW,aAAa;AAAA,EAC1E;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,aAAa,MAAM,aAAa;AAAA,IACrE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALrCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA5DlC;AA4EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,mBAAmB,KAAK;AAAA,QAC1B,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,OAAM,UAAK,SAAL,YAAa;AAAA,gBACnB,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlPjE;AAmPI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9
D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AA3SzD,YAAAC;AA2S6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,YAAY;AAAA,MAC1C;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAjU/D;AAkUI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,QACpB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AApcvC,gBAAAA,KAAA;AAscY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,
CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AA3oB5B,gBAAAD,KAAA;AA4oBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;A
AAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,cACtC,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,cAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMjxBH,IAAAE,mBAOO;AACP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;AClBlB,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADrEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;A
AChC,SAAS,8BAA8B;AA5CzC;AA0DI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7FnD;AA8FI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtLjE;AAuLI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM
,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,2CAA2C,cAAE,OAAO;AAAA,EACxD,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,cAAE,MAAM;AAAA,EACN,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AE5XH,IAAAC,mBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD,IAAAC,yBAMO;AACP,IAAAC,cAAkB;AAmBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AA9BjC;AA+BI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAvDJ;AAwDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE
,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,cAAE,OAAO;AAAA,EACnD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,UAAU,cAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC3GD,IAAAC,yBAAoD;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_provider_utils","import_zod","import_provider","_a","toolCall","import_provider","import_provider_utils","import_zod","import_provider","import_provider","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider_utils"]}
package/dist/index.mjs
CHANGED
@@ -489,7 +489,12 @@ var OpenAICompatibleChatLanguageModel = class {
       };
     }
     const { args, warnings } = this.getArgs({ ...options });
-    const body =
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.includeUsage ? { include_usage: true } : void 0
+    };
     const metadataExtractor = (_a = this.config.metadataExtractor) == null ? void 0 : _a.createStreamExtractor();
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
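
The hunk above wires a new `includeUsage` config flag into the streaming request body: when a provider sets it, the body posted to `/chat/completions` gains `stream_options: { include_usage: true }`, so an OpenAI-compatible server can append a usage chunk to the stream instead of the token counts staying at their NaN defaults. A minimal sketch of a downstream provider opting in, assuming the `OpenAICompatibleChatLanguageModel` class export that wrapper providers construct directly; the model id, endpoint, and environment variable are placeholders, not part of this package:

import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';

// Sketch, not part of the diff: a wrapper provider enabling usage reporting
// for streamed chat completions. Only `includeUsage: true` exercises the new code path.
const chatModel = new OpenAICompatibleChatLanguageModel(
  'example-model', // placeholder model id
  {}, // default OpenAICompatibleChatSettings
  {
    provider: 'example.chat',
    url: ({ path }) => `https://api.example.com/v1${path}`, // placeholder endpoint
    headers: () => ({ Authorization: `Bearer ${process.env.EXAMPLE_API_KEY}` }),
    defaultObjectGenerationMode: 'tool',
    // New flag, forwarded to `stream_options` in the hunk above:
    includeUsage: true,
  },
);
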
@@ -497,10 +502,7 @@ var OpenAICompatibleChatLanguageModel = class {
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body
-        ...args,
-        stream: true
-      },
+      body,
       failedResponseHandler: this.failedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
         this.chunkSchema
@@ -709,7 +711,7 @@ var OpenAICompatibleChatLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       warnings,
-      request: { body }
+      request: { body: JSON.stringify(body) }
     };
   }
 };
@@ -766,7 +768,7 @@ var createOpenAICompatibleChatChunkSchema = (errorSchema) => z2.union([
             z2.object({
               index: z2.number(),
               id: z2.string().nullish(),
-              type: z2.literal("function").
+              type: z2.literal("function").nullish(),
               function: z2.object({
                 name: z2.string().nullish(),
                 arguments: z2.string().nullish()
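
This hunk relaxes the streamed tool-call delta schema: `type` is now nullish, because some OpenAI-compatible servers omit the field on continuation deltas and the strict literal made those chunks fail validation. A standalone zod sketch of the relaxed shape, with hypothetical chunk values for illustration:

import { z } from 'zod';

// Relaxed tool-call delta: `type`, like `id`, `name`, and `arguments`,
// may be absent or null on continuation chunks.
const toolCallDeltaSchema = z.object({
  index: z.number(),
  id: z.string().nullish(),
  type: z.literal('function').nullish(),
  function: z.object({
    name: z.string().nullish(),
    arguments: z.string().nullish(),
  }),
});

// First chunk names the tool; later chunks only stream more argument text
// and typically carry no `type` or `id`. Both shapes now parse.
toolCallDeltaSchema.parse({
  index: 0,
  id: 'call_1',
  type: 'function',
  function: { name: 'getWeather', arguments: '' },
});
toolCallDeltaSchema.parse({ index: 0, function: { arguments: '{"city":"Paris"}' } });
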
@@ -1029,7 +1031,9 @@ var OpenAICompatibleCompletionLanguageModel = class {
     const { args, warnings } = this.getArgs(options);
     const body = {
       ...args,
-      stream: true
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.includeUsage ? { include_usage: true } : void 0
     };
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
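
The completion model gets the same opt-in: with `includeUsage` set on its config, the body posted to `/completions` carries `stream_options`, and the `usage` object from the server's final chunk is forwarded in the `finish` stream part instead of the `Number.NaN` placeholders. A sketch of the resulting traffic, with illustrative model id, prompt, and token counts:

// Request body built by doStream when config.includeUsage is true
// (prompt formatting follows convertToOpenAICompatibleCompletionPrompt):
const streamingRequestBody = {
  model: 'example-completion-model', // placeholder
  prompt: 'user:\nHello\n\nassistant:\n',
  stream: true,
  stream_options: { include_usage: true },
};

// An OpenAI-compatible server would then end the stream with a chunk such as
//   {"id":"cmpl-1","choices":[],"usage":{"prompt_tokens":7,"completion_tokens":12}}
// which the stream's flush step emits as
//   { type: 'finish', finishReason: 'stop', usage: { promptTokens: 7, completionTokens: 12 } }
// rather than usage values of Number.NaN.
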
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. `undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? 
false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 
'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify({ ...args, stream: true });\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: 
z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly 
specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV1, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV1;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV1;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACtBlB;AAAA,EAGE;AAAA,OACK;AACP,SAAS,iCAAiC;AAG1C,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,qBAAT,mBAA2B,qBAA3B,YAA+C,CAAC;AACzD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAtCvC;AAuCY,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,WAAW,0BAA0B,KAAK,KAAK,CAAC;AAAA,kBACxD;AAAA,kBACA,GAAG;AAAA,gBACL;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACpIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,SAAoB;AAEtB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAuBE;AAhCF;AAkCE,QAAM,UAAQ,UAAK,UAAL
,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,aAAa,QAAW,aAAa;AAAA,EAC1E;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,aAAa,MAAM,aAAa;AAAA,IACrE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA3DlC;AA2EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnHnD;AAoHI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,mBAAmB,KAAK;AAAA,QAC1B,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,OAAM,UAAK,SAAL,YAAa;AAAA,gBACnB,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAjPjE;AAkPI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MAC
D,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AA1SzD,YAAAC;AA0S6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,YAAY;AAAA,MAC1C;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAhU/D;AAiUI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,QACpB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,EAAE,GAAG,MAAM,QAAQ,KAAK,CAAC;AACrD,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AA7bvC,gBAAAA,KAAA;AA+bY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,
EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AApoB5B,gBAAAD,KAAA;AAqoBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AA
AA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AM1wBH;AAAA,EAME,iCAAAC;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAGA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;AClBlB;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EAC
F;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADtEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA3CzC;AAyDI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA5FnD;AA6FI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAIC,+BAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAIA,+BAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AArLjE;AAsLI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMF,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BE;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,
SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,2CAA2CC,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO;AAAA,MACxB,mBAAmBA,GAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AEtXH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAmBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,EAZH,IAAI,mBAA2B;AA9BjC;AA+BI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACr
B,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAvDJ;AAwDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCC,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC3GD,SAAwB,4BAA4B;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","UnsupportedFunctionalityError","_a","toolCall","z","UnsupportedFunctionalityError","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","UnsupportedFunctionalityError","createJsonErrorResponseHandler","UnsupportedFunctionalityError","postJsonToApi","combineHeaders","createJsonResponseHandler","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z"]}
|
1
|
+
{"version":3,"sources":["../src/openai-compatible-chat-language-model.ts","../src/convert-to-openai-compatible-chat-messages.ts","../src/get-response-metadata.ts","../src/map-openai-compatible-finish-reason.ts","../src/openai-compatible-error.ts","../src/openai-compatible-prepare-tools.ts","../src/openai-compatible-completion-language-model.ts","../src/convert-to-openai-compatible-completion-prompt.ts","../src/openai-compatible-embedding-model.ts","../src/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1ObjectGenerationMode,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n OpenAICompatibleChatSettings,\n} from './openai-compatible-chat-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nimport { prepareTools } from './openai-compatible-prepare-tools';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\nDefault object generation mode that should be used with this model when\nno mode is specified. Should be the mode with the best results for this\nmodel. `undefined` can be specified if object generation is not supported.\n */\n defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n readonly settings: OpenAICompatibleChatSettings;\n\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n settings: OpenAICompatibleChatSettings,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? 
false;\n }\n\n get defaultObjectGenerationMode(): 'json' | 'tool' | undefined {\n return this.config.defaultObjectGenerationMode;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerMetadata,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools({\n mode,\n structuredOutputs: this.supportsStructuredOutputs,\n });\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.supportsStructuredOutputs === true && mode.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n name: mode.name ?? 
'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = responseBody.choices[0];\n\n // provider metadata:\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n }),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n const promptTokenDetails = responseBody.usage?.prompt_tokens_details;\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata[this.providerOptionsName].reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata[this.providerOptionsName].cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n\n return {\n text: choice.message.content ?? undefined,\n reasoning: choice.message.reasoning_content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: {\n promptTokens: responseBody.usage?.prompt_tokens ?? NaN,\n completionTokens: responseBody.usage?.completion_tokens ?? 
NaN,\n },\n providerMetadata,\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(responseBody),\n warnings,\n request: { body },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n if (this.settings.simulateStreaming) {\n const result = await this.doGenerate(options);\n const simulatedStream = new ReadableStream<LanguageModelV1StreamPart>({\n start(controller) {\n controller.enqueue({ type: 'response-metadata', ...result.response });\n if (result.reasoning) {\n if (Array.isArray(result.reasoning)) {\n for (const part of result.reasoning) {\n if (part.type === 'text') {\n controller.enqueue({\n type: 'reasoning',\n textDelta: part.text,\n });\n }\n }\n } else {\n controller.enqueue({\n type: 'reasoning',\n textDelta: result.reasoning,\n });\n }\n }\n if (result.text) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: result.text,\n });\n }\n if (result.toolCalls) {\n for (const toolCall of result.toolCalls) {\n controller.enqueue({\n type: 'tool-call',\n ...toolCall,\n });\n }\n }\n controller.enqueue({\n type: 'finish',\n finishReason: result.finishReason,\n usage: result.usage,\n logprobs: result.logprobs,\n providerMetadata: result.providerMetadata,\n });\n controller.close();\n },\n });\n return {\n stream: simulatedStream,\n rawCall: result.rawCall,\n rawResponse: result.rawResponse,\n warnings: result.warnings,\n };\n }\n\n const { args, warnings } = this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n completionTokens: number | undefined;\n completionTokensDetails: {\n reasoningTokens: number | undefined;\n acceptedPredictionTokens: number | undefined;\n rejectedPredictionTokens: number | undefined;\n };\n promptTokens: number | undefined;\n promptTokensDetails: {\n cachedTokens: number | undefined;\n };\n } = {\n completionTokens: undefined,\n completionTokensDetails: {\n reasoningTokens: undefined,\n acceptedPredictionTokens: undefined,\n rejectedPredictionTokens: undefined,\n },\n promptTokens: undefined,\n promptTokensDetails: {\n cachedTokens: undefined,\n },\n };\n let isFirstChunk = true;\n let providerOptionsName = this.providerOptionsName;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n // TODO we lost type safety on Chunk, most likely due to the error schema. 
MUST FIX\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n const value = chunk.value;\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error.message });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n const {\n prompt_tokens,\n completion_tokens,\n prompt_tokens_details,\n completion_tokens_details,\n } = value.usage;\n\n usage.promptTokens = prompt_tokens ?? undefined;\n usage.completionTokens = completion_tokens ?? undefined;\n\n if (completion_tokens_details?.reasoning_tokens != null) {\n usage.completionTokensDetails.reasoningTokens =\n completion_tokens_details?.reasoning_tokens;\n }\n if (\n completion_tokens_details?.accepted_prediction_tokens != null\n ) {\n usage.completionTokensDetails.acceptedPredictionTokens =\n completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n completion_tokens_details?.rejected_prediction_tokens != null\n ) {\n usage.completionTokensDetails.rejectedPredictionTokens =\n completion_tokens_details?.rejected_prediction_tokens;\n }\n if (prompt_tokens_details?.cached_tokens != null) {\n usage.promptTokensDetails.cachedTokens =\n prompt_tokens_details?.cached_tokens;\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n if (delta.reasoning_content != null) {\n controller.enqueue({\n type: 'reasoning',\n textDelta: delta.reasoning_content,\n });\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? 
'',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n const providerMetadata: LanguageModelV1ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (usage.completionTokensDetails.reasoningTokens != null) {\n providerMetadata[providerOptionsName].reasoningTokens =\n usage.completionTokensDetails.reasoningTokens;\n }\n if (\n usage.completionTokensDetails.acceptedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage.completionTokensDetails.acceptedPredictionTokens;\n }\n if (\n usage.completionTokensDetails.rejectedPredictionTokens != null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage.completionTokensDetails.rejectedPredictionTokens;\n }\n if (usage.promptTokensDetails.cachedTokens != null) {\n providerMetadata[providerOptionsName].cachedPromptTokens =\n usage.promptTokensDetails.cachedTokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? 
NaN,\n },\n providerMetadata,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <ERROR_SCHEMA extends z.ZodType>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n }),\n errorSchema,\n ]);\n","import {\n LanguageModelV1Prompt,\n LanguageModelV1ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\n\nfunction getOpenAIMetadata(message: {\n providerMetadata?: LanguageModelV1ProviderMetadata;\n}) {\n return message?.providerMetadata?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n ...partMetadata,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z, ZodSchema } from 'zod';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodSchema<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n structuredOutputs,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n structuredOutputs: boolean;\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionSettings,\n} from './openai-compatible-completion-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV1\n{\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n readonly settings: OpenAICompatibleCompletionSettings;\n\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n settings: OpenAICompatibleCompletionSettings,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerMetadata?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders, body: rawResponse },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends 
z.ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n errorSchema,\n ]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingSettings,\n} from './openai-compatible-embedding-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n 
errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel\n implements EmbeddingModelV1<string>\n{\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n private readonly settings: OpenAICompatibleEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n settings: OpenAICompatibleEmbeddingSettings,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","import { ImageModelV1, ImageModelV1CallWarning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from './openai-compatible-error';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV1 {\n readonly specificationVersion = 'v1';\n\n get maxImagesPerCall(): number {\n return this.settings.maxImagesPerCall ?? 
10;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly settings: OpenAICompatibleImageSettings,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV1['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV1['doGenerate']>>\n > {\n const warnings: Array<ImageModelV1CallWarning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported-setting', setting: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n ...(this.settings.user ? { user: this.settings.user } : {}),\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV1,\n ImageModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport { FetchFunction, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAICompatibleChatLanguageModel } from './openai-compatible-chat-language-model';\nimport { OpenAICompatibleChatSettings } from './openai-compatible-chat-settings';\nimport { OpenAICompatibleCompletionLanguageModel } from './openai-compatible-completion-language-model';\nimport { OpenAICompatibleCompletionSettings } from './openai-compatible-completion-settings';\nimport { OpenAICompatibleEmbeddingModel } from './openai-compatible-embedding-model';\nimport { OpenAICompatibleEmbeddingSettings } from './openai-compatible-embedding-settings';\nimport { OpenAICompatibleImageSettings } from './openai-compatible-image-settings';\nimport { OpenAICompatibleImageModel } from './openai-compatible-image-model';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV1, 'imageModel'> {\n (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ): 
LanguageModelV1;\n\n completionModel(\n modelId: COMPLETION_MODEL_IDS,\n settings?: OpenAICompatibleCompletionSettings,\n ): LanguageModelV1;\n\n textEmbeddingModel(\n modelId: EMBEDDING_MODEL_IDS,\n settings?: OpenAICompatibleEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n imageModel(\n modelId: IMAGE_MODEL_IDS,\n settings?: OpenAICompatibleImageSettings,\n ): ImageModelV1;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getHeaders = () => ({\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n });\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) => createChatModel(modelId, settings);\n\n const createChatModel = (\n modelId: CHAT_MODEL_IDS,\n settings: OpenAICompatibleChatSettings = {},\n ) =>\n new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n\n const createCompletionModel = (\n modelId: COMPLETION_MODEL_IDS,\n settings: OpenAICompatibleCompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createEmbeddingModel = (\n modelId: EMBEDDING_MODEL_IDS,\n settings: OpenAICompatibleEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const createImageModel = (\n modelId: IMAGE_MODEL_IDS,\n settings: OpenAICompatibleImageSettings = {},\n ) =>\n new OpenAICompatibleImageModel(\n modelId,\n settings,\n 
getCommonModelConfig('image'),\n );\n\n const provider = (\n modelId: CHAT_MODEL_IDS,\n settings?: OpenAICompatibleChatSettings,\n ) => createLanguageModel(modelId, settings);\n\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACtBlB;AAAA,EAGE;AAAA,OACK;AACP,SAAS,iCAAiC;AAG1C,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,qBAAT,mBAA2B,qBAA3B,YAA+C,CAAC;AACzD;AAEO,SAAS,sCACd,QAC4B;AAC5B,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAtCvC;AAuCY,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,WAAW,0BAA0B,KAAK,KAAK,CAAC;AAAA,kBACxD;AAAA,kBACA,GAAG;AAAA,gBACL;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACpIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,SAAoB;AAEtB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC7BF;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAuBE;AAhCF;AAkCE,QAAM,UAAQ,UAAK,UAAL
,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,QAAM,oBAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,aAAa,QAAW,aAAa;AAAA,EAC1E;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,aAAa,MAAM,aAAa;AAAA,IACrE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALrCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAYxE,YACE,SACA,UACA,QACA;AAfF,SAAS,uBAAuB;AA5DlC;AA4EI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,8BAA2D;AAC7D,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApHnD;AAqHI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,MAEN,MAAM;AAAA,MACN;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,UAAU,sCAAsC,MAAM;AAAA,IACxD;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa;AAAA,UACxD;AAAA,UACA,mBAAmB,KAAK;AAAA,QAC1B,CAAC;AAED,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,8BAA8B,QAAQ,KAAK,UAAU,OACtD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,OAAM,UAAK,SAAL,YAAa;AAAA,gBACnB,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAlPjE;AAmPI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MAC
D,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,aAAa,QAAQ,CAAC;AAGrC,UAAM,mBAAoD;AAAA,MACxD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,IAAG,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QAClD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,UAAM,sBAAqB,kBAAa,UAAb,mBAAoB;AAC/C,SAAI,iEAAwB,qBAAoB,MAAM;AACpD,uBAAiB,KAAK,mBAAmB,EAAE,kBACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,yDAAoB,kBAAiB,MAAM;AAC7C,uBAAiB,KAAK,mBAAmB,EAAE,qBACzC,yDAAoB;AAAA,IACxB;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,sBAAf,YAAoC;AAAA,MAC/C,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AA3SzD,YAAAC;AA2S6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO;AAAA,QACL,eAAc,wBAAa,UAAb,mBAAoB,kBAApB,YAAqC;AAAA,QACnD,mBAAkB,wBAAa,UAAb,mBAAoB,sBAApB,YAAyC;AAAA,MAC7D;AAAA,MACA;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,YAAY;AAAA,MAC1C;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAjU/D;AAkUI,QAAI,KAAK,SAAS,mBAAmB;AACnC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAC5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AACpE,cAAI,OAAO,WAAW;AACpB,gBAAI,MAAM,QAAQ,OAAO,SAAS,GAAG;AACnC,yBAAW,QAAQ,OAAO,WAAW;AACnC,oBAAI,KAAK,SAAS,QAAQ;AACxB,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK;AAAA,kBAClB,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF,OAAO;AACL,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AACA,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AACA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AACD,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,QACpB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAEtD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAWA;AAAA,MACF,kBAAkB;AAAA,MAClB,yBAAyB;AAAA,QACvB,iBAAiB;AAAA,QACjB,0BAA0B;AAAA,QAC1B,0BAA0B;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,qBAAqB;AAAA,QACnB,cAAc;AAAA,MAChB;AAAA,IACF;AACA,QAAI,eAAe;AACnB,QAAI,sBAAsB,KAAK;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA;AAAA,UAEA,UAAU,OAAO,YAAY;AApcvC,gBAAAA,KAAA;AAscY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AACA,kBAAM,QAAQ,MAAM;AAEpB,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,OAAO;AACpB,6BAAe;AAC
f,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,QAAQ,CAAC;AAChE;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM;AAAA,gBACJ;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF,IAAI,MAAM;AAEV,oBAAM,eAAe,wCAAiB;AACtC,oBAAM,mBAAmB,gDAAqB;AAE9C,mBAAI,uEAA2B,qBAAoB,MAAM;AACvD,sBAAM,wBAAwB,kBAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBACE,uEAA2B,+BAA8B,MACzD;AACA,sBAAM,wBAAwB,2BAC5B,uEAA2B;AAAA,cAC/B;AACA,mBAAI,+DAAuB,kBAAiB,MAAM;AAChD,sBAAM,oBAAoB,eACxB,+DAAuB;AAAA,cAC3B;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,gBAAI,MAAM,qBAAqB,MAAM;AACnC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAIA,MAAA,cAAc,aAAd,gBAAAA,IAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AA3oB5B,gBAAAD,KAAA;AA4oBY,kBAAM,mBAAoD;AAAA,cACxD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,gBAAI,MAAM,wBAAwB,mBAAmB,MAAM;AACzD,+BAAiB,mBAAmB,EAAE,kBACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBACE,MAAM,wBAAwB,4BAA4B,MAC1D;AACA,+BAAiB,mBAAmB,EAAE,2BACpC,MAAM,wBAAwB;AAAA,YAClC;AACA,gBAAI,MAAM,oBAAoB,gBAAgB,MAAM;AAClD,+BAAiB,mBAAmB,EAAE,qBACpC,MAAM,oBAAoB;AAAA,YAC9B;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO;AAAA,gBACL,eAAcA,MAAA,MAAM,iBAAN,OAAAA,MAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC
;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAMA,GAAE,QAAQ,UAAU;AAAA,YAC1B,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAC5C,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,OAAOA,GACJ,OAAO;AAAA,UACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,UACtC,YAAYA,GACT;AAAA,YACCA,GAAE,OAAO;AAAA,cACP,OAAOA,GAAE,OAAO;AAAA,cAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAMA,GAAE,QAAQ,UAAU,EAAE,QAAQ;AAAA,cACpC,UAAUA,GAAE,OAAO;AAAA,gBACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACpC,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;;;AMjxBH;AAAA,EAME,iCAAAC;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAGA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;AClBlB;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,E
AAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ADrEO,IAAM,0CAAN,MAEP;AAAA;AAAA,EAWE,YACE,SACA,UACA,QACA;AAdF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AA5CzC;AA0DI,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7FnD;AA8FI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,QAAQ,YAAY,CAAC;AAEnE,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA,MACA,GAAG,qDAAmB,KAAK;AAAA;AAAA,MAG3B,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAIC,+BAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAIA,+BAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtLjE;AAuLI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,iBAAiB,MAAM,YAAY;AAAA,MAC3D,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMF,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BE;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAG
F;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,2CAA2CC,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAOA,GACJ,OAAO;AAAA,MACN,eAAeA,GAAE,OAAO;AAAA,MACxB,mBAAmBA,GAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AE5XH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AA4BX,IAAM,iCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AApDrC;AAqDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAxDvC;AAyDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5EJ;AA6EI,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC3HD;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAmBX,IAAM,6BAAN,MAAyD;AAAA,EAW9D,YACW,SACQ,UACA,QACjB;AAHS;AACQ;AACA;AAbnB,SAAS,uBAAuB;AAAA,EAc7B;AAAA,E
AZH,IAAI,mBAA2B;AA9BjC;AA+BI,YAAO,UAAK,SAAS,qBAAd,YAAkC;AAAA,EAC3C;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAQA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAvDJ;AAwDI,UAAM,WAA2C,CAAC;AAElD,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,uBAAuB,SAAS,OAAO,CAAC;AAAA,IAChE;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,QACjB,GAAI,KAAK,SAAS,OAAO,EAAE,MAAM,KAAK,SAAS,KAAK,IAAI,CAAC;AAAA,MAC3D;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCC,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;AC3GD,SAAwB,4BAA4B;AAsF7C,SAAS,uBAMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,aAAa,OAAO;AAAA,IACxB,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAC1B,SACA,WAAyC,CAAC,MACvC,gBAAgB,SAAS,QAAQ;AAEtC,QAAM,kBAAkB,CACtB,SACA,WAAyC,CAAC,MAE1C,IAAI,kCAAkC,SAAS,UAAU;AAAA,IACvD,GAAG,qBAAqB,MAAM;AAAA,IAC9B,6BAA6B;AAAA,EAC/B,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAA+C,CAAC,MAEhD,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,uBAAuB,CAC3B,SACA,WAA8C,CAAC,MAE/C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,mBAAmB,CACvB,SACA,WAA0C,CAAC,MAE3C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,OAAO;AAAA,EAC9B;AAEF,QAAM,WAAW,CACf,SACA,aACG,oBAAoB,SAAS,QAAQ;AAE1C,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","UnsupportedFunctionalityError","_a","toolCall","z","UnsupportedFunctionalityError","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","UnsupportedFunctionalityError","createJsonErrorResponseHandler","UnsupportedFunctionalityError","postJsonToApi","combineHeaders","createJsonResponseHandler","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z"]}
|
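Note on the sources embedded in the map above: they include the `createOpenAICompatible` factory, whose settings (`name`, `baseURL`, optional `apiKey`, `headers`, `queryParams`, `fetch`) and model helpers (`languageModel`, `chatModel`, `completionModel`, `textEmbeddingModel`, `imageModel`) are visible in the bundled source. The following is only a minimal usage sketch based on that embedded source; the model IDs, endpoint URL, and environment variable are placeholders, not part of this package.

    import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

    // Placeholder model-ID unions and provider details; substitute the real
    // OpenAI-compatible endpoint and model names for your deployment.
    const provider = createOpenAICompatible<
      'example-chat-model',
      'example-completion-model',
      'example-embedding-model',
      'example-image-model'
    >({
      name: 'example',                       // becomes the provider prefix, e.g. "example.chat"
      baseURL: 'https://api.example.com/v1', // a trailing slash is stripped by the factory
      apiKey: process.env.EXAMPLE_API_KEY,   // when set, sent as "Authorization: Bearer <apiKey>"
    });

    // The provider is callable for chat models and exposes typed helpers for the rest.
    const chatModel = provider('example-chat-model');
    const embeddingModel = provider.textEmbeddingModel('example-embedding-model');
    const imageModel = provider.imageModel('example-image-model');

Per the embedded source, the `name` value is combined with the model type to form the reported provider id (for example `example.chat`), and `queryParams`, custom `headers`, or a custom `fetch` can be passed in the same settings object if the target endpoint needs them.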
package/internal/dist/index.d.ts
CHANGED