@ai-sdk/openai-compatible 2.0.0-beta.54 → 2.0.0-beta.56

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
  # @ai-sdk/openai-compatible
 
+ ## 2.0.0-beta.56
+
+ ### Patch Changes
+
+ - Updated dependencies [50b70d6]
+   - @ai-sdk/provider-utils@4.0.0-beta.55
+
+ ## 2.0.0-beta.55
+
+ ### Patch Changes
+
+ - 9061dc0: feat: image editing
+ - Updated dependencies [9061dc0]
+   - @ai-sdk/provider-utils@4.0.0-beta.54
+   - @ai-sdk/provider@3.0.0-beta.28
+
  ## 2.0.0-beta.54
 
  ### Patch Changes
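
The `feat: image editing` entry in 2.0.0-beta.55 is the substantive change in this range. As a rough sketch of where it surfaces, an openai-compatible provider exposes the image model whose `doGenerate` gains the new inputs (see the declaration and dist diffs below); the provider name, base URL, and model id here are placeholders, not values from this package.

```ts
// Minimal sketch: provider name, base URL, and model id are placeholders.
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

const example = createOpenAICompatible({
  name: 'example',
  baseURL: 'https://api.example.com/v1',
  apiKey: process.env.EXAMPLE_API_KEY,
});

// Returns an OpenAICompatibleImageModel, whose doGenerate gained
// `files` and `mask` support in beta.55.
const imageModel = example.imageModel('example-image-model');
```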
package/dist/index.d.mts CHANGED
@@ -202,7 +202,7 @@ declare class OpenAICompatibleImageModel implements ImageModelV3 {
  readonly maxImagesPerCall = 10;
  get provider(): string;
  constructor(modelId: OpenAICompatibleImageModelId, config: OpenAICompatibleImageModelConfig);
- doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV3['doGenerate']>>>;
+ doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, files, mask, }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV3['doGenerate']>>>;
  }
 
  interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPLETION_MODEL_IDS extends string = string, EMBEDDING_MODEL_IDS extends string = string, IMAGE_MODEL_IDS extends string = string> extends Omit<ProviderV3, 'imageModel'> {
package/dist/index.d.ts CHANGED
@@ -202,7 +202,7 @@ declare class OpenAICompatibleImageModel implements ImageModelV3 {
  readonly maxImagesPerCall = 10;
  get provider(): string;
  constructor(modelId: OpenAICompatibleImageModelId, config: OpenAICompatibleImageModelConfig);
- doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV3['doGenerate']>>>;
+ doGenerate({ prompt, n, size, aspectRatio, seed, providerOptions, headers, abortSignal, files, mask, }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<Awaited<ReturnType<ImageModelV3['doGenerate']>>>;
  }
 
  interface OpenAICompatibleProvider<CHAT_MODEL_IDS extends string = string, COMPLETION_MODEL_IDS extends string = string, EMBEDDING_MODEL_IDS extends string = string, IMAGE_MODEL_IDS extends string = string> extends Omit<ProviderV3, 'imageModel'> {
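
Both declaration files widen `doGenerate` with optional `files` and `mask` inputs. Below is a hedged sketch of calling the widened method, continuing the provider sketch above; the exact file/mask object shape and the use of `undefined` for unused options are assumptions inferred from the compiled `fileToBlob` helper further down, not from the package's published types.

```ts
// Hedged sketch of the widened call. File/mask shapes are assumptions:
// url-based entries appear to be downloaded, data-based entries carry
// bytes or base64 plus a mediaType (see fileToBlob in the dist diff).
import type { ImageModelV3 } from '@ai-sdk/provider';

async function editSky(imageModel: ImageModelV3, photo: Uint8Array) {
  const { images, warnings } = await imageModel.doGenerate({
    prompt: 'Replace the sky with a sunset',
    n: 1,
    size: '1024x1024',
    aspectRatio: undefined,
    seed: undefined,
    providerOptions: {},
    headers: undefined,
    abortSignal: undefined,
    files: [{ type: 'file', mediaType: 'image/png', data: photo }], // assumed shape
    mask: undefined, // optionally another file in the same assumed shape
  });
  return { images, warnings };
}
```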
package/dist/index.js CHANGED
@@ -1360,9 +1360,11 @@ var OpenAICompatibleImageModel = class {
  seed,
  providerOptions,
  headers,
- abortSignal
+ abortSignal,
+ files,
+ mask
  }) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f, _g;
  const warnings = [];
  if (aspectRatio != null) {
  warnings.push({
@@ -1375,6 +1377,41 @@ var OpenAICompatibleImageModel = class {
  warnings.push({ type: "unsupported", feature: "seed" });
  }
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+ if (files != null && files.length > 0) {
+ const { value: response2, responseHeaders: responseHeaders2 } = await (0, import_provider_utils5.postFormDataToApi)({
+ url: this.config.url({
+ path: "/images/edits",
+ modelId: this.modelId
+ }),
+ headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
+ formData: (0, import_provider_utils5.convertToFormData)({
+ model: this.modelId,
+ prompt,
+ image: await Promise.all(files.map((file) => fileToBlob(file))),
+ mask: mask != null ? await fileToBlob(mask) : void 0,
+ n,
+ size,
+ ...(_d = providerOptions.openai) != null ? _d : {}
+ }),
+ failedResponseHandler: (0, import_provider_utils5.createJsonErrorResponseHandler)(
+ (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
+ ),
+ successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
+ openaiCompatibleImageResponseSchema
+ ),
+ abortSignal,
+ fetch: this.config.fetch
+ });
+ return {
+ images: response2.data.map((item) => item.b64_json),
+ warnings,
+ response: {
+ timestamp: currentDate,
+ modelId: this.modelId,
+ headers: responseHeaders2
+ }
+ };
+ }
  const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
  url: this.config.url({
  path: "/images/generations",
@@ -1386,11 +1423,11 @@ var OpenAICompatibleImageModel = class {
  prompt,
  n,
  size,
- ...(_d = providerOptions.openai) != null ? _d : {},
+ ...(_f = providerOptions.openai) != null ? _f : {},
  response_format: "b64_json"
  },
  failedResponseHandler: (0, import_provider_utils5.createJsonErrorResponseHandler)(
- (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
+ (_g = this.config.errorStructure) != null ? _g : defaultOpenAICompatibleErrorStructure
  ),
  successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
  openaiCompatibleImageResponseSchema
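
Both the generations path above (which keeps `response_format: "b64_json"`) and the new edits path return images as base64 strings matching `openaiCompatibleImageResponseSchema`. A minimal Node sketch of persisting one result; the output filename is a placeholder.

```ts
// Decode and save the first base64-encoded image returned by doGenerate.
import { writeFile } from 'node:fs/promises';

async function saveFirstImage(images: string[]): Promise<void> {
  const [first] = images;
  if (first == null) return;
  await writeFile('./edited.png', Buffer.from(first, 'base64'));
}
```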
@@ -1412,12 +1449,19 @@ var OpenAICompatibleImageModel = class {
  var openaiCompatibleImageResponseSchema = import_v48.z.object({
  data: import_v48.z.array(import_v48.z.object({ b64_json: import_v48.z.string() }))
  });
+ async function fileToBlob(file) {
+ if (file.type === "url") {
+ return (0, import_provider_utils5.downloadBlob)(file.url);
+ }
+ const data = file.data instanceof Uint8Array ? file.data : (0, import_provider_utils5.convertBase64ToUint8Array)(file.data);
+ return new Blob([data], { type: file.mediaType });
+ }
 
  // src/openai-compatible-provider.ts
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
 
  // src/version.ts
- var VERSION = true ? "2.0.0-beta.54" : "0.0.0-test";
+ var VERSION = true ? "2.0.0-beta.56" : "0.0.0-test";
 
  // src/openai-compatible-provider.ts
  function createOpenAICompatible(options) {
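
The compiled `fileToBlob` helper added above is the piece that normalizes the new inputs before they are attached to the form. A TypeScript rendering of what it likely looks like in source is sketched below to clarify the accepted shapes; the `ImageFileInput` type name and the `'file'` discriminator are assumptions, while `downloadBlob` and `convertBase64ToUint8Array` are the `@ai-sdk/provider-utils` helpers referenced in the compiled output.

```ts
// TypeScript rendering of the compiled fileToBlob helper above.
import {
  convertBase64ToUint8Array,
  downloadBlob,
} from '@ai-sdk/provider-utils';

// Assumed input shape, inferred from the checks in the compiled code.
type ImageFileInput =
  | { type: 'url'; url: string }
  | { type: 'file'; data: Uint8Array | string; mediaType: string };

async function fileToBlob(file: ImageFileInput): Promise<Blob> {
  if (file.type === 'url') {
    // Remote files are fetched first; downloadBlob is assumed to resolve to a Blob.
    return downloadBlob(file.url);
  }
  // Base64 strings are decoded; Uint8Array data is used as-is.
  const data =
    file.data instanceof Uint8Array
      ? file.data
      : convertBase64ToUint8Array(file.data);
  return new Blob([data], { type: file.mediaType });
}
```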
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/chat/openai-compatible-chat-language-model.ts",…],"sourcesContent":[…],"mappings":"…"} [regenerated single-line source map; remainder of the diff truncated]
,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,IAAAC,aAAkB;AAIX,IAAM,kCAAkC,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrC,eAAe,aAAE,OAAO,EAAE,SAAS;AACrC,CAAC;;;ACpBD,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAsBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAAkC,CAAC;AAEzC,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,YAAY;AAC5B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,SAAS,yBAAyB,KAAK,EAAE;AAAA,MAC3C,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,GAAI,KAAK,UAAU,OAAO,EAAE,QAAQ,KAAK,OAAO,IAAI,CAAC;AAAA,QACvD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;APtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA5DlC;AAyEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA/FtB;AAgGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAjHnD;AAkHI,UAAM,WAA8B,CAAC;AAGrC,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,OAAO;AAAA,UACR,OAAO;AAAA,aACL,wDAAkB,KAAK,yBAAvB,YAA+C,CAAC;AAAA,UAClD,EAAE;AAAA,YACA,CAAC,CAAC,GAAG,MACH,CAAC,OAAO,KAAK,gCAAgC,KAAK,EAAE,SAAS,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,QAEA,kBAAkB,kBAAkB;AAAA,QACpC,WAAW,kBAAkB;AAAA;AAAA,QAG7B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AApNjE;A
AqNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO,iCAAiC,aAAa,KAAK;AAAA,MAC1D;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QACF;AACF,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzWvC,gBAAAC,KAAA;AA2WY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,MAAM,OAAO;AAC1B,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAO,MAAM,MAAM,MAAM;AAAA,cAC3B,CAAC;AACD;AAAA,YACF;AAIA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmBA,MAAA,MAAM,sBAAN,OAAAA,MAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,Q
AAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAjjB5B,gBAAAD,KAAA;AAkjBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,iCAAiC,KAAK;AAAA,cAC7C;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,aACtC,OAAO;AAAA,EACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,aACpB,OAAO;AAAA,IACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,aACxB,OAAO;AAAA,IACN,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,
aAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO;AAAA,cACf,WAAW,aAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAED,IAAM,kBAAkB,aAAE,OAAO;AAAA,EAC/B,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aACJ,OAAO;AAAA,QACN,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,QACpC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,QAG5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,OAAO,aAAE,OAAO;AAAA,YAChB,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,cACzB,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,YAChC,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC,EACA,QAAQ;AAAA,MACX,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBACG,aAAE,MAAM,CAAC,iBAAiB,WAAW,CAAC;;;AQhsB3C,IAAAE,yBAUO;AACP,IAAAC,aAAkB;;;ACjBX,SAAS,uCACd,OAOsB;AAVxB;AAWE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AAEpD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;AC7CA,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FO,SAASC,qBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAASC,iCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,IAAAC,aAAkB;AAIX,IAAM,4CAA4C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAM,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAW,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,E
AAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ALmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAA8B,CAAC;AAGrC,UAAM,qBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,QAAQ,CAAC;AAAA,IACzD;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,aAAa,CAAC;AAAA,IAC9D;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,uCAAuC,SAAS,KAAK;AAAA,MAC5D,cAAcC,iCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAGC,qBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAMY;AAChB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAC3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CA
AC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAGA,qBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAeD;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,uCAAuC,KAAK;AAAA,YACrD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2C,aAAE,OAAO;AAAA,EACxD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,MAAM,aAAE,OAAO;AAAA,MACf,eAAe,aAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO;AAAA,QACf,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,aAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AM3XH,IAAAE,mBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,IAAAC,aAAkB;AAIX,IAAM,2CAA2C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAAiE;AAAA,EAkBtE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAlDrC;AAmDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAtDvC;AAuDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA7EJ;AA8EI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;AAAA,MACX,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACjD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EA
CvD,kBAAkB,aACf,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AElJD,IAAAC,yBAMO;AACP,IAAAC,aAAkB;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAlDJ;AAmDI,UAAM,WAAmC,CAAC;AAE1C,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,aAAE,OAAO;AAAA,EACnD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,UAAU,aAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;ACrGD,IAAAC,yBAIO;;;ACTA,IAAM,UACX,OACI,kBACA;;;AD4FC,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,UAAU;AAAA,IACd,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,aAAa,UACjB,4CAAoB,SAAS,4BAA4B,OAAO,EAAE;AAEpE,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,IACtB,2BAA2B,QAAQ;AAAA,EACrC,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_provider_utils","import_v4","import_v4","import_provider","_a","toolCall","import_provider_utils","import_v4","import_provider","getResponseMetadata","mapOpenAICompatibleFinishReason","import_v4","mapOpenAICompatibleFinishReason","getResponseMetadata","import_provider","import_provider_utils","import_v4","import_v4","import_provider_utils","import_v4","import_provider_utils"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/chat/openai-compatible-chat-language-model.ts","../src/openai-compatible-error.ts","../src/chat/convert-openai-compatible-chat-usage.ts","../src/chat/convert-to-openai-compatible-chat-messages.ts","../src/chat/get-response-metadata.ts","../src/chat/map-openai-compatible-finish-reason.ts","../src/chat/openai-compatible-chat-options.ts","../src/chat/openai-compatible-prepare-tools.ts","../src/completion/openai-compatible-completion-language-model.ts","../src/completion/convert-openai-compatible-completion-usage.ts","../src/completion/convert-to-openai-compatible-completion-prompt.ts","../src/completion/get-response-metadata.ts","../src/completion/map-openai-compatible-finish-reason.ts","../src/completion/openai-compatible-completion-options.ts","../src/embedding/openai-compatible-embedding-model.ts","../src/embedding/openai-compatible-embedding-options.ts","../src/image/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts","../src/version.ts"],"sourcesContent":["export { OpenAICompatibleChatLanguageModel } from './chat/openai-compatible-chat-language-model';\nexport type {\n OpenAICompatibleChatModelId,\n OpenAICompatibleProviderOptions,\n} from './chat/openai-compatible-chat-options';\nexport { OpenAICompatibleCompletionLanguageModel } from './completion/openai-compatible-completion-language-model';\nexport type {\n OpenAICompatibleCompletionModelId,\n OpenAICompatibleCompletionProviderOptions,\n} from './completion/openai-compatible-completion-options';\nexport { OpenAICompatibleEmbeddingModel } from './embedding/openai-compatible-embedding-model';\nexport type {\n OpenAICompatibleEmbeddingModelId,\n OpenAICompatibleEmbeddingProviderOptions,\n} from './embedding/openai-compatible-embedding-options';\nexport { OpenAICompatibleImageModel } from './image/openai-compatible-image-model';\nexport type {\n OpenAICompatibleErrorData,\n ProviderErrorStructure,\n} from './openai-compatible-error';\nexport type { MetadataExtractor } from './chat/openai-compatible-metadata-extractor';\nexport { createOpenAICompatible } from './openai-compatible-provider';\nexport type {\n OpenAICompatibleProvider,\n OpenAICompatibleProviderSettings,\n} from './openai-compatible-provider';\nexport { VERSION } from './version';\n","import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3ProviderMetadata,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleChatUsage } from './convert-openai-compatible-chat-usage';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n openaiCompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\nimport { prepareTools } from 
'./openai-compatible-prepare-tools';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n );\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 
'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...Object.fromEntries(\n Object.entries(\n providerOptions?.[this.providerOptionsName] ?? {},\n ).filter(\n ([key]) =>\n !Object.keys(openaiCompatibleProviderOptions.shape).includes(key),\n ),\n ),\n\n reasoning_effort: compatibleOptions.reasoningEffort,\n verbosity: compatibleOptions.textVerbosity,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning =\n choice.message.reasoning_content ?? choice.message.reasoning;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n const providerMetadata: SharedV3ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...(await this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n })),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: convertOpenAICompatibleChatUsage(responseBody.usage),\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage: z.infer<typeof openaiCompatibleTokenUsageSchema> | undefined =\n undefined;\n let isFirstChunk = true;\n const providerOptionsName = this.providerOptionsName;\n let isActiveReasoning = false;\n let isActiveText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in chunk.value) {\n finishReason = 'error';\n controller.enqueue({\n type: 'error',\n error: chunk.value.error.message,\n });\n return;\n }\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n // remove this workaround when the issue is fixed\n const value = chunk.value as z.infer<typeof chunkBaseSchema>;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n const reasoningContent = delta.reasoning_content ?? 
delta.reasoning;\n if (reasoningContent) {\n if (!isActiveReasoning) {\n controller.enqueue({\n type: 'reasoning-start',\n id: 'reasoning-0',\n });\n isActiveReasoning = true;\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: 'reasoning-0',\n delta: reasoningContent,\n });\n }\n\n if (delta.content) {\n if (!isActiveText) {\n controller.enqueue({ type: 'text-start', id: 'txt-0' });\n isActiveText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: 'txt-0',\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallDelta.id,\n toolName: toolCallDelta.function.name,\n });\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n if (isActiveReasoning) {\n controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });\n }\n\n if (isActiveText) {\n controller.enqueue({ type: 'text-end', id: 'txt-0' });\n }\n\n // go through all tool calls and send the ones that are not finished\n for (const toolCall of toolCalls.filter(\n toolCall => !toolCall.hasFinished,\n )) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n }\n\n const providerMetadata: SharedV3ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (\n usage?.completion_tokens_details?.accepted_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage?.completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n usage?.completion_tokens_details?.rejected_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage?.completion_tokens_details?.rejected_prediction_tokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleChatUsage(usage),\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\nconst chunkBaseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n // Most openai-compatible models set `reasoning_content`, but some\n // providers serving `gpt-oss` set `reasoning`. 
See #7866\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) => z.union([chunkBaseSchema, errorSchema]);\n","import { z, ZodType } from 'zod/v4';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodType<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import { LanguageModelV3Usage } from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleChatUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n prompt_tokens_details?: {\n cached_tokens?: number | null;\n } | null;\n completion_tokens_details?: {\n reasoning_tokens?: number | null;\n } | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n const cacheReadTokens = usage.prompt_tokens_details?.cached_tokens ?? 0;\n const reasoningTokens =\n usage.completion_tokens_details?.reasoning_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens - cacheReadTokens,\n cacheRead: cacheReadTokens,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens - reasoningTokens,\n reasoning: reasoningTokens,\n },\n raw: usage,\n };\n}\n","import {\n LanguageModelV3Prompt,\n SharedV3ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: SharedV3ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV3Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'execution-denied':\n contentValue = output.reason ?? 'Tool execution denied.';\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleChatModelId = string;\n\nexport const openaiCompatibleProviderOptions = z.object({\n /**\n * A unique identifier representing your end-user, which can help the provider to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.string().optional(),\n\n /**\n * Controls the verbosity of the generated text. Defaults to `medium`.\n */\n textVerbosity: z.string().optional(),\n});\n\nexport type OpenAICompatibleProviderOptions = z.infer<\n typeof openaiCompatibleProviderOptions\n>;\n","import {\n LanguageModelV3CallOptions,\n SharedV3Warning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV3CallOptions['tools'];\n toolChoice?: LanguageModelV3CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: SharedV3Warning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: SharedV3Warning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider') {\n toolWarnings.push({\n type: 'unsupported',\n feature: `provider-defined tool ${tool.id}`,\n });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n ...(tool.strict != null ? 
{ strict: tool.strict } : {}),\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleCompletionUsage } from './convert-openai-compatible-completion-usage';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n openaiCompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV3\n{\n readonly specificationVersion = 'v3';\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const completionOptions =\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleCompletionProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported', feature: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported', feature: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: completionOptions.echo,\n logit_bias: completionOptions.logitBias,\n suffix: completionOptions.suffix,\n user: completionOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n if (choice.text != null && choice.text.length > 0) {\n content.push({ type: 'text', text: choice.text });\n }\n\n return {\n content,\n usage: convertOpenAICompatibleCompletionUsage(response.usage),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage:\n | {\n prompt_tokens: number | undefined;\n completion_tokens: number | undefined;\n total_tokens: number | undefined;\n }\n | undefined = undefined;\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n\n controller.enqueue({\n type: 'text-start',\n id: '0',\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: choice.text,\n });\n }\n },\n\n flush(controller) {\n if (!isFirstChunk) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleCompletionUsage(usage),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: usageSchema.nullish(),\n }),\n errorSchema,\n ]);\n","import { LanguageModelV3Usage 
} from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleCompletionUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens,\n reasoning: undefined,\n },\n raw: usage,\n };\n}\n","import {\n InvalidPromptError,\n LanguageModelV3Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV3Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleCompletionModelId = string;\n\nexport const openaiCompatibleCompletionProviderOptions = z.object({\n /**\n * Echo back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\n * The suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleCompletionProviderOptions = z.infer<\n typeof openaiCompatibleCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n OpenAICompatibleEmbeddingModelId,\n openaiCompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel implements EmbeddingModelV3 {\n readonly specificationVersion = 'v3';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? 
true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>\n > {\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n );\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: compatibleOptions.dimensions,\n user: compatibleOptions.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n warnings: [],\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n providerMetadata: response.providerMetadata,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n providerMetadata: z\n .record(z.string(), z.record(z.string(), z.any()))\n .optional(),\n});\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleEmbeddingModelId = string;\n\nexport const openaiCompatibleEmbeddingProviderOptions = z.object({\n /**\n * The number of dimensions the resulting output embeddings should have.\n * Only supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleEmbeddingProviderOptions = z.infer<\n typeof openaiCompatibleEmbeddingProviderOptions\n>;\n","import {\n ImageModelV3,\n ImageModelV3File,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertBase64ToUint8Array,\n convertToFormData,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n downloadBlob,\n FetchFunction,\n postFormDataToApi,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV3 {\n readonly specificationVersion = 'v3';\n readonly maxImagesPerCall = 10;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n files,\n mask,\n }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV3['doGenerate']>>\n > {\n const warnings: Array<SharedV3Warning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported',\n feature: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported', feature: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? 
new Date();\n\n // Image editing mode - use form data and /images/edits endpoint\n if (files != null && files.length > 0) {\n const { value: response, responseHeaders } = await postFormDataToApi({\n url: this.config.url({\n path: '/images/edits',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n formData: convertToFormData<OpenAICompatibleFormDataInput>({\n model: this.modelId,\n prompt,\n image: await Promise.all(files.map(file => fileToBlob(file))),\n mask: mask != null ? await fileToBlob(mask) : undefined,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n }),\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n\n // Standard image generation mode - use JSON and /images/generations endpoint\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n\ntype OpenAICompatibleFormDataInput = {\n model: string;\n prompt: string | undefined;\n image: Blob | Blob[];\n mask?: Blob;\n n: number;\n size: `${number}x${number}` | undefined;\n [key: string]: unknown;\n};\n\nasync function fileToBlob(file: ImageModelV3File): Promise<Blob> {\n if (file.type === 'url') {\n return downloadBlob(file.url);\n }\n\n const data =\n file.data instanceof Uint8Array\n ? 
file.data\n : convertBase64ToUint8Array(file.data);\n\n return new Blob([data as BlobPart], { type: file.mediaType });\n}\n","import {\n EmbeddingModelV3,\n ImageModelV3,\n LanguageModelV3,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport {\n OpenAICompatibleChatConfig,\n OpenAICompatibleChatLanguageModel,\n} from './chat/openai-compatible-chat-language-model';\nimport { OpenAICompatibleCompletionLanguageModel } from './completion/openai-compatible-completion-language-model';\nimport { OpenAICompatibleEmbeddingModel } from './embedding/openai-compatible-embedding-model';\nimport { OpenAICompatibleImageModel } from './image/openai-compatible-image-model';\nimport { VERSION } from './version';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV3, 'imageModel'> {\n (modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n config?: Partial<OpenAICompatibleChatConfig>,\n ): LanguageModelV3;\n\n chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV3;\n\n embeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV3;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nInclude usage information in streaming responses.\n */\n includeUsage?: boolean;\n\n /**\n * Whether the provider supports structured outputs in chat models.\n */\n supportsStructuredOutputs?: boolean;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const headers = {\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n };\n\n const getHeaders = () =>\n withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION}`);\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (modelId: CHAT_MODEL_IDS) =>\n createChatModel(modelId);\n\n const createChatModel = (modelId: CHAT_MODEL_IDS) =>\n new OpenAICompatibleChatLanguageModel(modelId, {\n ...getCommonModelConfig('chat'),\n includeUsage: options.includeUsage,\n supportsStructuredOutputs: options.supportsStructuredOutputs,\n });\n\n const createCompletionModel = (modelId: COMPLETION_MODEL_IDS) =>\n new OpenAICompatibleCompletionLanguageModel(modelId, {\n ...getCommonModelConfig('completion'),\n includeUsage: options.includeUsage,\n });\n\n const createEmbeddingModel = (modelId: EMBEDDING_MODEL_IDS) =>\n new OpenAICompatibleEmbeddingModel(modelId, {\n ...getCommonModelConfig('embedding'),\n });\n\n const createImageModel = (modelId: IMAGE_MODEL_IDS) =>\n new OpenAICompatibleImageModel(modelId, getCommonModelConfig('image'));\n\n const provider = (modelId: CHAT_MODEL_IDS) => createLanguageModel(modelId);\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n","declare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? 
__PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBAYO;AACP,IAAAC,aAAkB;;;ACvBlB,gBAA2B;AAEpB,IAAM,kCAAkC,YAAE,OAAO;AAAA,EACtD,OAAO,YAAE,OAAO;AAAA,IACd,SAAS,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,YAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,YAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,YAAE,MAAM,CAAC,YAAE,OAAO,GAAG,YAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC3BK,SAAS,iCACd,OAasB;AAhBxB;AAiBE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AACpD,QAAM,mBAAkB,iBAAM,0BAAN,mBAA6B,kBAA7B,YAA8C;AACtE,QAAM,mBACJ,iBAAM,8BAAN,mBAAiC,qBAAjC,YAAqD;AAEvD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS,eAAe;AAAA,MACxB,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM,mBAAmB;AAAA,MACzB,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;ACtDA,sBAIO;AAEP,4BAAgC;AAEhC,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAhB9B;AAiBE,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,oBAC9D;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AACH,8BAAe,YAAO,WAAP,YAAiB;AAChC;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,YACT,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC1JO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV
,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,IAAAC,aAAkB;AAIX,IAAM,kCAAkC,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrC,eAAe,aAAE,OAAO,EAAE,SAAS;AACrC,CAAC;;;ACpBD,IAAAC,mBAIO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAsBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAAkC,CAAC;AAEzC,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,YAAY;AAC5B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,SAAS,yBAAyB,KAAK,EAAE;AAAA,MAC3C,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,GAAI,KAAK,UAAU,OAAO,EAAE,QAAQ,KAAK,OAAO,IAAI,CAAC;AAAA,QACvD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;APtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA5DlC;AAyEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA/FtB;AAgGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAjHnD;AAkHI,UAAM,WAA8B,CAAC;AAGrC,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,OAAO;AAAA,UACR,OAAO;AAAA,aACL,wDAAkB,KAAK,yBAAvB,YAA+C,CAAC;AAAA,UAClD,EAAE;AAAA,YACA,CAAC,CAAC,GAAG,MACH,CAAC,OAAO,KAAK,gCAAgC,KAAK,EAAE,SAAS,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,QAEA,kBAAkB,kBAAkB;AAAA,QACpC,WAAW,kBAAkB;AAAA;AAAA,QAG7B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AApNjE;A
AqNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO,iCAAiC,aAAa,KAAK;AAAA,MAC1D;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QACF;AACF,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzWvC,gBAAAC,KAAA;AA2WY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,MAAM,OAAO;AAC1B,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAO,MAAM,MAAM,MAAM;AAAA,cAC3B,CAAC;AACD;AAAA,YACF;AAIA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmBA,MAAA,MAAM,sBAAN,OAAAA,MAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,Q
AAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAjjB5B,gBAAAD,KAAA;AAkjBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,iCAAiC,KAAK;AAAA,cAC7C;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmC,aACtC,OAAO;AAAA,EACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuB,aACpB,OAAO;AAAA,IACN,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,aACxB,OAAO;AAAA,IACN,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4B,aAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,
aAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO;AAAA,cACf,WAAW,aAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAED,IAAM,kBAAkB,aAAE,OAAO;AAAA,EAC/B,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aACJ,OAAO;AAAA,QACN,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,QACpC,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,QAG5B,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,OAAO,aAAE,OAAO;AAAA,YAChB,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAU,aAAE,OAAO;AAAA,cACjB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,cACzB,WAAW,aAAE,OAAO,EAAE,QAAQ;AAAA,YAChC,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC,EACA,QAAQ;AAAA,MACX,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBACG,aAAE,MAAM,CAAC,iBAAiB,WAAW,CAAC;;;AQhsB3C,IAAAE,yBAUO;AACP,IAAAC,aAAkB;;;ACjBX,SAAS,uCACd,OAOsB;AAVxB;AAWE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AAEpD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;AC7CA,IAAAC,mBAIO;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FO,SAASC,qBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAASC,iCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,IAAAC,aAAkB;AAIX,IAAM,4CAA4C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAM,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAW,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,E
AAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ALmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,4BAAwB,uDAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAA8B,CAAC;AAGrC,UAAM,qBACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,QAAQ,CAAC;AAAA,IACzD;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,aAAa,CAAC;AAAA,IAC9D;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,uCAAuC,SAAS,KAAK;AAAA,MAC5D,cAAcC,iCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAGC,qBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,+BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAMY;AAChB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAC3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CA
AC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAGA,qBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAeD;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,uCAAuC,KAAK;AAAA,YACrD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2C,aAAE,OAAO;AAAA,EACxD,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,MAAM,aAAE,OAAO;AAAA,MACf,eAAe,aAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEA,aAAE,MAAM;AAAA,EACN,aAAE,OAAO;AAAA,IACP,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,aAAE;AAAA,MACT,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO;AAAA,QACf,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,aAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AM3XH,IAAAE,mBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,IAAAC,aAAkB;AAIX,IAAM,2CAA2C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAAiE;AAAA,EAkBtE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAlDrC;AAmDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAtDvC;AAuDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA7EJ;AA8EI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,eAAM,6CAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,eAAM,6CAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;AAAA,MACX,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACjD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EA
CvD,kBAAkB,aACf,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AE9ID,IAAAC,yBAUO;AACP,IAAAC,aAAkB;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5DJ;AA6DI,UAAM,WAAmC,CAAC;AAE1C,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AAGvE,QAAI,SAAS,QAAQ,MAAM,SAAS,GAAG;AACrC,YAAM,EAAE,OAAOC,WAAU,iBAAAC,iBAAgB,IAAI,UAAM,0CAAkB;AAAA,QACnE,KAAK,KAAK,OAAO,IAAI;AAAA,UACnB,MAAM;AAAA,UACN,SAAS,KAAK;AAAA,QAChB,CAAC;AAAA,QACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,QACtD,cAAU,0CAAiD;AAAA,UACzD,OAAO,KAAK;AAAA,UACZ;AAAA,UACA,OAAO,MAAM,QAAQ,IAAI,MAAM,IAAI,UAAQ,WAAW,IAAI,CAAC,CAAC;AAAA,UAC5D,MAAM,QAAQ,OAAO,MAAM,WAAW,IAAI,IAAI;AAAA,UAC9C;AAAA,UACA;AAAA,UACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QACjC,CAAC;AAAA,QACD,2BAAuB;AAAA,WACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,QAChC;AAAA,QACA,+BAA2B;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,QACA,OAAO,KAAK,OAAO;AAAA,MACrB,CAAC;AAED,aAAO;AAAA,QACL,QAAQD,UAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,QAC/C;AAAA,QACA,UAAU;AAAA,UACR,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,UACd,SAASC;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAGA,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,2BAAuB;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsC,aAAE,OAAO;AAAA,EACnD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,UAAU,aAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;AAYD,eAAe,WAAW,MAAuC;AAC/D,MAAI,KAAK,SAAS,OAAO;AACvB,eAAO,qCAAa,KAAK,GAAG;AAAA,EAC9B;AAEA,QAAM,OACJ,KAAK,gBAAgB,aACjB,KAAK,WACL,kDAA0B,KAAK,IAAI;AAEzC,SAAO,IAAI,KAAK,CAAC,IAAgB,GAAG,EAAE,MAAM,KAAK,UAAU,CAAC;AAC9D;;;AC9KA,IAAAC,yBAIO;;;ACTA,IAAM,UACX,OACI,kBACA;;;AD4FC,SAAS,uBAMd,SAMA;AACA,QAAM,cAAU,6CAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,UAAU;AAAA,IACd,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,aAAa,UACjB,4CAAoB,SAAS,4BAA4B,OAAO,EAAE;AAEpE,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,IACtB,2BAA2B,QAAQ;AAAA,EACrC,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WA
AS,kBAAkB;AAC3B,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["import_provider","import_provider_utils","import_v4","import_v4","import_provider","_a","toolCall","import_provider_utils","import_v4","import_provider","getResponseMetadata","mapOpenAICompatibleFinishReason","import_v4","mapOpenAICompatibleFinishReason","getResponseMetadata","import_provider","import_provider_utils","import_v4","import_v4","import_provider_utils","import_v4","response","responseHeaders","import_provider_utils"]}
package/dist/index.mjs CHANGED
@@ -1344,8 +1344,12 @@ var openaiTextEmbeddingResponseSchema = z7.object({
1344
1344
  // src/image/openai-compatible-image-model.ts
1345
1345
  import {
1346
1346
  combineHeaders as combineHeaders4,
1347
+ convertBase64ToUint8Array,
1348
+ convertToFormData,
1347
1349
  createJsonErrorResponseHandler as createJsonErrorResponseHandler4,
1348
1350
  createJsonResponseHandler as createJsonResponseHandler4,
1351
+ downloadBlob,
1352
+ postFormDataToApi,
1349
1353
  postJsonToApi as postJsonToApi4
1350
1354
  } from "@ai-sdk/provider-utils";
1351
1355
  import { z as z8 } from "zod/v4";
@@ -1367,9 +1371,11 @@ var OpenAICompatibleImageModel = class {
1367
1371
  seed,
1368
1372
  providerOptions,
1369
1373
  headers,
1370
- abortSignal
1374
+ abortSignal,
1375
+ files,
1376
+ mask
1371
1377
  }) {
1372
- var _a, _b, _c, _d, _e;
1378
+ var _a, _b, _c, _d, _e, _f, _g;
1373
1379
  const warnings = [];
1374
1380
  if (aspectRatio != null) {
1375
1381
  warnings.push({
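
The hunk above threads two new call options, files and mask, through doGenerate (and widens the temporaries to _f and _g for the extra optional-chaining further down). The shapes these options accept are sketched here for orientation only; they are inferred from the fileToBlob helper added later in this diff, and the published ImageModelV3 types in @ai-sdk/provider remain authoritative.

    // Illustrative sketch (not the package's published types) of the image
    // inputs that `files` and `mask` appear to accept, inferred from fileToBlob:
    // the helper only checks for type === 'url' and otherwise reads `data` and
    // `mediaType`, so the 'data' tag below is an assumption.
    type UrlImageInput = {
      type: 'url';
      url: string; // fetched with downloadBlob(file.url)
    };

    type BinaryImageInput = {
      type: 'data'; // assumed tag; any non-'url' value takes this branch
      data: Uint8Array | string; // base64 strings go through convertBase64ToUint8Array
      mediaType: string; // becomes the Blob's MIME type
    };

    type ImageEditInput = UrlImageInput | BinaryImageInput;
    // doGenerate now also receives: files?: ImageEditInput[]; mask?: ImageEditInput;
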
@@ -1382,6 +1388,41 @@ var OpenAICompatibleImageModel = class {
1382
1388
  warnings.push({ type: "unsupported", feature: "seed" });
1383
1389
  }
1384
1390
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
1391
+ if (files != null && files.length > 0) {
1392
+ const { value: response2, responseHeaders: responseHeaders2 } = await postFormDataToApi({
1393
+ url: this.config.url({
1394
+ path: "/images/edits",
1395
+ modelId: this.modelId
1396
+ }),
1397
+ headers: combineHeaders4(this.config.headers(), headers),
1398
+ formData: convertToFormData({
1399
+ model: this.modelId,
1400
+ prompt,
1401
+ image: await Promise.all(files.map((file) => fileToBlob(file))),
1402
+ mask: mask != null ? await fileToBlob(mask) : void 0,
1403
+ n,
1404
+ size,
1405
+ ...(_d = providerOptions.openai) != null ? _d : {}
1406
+ }),
1407
+ failedResponseHandler: createJsonErrorResponseHandler4(
1408
+ (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
1409
+ ),
1410
+ successfulResponseHandler: createJsonResponseHandler4(
1411
+ openaiCompatibleImageResponseSchema
1412
+ ),
1413
+ abortSignal,
1414
+ fetch: this.config.fetch
1415
+ });
1416
+ return {
1417
+ images: response2.data.map((item) => item.b64_json),
1418
+ warnings,
1419
+ response: {
1420
+ timestamp: currentDate,
1421
+ modelId: this.modelId,
1422
+ headers: responseHeaders2
1423
+ }
1424
+ };
1425
+ }
1385
1426
  const { value: response, responseHeaders } = await postJsonToApi4({
1386
1427
  url: this.config.url({
1387
1428
  path: "/images/generations",
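
With the branch above in place, a call that carries files is sent as multipart form data to /images/edits (built with convertToFormData and posted with postFormDataToApi), while prompt-only calls continue through the JSON /images/generations request that follows. A minimal usage sketch, assuming an OpenAI-compatible endpoint that actually implements /images/edits; the provider name, base URL, API key, model id, and image URL are placeholders, and the files entry follows the url-shaped input sketched above:

    import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

    const provider = createOpenAICompatible({
      name: 'example-provider',              // placeholder
      baseURL: 'https://api.example.com/v1', // placeholder
      apiKey: 'sk-placeholder',              // placeholder
    });

    const model = provider.imageModel('example-image-model'); // placeholder id

    async function editImage() {
      // Because files is non-empty, this build posts multipart form data to
      // /images/edits; without files it falls through to /images/generations.
      const { images, warnings } = await model.doGenerate({
        prompt: 'Add a red scarf to the person in the photo',
        n: 1,
        size: '1024x1024',
        aspectRatio: undefined, // unsupported by this model; only adds a warning
        seed: undefined,        // unsupported by this model; only adds a warning
        files: [{ type: 'url', url: 'https://example.com/photo.png' }],
        mask: undefined,        // optionally another image input, sent as the mask field
        providerOptions: {},    // entries under openai are spread into the form data
        headers: undefined,
        abortSignal: undefined,
      });
      return { images, warnings }; // images are base64 (b64_json) strings
    }
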
@@ -1393,11 +1434,11 @@ var OpenAICompatibleImageModel = class {
1393
1434
  prompt,
1394
1435
  n,
1395
1436
  size,
1396
- ...(_d = providerOptions.openai) != null ? _d : {},
1437
+ ...(_f = providerOptions.openai) != null ? _f : {},
1397
1438
  response_format: "b64_json"
1398
1439
  },
1399
1440
  failedResponseHandler: createJsonErrorResponseHandler4(
1400
- (_e = this.config.errorStructure) != null ? _e : defaultOpenAICompatibleErrorStructure
1441
+ (_g = this.config.errorStructure) != null ? _g : defaultOpenAICompatibleErrorStructure
1401
1442
  ),
1402
1443
  successfulResponseHandler: createJsonResponseHandler4(
1403
1444
  openaiCompatibleImageResponseSchema
@@ -1419,6 +1460,13 @@ var OpenAICompatibleImageModel = class {
1419
1460
  var openaiCompatibleImageResponseSchema = z8.object({
1420
1461
  data: z8.array(z8.object({ b64_json: z8.string() }))
1421
1462
  });
1463
+ async function fileToBlob(file) {
1464
+ if (file.type === "url") {
1465
+ return downloadBlob(file.url);
1466
+ }
1467
+ const data = file.data instanceof Uint8Array ? file.data : convertBase64ToUint8Array(file.data);
1468
+ return new Blob([data], { type: file.mediaType });
1469
+ }
1422
1470
 
1423
1471
  // src/openai-compatible-provider.ts
1424
1472
  import {
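
Both request paths resolve to the same response handling: the schema above only requires data[].b64_json, the generations body still forces response_format: "b64_json", and the new edits request relies on the provider returning base64 data by default. A small follow-up sketch, assuming a Node environment; saveBase64Image is an illustrative helper, not part of this package:

    import { writeFileSync } from 'node:fs';

    // b64_json values are raw base64 payloads (no data: URL prefix),
    // so they can be decoded and written out directly.
    function saveBase64Image(b64: string, path: string): void {
      writeFileSync(path, Buffer.from(b64, 'base64'));
    }

    // e.g. with the editImage() sketch above:
    //   const { images } = await editImage();
    //   saveBase64Image(images[0], 'edited.png');
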
@@ -1427,7 +1475,7 @@ import {
1427
1475
  } from "@ai-sdk/provider-utils";
1428
1476
 
1429
1477
  // src/version.ts
1430
- var VERSION = true ? "2.0.0-beta.54" : "0.0.0-test";
1478
+ var VERSION = true ? "2.0.0-beta.56" : "0.0.0-test";
1431
1479
 
1432
1480
  // src/openai-compatible-provider.ts
1433
1481
  function createOpenAICompatible(options) {
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/chat/openai-compatible-chat-language-model.ts","../src/openai-compatible-error.ts","../src/chat/convert-openai-compatible-chat-usage.ts","../src/chat/convert-to-openai-compatible-chat-messages.ts","../src/chat/get-response-metadata.ts","../src/chat/map-openai-compatible-finish-reason.ts","../src/chat/openai-compatible-chat-options.ts","../src/chat/openai-compatible-prepare-tools.ts","../src/completion/openai-compatible-completion-language-model.ts","../src/completion/convert-openai-compatible-completion-usage.ts","../src/completion/convert-to-openai-compatible-completion-prompt.ts","../src/completion/get-response-metadata.ts","../src/completion/map-openai-compatible-finish-reason.ts","../src/completion/openai-compatible-completion-options.ts","../src/embedding/openai-compatible-embedding-model.ts","../src/embedding/openai-compatible-embedding-options.ts","../src/image/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts","../src/version.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3ProviderMetadata,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleChatUsage } from './convert-openai-compatible-chat-usage';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n openaiCompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\nimport { prepareTools } from './openai-compatible-prepare-tools';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n );\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...Object.fromEntries(\n Object.entries(\n providerOptions?.[this.providerOptionsName] ?? 
{},\n ).filter(\n ([key]) =>\n !Object.keys(openaiCompatibleProviderOptions.shape).includes(key),\n ),\n ),\n\n reasoning_effort: compatibleOptions.reasoningEffort,\n verbosity: compatibleOptions.textVerbosity,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning =\n choice.message.reasoning_content ?? choice.message.reasoning;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n const providerMetadata: SharedV3ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...(await this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n })),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: convertOpenAICompatibleChatUsage(responseBody.usage),\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage: z.infer<typeof openaiCompatibleTokenUsageSchema> | undefined =\n undefined;\n let isFirstChunk = true;\n const providerOptionsName = this.providerOptionsName;\n let isActiveReasoning = false;\n let isActiveText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in chunk.value) {\n finishReason = 'error';\n controller.enqueue({\n type: 'error',\n error: chunk.value.error.message,\n });\n return;\n }\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n // remove this workaround when the issue is fixed\n const value = chunk.value as z.infer<typeof chunkBaseSchema>;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n const reasoningContent = delta.reasoning_content ?? 
delta.reasoning;\n if (reasoningContent) {\n if (!isActiveReasoning) {\n controller.enqueue({\n type: 'reasoning-start',\n id: 'reasoning-0',\n });\n isActiveReasoning = true;\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: 'reasoning-0',\n delta: reasoningContent,\n });\n }\n\n if (delta.content) {\n if (!isActiveText) {\n controller.enqueue({ type: 'text-start', id: 'txt-0' });\n isActiveText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: 'txt-0',\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallDelta.id,\n toolName: toolCallDelta.function.name,\n });\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n if (isActiveReasoning) {\n controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });\n }\n\n if (isActiveText) {\n controller.enqueue({ type: 'text-end', id: 'txt-0' });\n }\n\n // go through all tool calls and send the ones that are not finished\n for (const toolCall of toolCalls.filter(\n toolCall => !toolCall.hasFinished,\n )) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n }\n\n const providerMetadata: SharedV3ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (\n usage?.completion_tokens_details?.accepted_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage?.completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n usage?.completion_tokens_details?.rejected_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage?.completion_tokens_details?.rejected_prediction_tokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleChatUsage(usage),\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\nconst chunkBaseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n // Most openai-compatible models set `reasoning_content`, but some\n // providers serving `gpt-oss` set `reasoning`. 
See #7866\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) => z.union([chunkBaseSchema, errorSchema]);\n","import { z, ZodType } from 'zod/v4';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodType<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import { LanguageModelV3Usage } from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleChatUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n prompt_tokens_details?: {\n cached_tokens?: number | null;\n } | null;\n completion_tokens_details?: {\n reasoning_tokens?: number | null;\n } | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n const cacheReadTokens = usage.prompt_tokens_details?.cached_tokens ?? 0;\n const reasoningTokens =\n usage.completion_tokens_details?.reasoning_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens - cacheReadTokens,\n cacheRead: cacheReadTokens,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens - reasoningTokens,\n reasoning: reasoningTokens,\n },\n raw: usage,\n };\n}\n","import {\n LanguageModelV3Prompt,\n SharedV3ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: SharedV3ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV3Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'execution-denied':\n contentValue = output.reason ?? 'Tool execution denied.';\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleChatModelId = string;\n\nexport const openaiCompatibleProviderOptions = z.object({\n /**\n * A unique identifier representing your end-user, which can help the provider to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.string().optional(),\n\n /**\n * Controls the verbosity of the generated text. Defaults to `medium`.\n */\n textVerbosity: z.string().optional(),\n});\n\nexport type OpenAICompatibleProviderOptions = z.infer<\n typeof openaiCompatibleProviderOptions\n>;\n","import {\n LanguageModelV3CallOptions,\n SharedV3Warning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV3CallOptions['tools'];\n toolChoice?: LanguageModelV3CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: SharedV3Warning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: SharedV3Warning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider') {\n toolWarnings.push({\n type: 'unsupported',\n feature: `provider-defined tool ${tool.id}`,\n });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n ...(tool.strict != null ? 
{ strict: tool.strict } : {}),\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleCompletionUsage } from './convert-openai-compatible-completion-usage';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n openaiCompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV3\n{\n readonly specificationVersion = 'v3';\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const completionOptions =\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleCompletionProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported', feature: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported', feature: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: completionOptions.echo,\n logit_bias: completionOptions.logitBias,\n suffix: completionOptions.suffix,\n user: completionOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n if (choice.text != null && choice.text.length > 0) {\n content.push({ type: 'text', text: choice.text });\n }\n\n return {\n content,\n usage: convertOpenAICompatibleCompletionUsage(response.usage),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage:\n | {\n prompt_tokens: number | undefined;\n completion_tokens: number | undefined;\n total_tokens: number | undefined;\n }\n | undefined = undefined;\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n\n controller.enqueue({\n type: 'text-start',\n id: '0',\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: choice.text,\n });\n }\n },\n\n flush(controller) {\n if (!isFirstChunk) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleCompletionUsage(usage),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: usageSchema.nullish(),\n }),\n errorSchema,\n ]);\n","import { LanguageModelV3Usage 
} from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleCompletionUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens,\n reasoning: undefined,\n },\n raw: usage,\n };\n}\n","import {\n InvalidPromptError,\n LanguageModelV3Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV3Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleCompletionModelId = string;\n\nexport const openaiCompatibleCompletionProviderOptions = z.object({\n /**\n * Echo back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\n * The suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleCompletionProviderOptions = z.infer<\n typeof openaiCompatibleCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n OpenAICompatibleEmbeddingModelId,\n openaiCompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel implements EmbeddingModelV3 {\n readonly specificationVersion = 'v3';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? 
true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>\n > {\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n );\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: compatibleOptions.dimensions,\n user: compatibleOptions.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n warnings: [],\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n providerMetadata: response.providerMetadata,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n providerMetadata: z\n .record(z.string(), z.record(z.string(), z.any()))\n .optional(),\n});\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleEmbeddingModelId = string;\n\nexport const openaiCompatibleEmbeddingProviderOptions = z.object({\n /**\n * The number of dimensions the resulting output embeddings should have.\n * Only supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleEmbeddingProviderOptions = z.infer<\n typeof openaiCompatibleEmbeddingProviderOptions\n>;\n","import { ImageModelV3, SharedV3Warning } from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV3 {\n readonly specificationVersion = 'v3';\n readonly maxImagesPerCall = 10;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV3['doGenerate']>>\n > {\n const warnings: Array<SharedV3Warning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported',\n feature: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported', feature: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? new Date();\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n","import {\n EmbeddingModelV3,\n ImageModelV3,\n LanguageModelV3,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport {\n OpenAICompatibleChatConfig,\n OpenAICompatibleChatLanguageModel,\n} from './chat/openai-compatible-chat-language-model';\nimport { OpenAICompatibleCompletionLanguageModel } from './completion/openai-compatible-completion-language-model';\nimport { OpenAICompatibleEmbeddingModel } from './embedding/openai-compatible-embedding-model';\nimport { OpenAICompatibleImageModel } from './image/openai-compatible-image-model';\nimport { VERSION } from './version';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV3, 'imageModel'> {\n (modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n config?: Partial<OpenAICompatibleChatConfig>,\n ): LanguageModelV3;\n\n chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV3;\n\n embeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV3;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nInclude usage information in streaming responses.\n */\n includeUsage?: boolean;\n\n /**\n * Whether the provider supports structured outputs in chat models.\n */\n supportsStructuredOutputs?: boolean;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const headers = {\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n };\n\n const getHeaders = () =>\n withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION}`);\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (modelId: CHAT_MODEL_IDS) =>\n createChatModel(modelId);\n\n const createChatModel = (modelId: CHAT_MODEL_IDS) =>\n new OpenAICompatibleChatLanguageModel(modelId, {\n ...getCommonModelConfig('chat'),\n includeUsage: options.includeUsage,\n supportsStructuredOutputs: options.supportsStructuredOutputs,\n });\n\n const createCompletionModel = (modelId: COMPLETION_MODEL_IDS) =>\n new OpenAICompatibleCompletionLanguageModel(modelId, {\n ...getCommonModelConfig('completion'),\n includeUsage: options.includeUsage,\n });\n\n const createEmbeddingModel = (modelId: EMBEDDING_MODEL_IDS) =>\n new OpenAICompatibleEmbeddingModel(modelId, {\n ...getCommonModelConfig('embedding'),\n });\n\n const createImageModel = (modelId: IMAGE_MODEL_IDS) =>\n new OpenAICompatibleImageModel(modelId, getCommonModelConfig('image'));\n\n const provider = (modelId: CHAT_MODEL_IDS) => createLanguageModel(modelId);\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n","declare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? 
__PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACvBlB,SAAS,SAAkB;AAEpB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC3BK,SAAS,iCACd,OAasB;AAhBxB;AAiBE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AACpD,QAAM,mBAAkB,iBAAM,0BAAN,mBAA6B,kBAA7B,YAA8C;AACtE,QAAM,mBACJ,iBAAM,8BAAN,mBAAiC,qBAAjC,YAAqD;AAEvD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS,eAAe;AAAA,MACxB,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM,mBAAmB;AAAA,MACzB,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;ACtDA;AAAA,EAGE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEhC,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAhB9B;AAiBE,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,oBAC9D;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AACH,8BAAe,YAAO,WAAP,YAAiB;AAChC;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,YACT,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC1JO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAI
G;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAIX,IAAM,kCAAkCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAMA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrC,eAAeA,GAAE,OAAO,EAAE,SAAS;AACrC,CAAC;;;ACpBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAsBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAAkC,CAAC;AAEzC,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,YAAY;AAC5B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,SAAS,yBAAyB,KAAK,EAAE;AAAA,MAC3C,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,GAAI,KAAK,UAAU,OAAO,EAAE,QAAQ,KAAK,OAAO,IAAI,CAAC;AAAA,QACvD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;APtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA5DlC;AAyEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA/FtB;AAgGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAjHnD;AAkHI,UAAM,WAA8B,CAAC;AAGrC,UAAM,oBAAoB,OAAO;AAAA,OAC9B,WAAM,qBAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,WAAM,qBAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,OAAO;AAAA,UACR,OAAO;AAAA,aACL,wDAAkB,KAAK,yBAAvB,YAA+C,CAAC;AAAA,UAClD,EAAE;AAAA,YACA,CAAC,CAAC,GAAG,MACH,CAAC,OAAO,KAAK,gCAAgC,KAAK,EAAE,SAAS,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,QAEA,kBAAkB,kBAAkB;AAAA,QACpC,WAAW,kBAAkB;AAAA;AAAA,QAG7B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU
,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AApNjE;AAqNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO,iCAAiC,aAAa,KAAK;AAAA,MAC1D;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QACF;AACF,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzWvC,gBAAAC,KAAA;AA2WY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,MAAM,OAAO;AAC1B,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAO,MAAM,MAAM,MAAM;AAAA,cAC3B,CAAC;AACD;AAAA,YACF;AAIA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmBA,MAAA,MAAM,sBAAN,OAAAA,MAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,
MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAjjB5B,gBAAAD,KAAA;AAkjBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,iCAAiC,KAAK;AAAA,cAC7C;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS
A,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAED,IAAM,kBAAkBA,GAAE,OAAO;AAAA,EAC/B,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GACJ,OAAO;AAAA,QACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,QACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,QAG5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,OAAOA,GAAE,OAAO;AAAA,YAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,YAChC,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC,EACA,QAAQ;AAAA,MACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBACGA,GAAE,MAAM,CAAC,iBAAiB,WAAW,CAAC;;;AQhsB3C;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,wBAAAC;AAAA,EAEA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;ACjBX,SAAS,uCACd,OAOsB;AAVxB;AAWE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AAEpD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;AC7CA;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FO,SAASC,qBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAASC,iCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAA
K;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAIX,IAAM,4CAA4CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAMA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAWA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ALmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAA8B,CAAC;AAGrC,UAAM,qBACH,WAAMC,sBAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,QAAQ,CAAC;AAAA,IACzD;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,aAAa,CAAC;AAAA,IAC9D;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,uCAAuC,SAAS,KAAK;AAAA,MAC5D,cAAcC,iCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAGC,qBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMJ,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BI;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAMY;AAChB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAC3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,
MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAGD,qBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAeD;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,uCAAuC,KAAK;AAAA,YACrD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAcG,GAAE,OAAO;AAAA,EAC3B,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2CA,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AM3XH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACZlB,SAAS,KAAAC,UAAS;AAIX,IAAM,2CAA2CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAYA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAAiE;AAAA,EAkBtE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAlDrC;AAmDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAtDvC;AAuDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA7EJ;AA8EI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,WAAMC,sBAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,WAAMA,sBAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;A
AAA,MACX,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACvD,kBAAkBA,GACf,OAAOA,GAAE,OAAO,GAAGA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AElJD;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAlDJ;AAmDI,UAAM,WAAmC,CAAC;AAE1C,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AACvE,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCC,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;;;ACrGD;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACTA,IAAM,UACX,OACI,kBACA;;;AD4FC,SAAS,uBAMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,UAAU;AAAA,IACd,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,aAAa,MACjB,oBAAoB,SAAS,4BAA4B,OAAO,EAAE;AAEpE,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,IACtB,2BAA2B,QAAQ;AAAA,EACrC,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","z","UnsupportedFunctionalityError","_a","toolCall","z","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","parseProviderOptions","postJsonToApi","z","UnsupportedFunctionalityError","getResponseMetadata","mapOpenAICompatibleFinishReason","z","createJsonErrorResponseHandler","parseProviderOptions","postJsonToApi","combineHeaders","createJsonResponseHandler","mapOpenAI
CompatibleFinishReason","getResponseMetadata","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","parseProviderOptions","postJsonToApi","z","z","parseProviderOptions","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z"]}
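For orientation, the provider factory embedded in the map above (createOpenAICompatible in src/openai-compatible-provider.ts) is typically driven as in the following sketch. This example is not part of the diff: the import specifier, base URL, API key, and model ids are placeholder assumptions, and only options shown in the OpenAICompatibleProviderSettings interface above are used.

    import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

    const provider = createOpenAICompatible({
      // required settings (see OpenAICompatibleProviderSettings above):
      baseURL: 'https://api.example.com/v1',
      name: 'example-provider',
      // optional: added as `Authorization: Bearer <apiKey>` before any custom headers
      apiKey: 'YOUR_API_KEY',
      // optional: appended to every request URL as query parameters
      queryParams: { 'api-version': '2024-01-01' },
      // optional: request usage information in streaming responses
      includeUsage: true,
    });

    // Calling the provider directly returns a chat language model;
    // the other model types have dedicated factory methods:
    const chatModel = provider('example-chat-model');
    const completionModel = provider.completionModel('example-completion-model');
    const embeddingModel = provider.embeddingModel('example-embedding-model');
    const imageModel = provider.imageModel('example-image-model');

The model ids and URLs above are illustrative only; the concrete values depend on the OpenAI-compatible endpoint being targeted.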
1
+ {"version":3,"sources":["../src/chat/openai-compatible-chat-language-model.ts","../src/openai-compatible-error.ts","../src/chat/convert-openai-compatible-chat-usage.ts","../src/chat/convert-to-openai-compatible-chat-messages.ts","../src/chat/get-response-metadata.ts","../src/chat/map-openai-compatible-finish-reason.ts","../src/chat/openai-compatible-chat-options.ts","../src/chat/openai-compatible-prepare-tools.ts","../src/completion/openai-compatible-completion-language-model.ts","../src/completion/convert-openai-compatible-completion-usage.ts","../src/completion/convert-to-openai-compatible-completion-prompt.ts","../src/completion/get-response-metadata.ts","../src/completion/map-openai-compatible-finish-reason.ts","../src/completion/openai-compatible-completion-options.ts","../src/embedding/openai-compatible-embedding-model.ts","../src/embedding/openai-compatible-embedding-options.ts","../src/image/openai-compatible-image-model.ts","../src/openai-compatible-provider.ts","../src/version.ts"],"sourcesContent":["import {\n APICallError,\n InvalidResponseDataError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3ProviderMetadata,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n isParsableJson,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleChatUsage } from './convert-openai-compatible-chat-usage';\nimport { convertToOpenAICompatibleChatMessages } from './convert-to-openai-compatible-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleChatModelId,\n openaiCompatibleProviderOptions,\n} from './openai-compatible-chat-options';\nimport { MetadataExtractor } from './openai-compatible-metadata-extractor';\nimport { prepareTools } from './openai-compatible-prepare-tools';\n\nexport type OpenAICompatibleChatConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n includeUsage?: boolean;\n errorStructure?: ProviderErrorStructure<any>;\n metadataExtractor?: MetadataExtractor;\n\n /**\n * Whether the model supports structured outputs.\n */\n supportsStructuredOutputs?: boolean;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3';\n\n readonly supportsStructuredOutputs: boolean;\n\n readonly modelId: OpenAICompatibleChatModelId;\n private readonly config: OpenAICompatibleChatConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleChatModelId,\n config: OpenAICompatibleChatConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? 
defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleChatChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n\n this.supportsStructuredOutputs = config.supportsStructuredOutputs ?? false;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n providerOptions,\n stopSequences,\n responseFormat,\n seed,\n toolChoice,\n tools,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleProviderOptions,\n })) ?? {},\n );\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n !this.supportsStructuredOutputs\n ) {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details:\n 'JSON response format schema is only supported with structuredOutputs',\n });\n }\n\n const {\n tools: openaiTools,\n toolChoice: openaiToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n user: compatibleOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n response_format:\n responseFormat?.type === 'json'\n ? this.supportsStructuredOutputs === true &&\n responseFormat.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n stop: stopSequences,\n seed,\n ...Object.fromEntries(\n Object.entries(\n providerOptions?.[this.providerOptionsName] ?? 
{},\n ).filter(\n ([key]) =>\n !Object.keys(openaiCompatibleProviderOptions.shape).includes(key),\n ),\n ),\n\n reasoning_effort: compatibleOptions.reasoningEffort,\n verbosity: compatibleOptions.textVerbosity,\n\n // messages:\n messages: convertToOpenAICompatibleChatMessages(prompt),\n\n // tools:\n tools: openaiTools,\n tool_choice: openaiToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = JSON.stringify(args);\n\n const {\n responseHeaders,\n value: responseBody,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n OpenAICompatibleChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = responseBody.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n const text = choice.message.content;\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // reasoning content:\n const reasoning =\n choice.message.reasoning_content ?? choice.message.reasoning;\n if (reasoning != null && reasoning.length > 0) {\n content.push({\n type: 'reasoning',\n text: reasoning,\n });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n // provider metadata:\n const providerMetadata: SharedV3ProviderMetadata = {\n [this.providerOptionsName]: {},\n ...(await this.config.metadataExtractor?.extractMetadata?.({\n parsedBody: rawResponse,\n })),\n };\n const completionTokenDetails =\n responseBody.usage?.completion_tokens_details;\n if (completionTokenDetails?.accepted_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].acceptedPredictionTokens =\n completionTokenDetails?.accepted_prediction_tokens;\n }\n if (completionTokenDetails?.rejected_prediction_tokens != null) {\n providerMetadata[this.providerOptionsName].rejectedPredictionTokens =\n completionTokenDetails?.rejected_prediction_tokens;\n }\n\n return {\n content,\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n usage: convertOpenAICompatibleChatUsage(responseBody.usage),\n providerMetadata,\n request: { body },\n response: {\n ...getResponseMetadata(responseBody),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs({ ...options });\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const metadataExtractor =\n this.config.metadataExtractor?.createStreamExtractor();\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n hasFinished: boolean;\n }> = [];\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage: z.infer<typeof openaiCompatibleTokenUsageSchema> | undefined =\n undefined;\n let isFirstChunk = true;\n const providerOptionsName = this.providerOptionsName;\n let isActiveReasoning = false;\n let isActiveText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n metadataExtractor?.processChunk(chunk.rawValue);\n\n // handle error chunks:\n if ('error' in chunk.value) {\n finishReason = 'error';\n controller.enqueue({\n type: 'error',\n error: chunk.value.error.message,\n });\n return;\n }\n\n // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX\n // remove this workaround when the issue is fixed\n const value = chunk.value as z.infer<typeof chunkBaseSchema>;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // enqueue reasoning before text deltas:\n const reasoningContent = delta.reasoning_content ?? 
delta.reasoning;\n if (reasoningContent) {\n if (!isActiveReasoning) {\n controller.enqueue({\n type: 'reasoning-start',\n id: 'reasoning-0',\n });\n isActiveReasoning = true;\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: 'reasoning-0',\n delta: reasoningContent,\n });\n }\n\n if (delta.content) {\n if (!isActiveText) {\n controller.enqueue({ type: 'text-start', id: 'txt-0' });\n isActiveText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: 'txt-0',\n delta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n if (toolCalls[index] == null) {\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallDelta.id,\n toolName: toolCallDelta.function.name,\n });\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n hasFinished: false,\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n\n continue;\n }\n\n // existing tool call, merge if not finished\n const toolCall = toolCalls[index];\n\n if (toolCall.hasFinished) {\n continue;\n }\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.id,\n delta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n toolCall.hasFinished = true;\n }\n }\n }\n },\n\n flush(controller) {\n if (isActiveReasoning) {\n controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });\n }\n\n if (isActiveText) {\n controller.enqueue({ type: 'text-end', id: 'txt-0' });\n }\n\n // go through all tool calls and send the ones that are not finished\n for (const toolCall of toolCalls.filter(\n toolCall => !toolCall.hasFinished,\n )) {\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.id,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n input: toolCall.function.arguments,\n });\n }\n\n const providerMetadata: SharedV3ProviderMetadata = {\n [providerOptionsName]: {},\n ...metadataExtractor?.buildMetadata(),\n };\n if (\n usage?.completion_tokens_details?.accepted_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].acceptedPredictionTokens =\n usage?.completion_tokens_details?.accepted_prediction_tokens;\n }\n if (\n usage?.completion_tokens_details?.rejected_prediction_tokens !=\n null\n ) {\n providerMetadata[providerOptionsName].rejectedPredictionTokens =\n usage?.completion_tokens_details?.rejected_prediction_tokens;\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleChatUsage(usage),\n providerMetadata,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst openaiCompatibleTokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n total_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n accepted_prediction_tokens: z.number().nullish(),\n rejected_prediction_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst OpenAICompatibleChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\nconst chunkBaseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n // Most openai-compatible models set `reasoning_content`, but some\n // providers serving `gpt-oss` set `reasoning`. 
See #7866\n reasoning_content: z.string().nullish(),\n reasoning: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openaiCompatibleTokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleChatChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) => z.union([chunkBaseSchema, errorSchema]);\n","import { z, ZodType } from 'zod/v4';\n\nexport const openaiCompatibleErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAICompatibleErrorData = z.infer<\n typeof openaiCompatibleErrorDataSchema\n>;\n\nexport type ProviderErrorStructure<T> = {\n errorSchema: ZodType<T>;\n errorToMessage: (error: T) => string;\n isRetryable?: (response: Response, error?: T) => boolean;\n};\n\nexport const defaultOpenAICompatibleErrorStructure: ProviderErrorStructure<OpenAICompatibleErrorData> =\n {\n errorSchema: openaiCompatibleErrorDataSchema,\n errorToMessage: data => data.error.message,\n };\n","import { LanguageModelV3Usage } from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleChatUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n prompt_tokens_details?: {\n cached_tokens?: number | null;\n } | null;\n completion_tokens_details?: {\n reasoning_tokens?: number | null;\n } | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n const cacheReadTokens = usage.prompt_tokens_details?.cached_tokens ?? 0;\n const reasoningTokens =\n usage.completion_tokens_details?.reasoning_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens - cacheReadTokens,\n cacheRead: cacheReadTokens,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens - reasoningTokens,\n reasoning: reasoningTokens,\n },\n raw: usage,\n };\n}\n","import {\n LanguageModelV3Prompt,\n SharedV3ProviderMetadata,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { OpenAICompatibleChatPrompt } from './openai-compatible-api-types';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nfunction getOpenAIMetadata(message: {\n providerOptions?: SharedV3ProviderMetadata;\n}) {\n return message?.providerOptions?.openaiCompatible ?? 
{};\n}\n\nexport function convertToOpenAICompatibleChatMessages(\n prompt: LanguageModelV3Prompt,\n): OpenAICompatibleChatPrompt {\n const messages: OpenAICompatibleChatPrompt = [];\n for (const { role, content, ...message } of prompt) {\n const metadata = getOpenAIMetadata({ ...message });\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content, ...metadata });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({\n role: 'user',\n content: content[0].text,\n ...getOpenAIMetadata(content[0]),\n });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text, ...partMetadata };\n }\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url: {\n url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n },\n ...partMetadata,\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality: `file part media type ${part.mediaType}`,\n });\n }\n }\n }\n }),\n ...metadata,\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n const partMetadata = getOpenAIMetadata(part);\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n ...partMetadata,\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n ...metadata,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'execution-denied':\n contentValue = output.reason ?? 'Tool execution denied.';\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n const toolResponseMetadata = getOpenAIMetadata(toolResponse);\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n ...toolResponseMetadata,\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleChatModelId = string;\n\nexport const openaiCompatibleProviderOptions = z.object({\n /**\n * A unique identifier representing your end-user, which can help the provider to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n\n /**\n * Reasoning effort for reasoning models. Defaults to `medium`.\n */\n reasoningEffort: z.string().optional(),\n\n /**\n * Controls the verbosity of the generated text. Defaults to `medium`.\n */\n textVerbosity: z.string().optional(),\n});\n\nexport type OpenAICompatibleProviderOptions = z.infer<\n typeof openaiCompatibleProviderOptions\n>;\n","import {\n LanguageModelV3CallOptions,\n SharedV3Warning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV3CallOptions['tools'];\n toolChoice?: LanguageModelV3CallOptions['toolChoice'];\n}): {\n tools:\n | undefined\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }>;\n toolChoice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'required'\n | undefined;\n toolWarnings: SharedV3Warning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: SharedV3Warning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const openaiCompatTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n strict?: boolean;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider') {\n toolWarnings.push({\n type: 'unsupported',\n feature: `provider-defined tool ${tool.id}`,\n });\n } else {\n openaiCompatTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n ...(tool.strict != null ? 
{ strict: tool.strict } : {}),\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiCompatTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiCompatTools, toolChoice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiCompatTools,\n toolChoice: {\n type: 'function',\n function: { name: toolChoice.toolName },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n APICallError,\n LanguageModelV3,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n ResponseHandler,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { convertOpenAICompatibleCompletionUsage } from './convert-openai-compatible-completion-usage';\nimport { convertToOpenAICompatibleCompletionPrompt } from './convert-to-openai-compatible-completion-prompt';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapOpenAICompatibleFinishReason } from './map-openai-compatible-finish-reason';\nimport {\n OpenAICompatibleCompletionModelId,\n openaiCompatibleCompletionProviderOptions,\n} from './openai-compatible-completion-options';\n\ntype OpenAICompatibleCompletionConfig = {\n provider: string;\n includeUsage?: boolean;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV3['supportedUrls'];\n};\n\nexport class OpenAICompatibleCompletionLanguageModel\n implements LanguageModelV3\n{\n readonly specificationVersion = 'v3';\n\n readonly modelId: OpenAICompatibleCompletionModelId;\n private readonly config: OpenAICompatibleCompletionConfig;\n private readonly failedResponseHandler: ResponseHandler<APICallError>;\n private readonly chunkSchema; // type inferred via constructor\n\n constructor(\n modelId: OpenAICompatibleCompletionModelId,\n config: OpenAICompatibleCompletionConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n\n // initialize error handling:\n const errorStructure =\n config.errorStructure ?? defaultOpenAICompatibleErrorStructure;\n this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(\n errorStructure.errorSchema,\n );\n this.failedResponseHandler = createJsonErrorResponseHandler(errorStructure);\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: SharedV3Warning[] = [];\n\n // Parse provider options\n const completionOptions =\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleCompletionProviderOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({ type: 'unsupported', feature: 'topK' });\n }\n\n if (tools?.length) {\n warnings.push({ type: 'unsupported', feature: 'tools' });\n }\n\n if (toolChoice != null) {\n warnings.push({ type: 'unsupported', feature: 'toolChoice' });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported',\n feature: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompatibleCompletionPrompt({ prompt });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n return {\n args: {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: completionOptions.echo,\n logit_bias: completionOptions.logitBias,\n suffix: completionOptions.suffix,\n user: completionOptions.user,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n ...providerOptions?.[this.providerOptionsName],\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n },\n warnings,\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleCompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // text content:\n if (choice.text != null && choice.text.length > 0) {\n content.push({ type: 'text', text: choice.text });\n }\n\n return {\n content,\n usage: convertOpenAICompatibleCompletionUsage(response.usage),\n finishReason: mapOpenAICompatibleFinishReason(choice.finish_reason),\n request: { body: args },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options: this.config.includeUsage\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: this.failedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n this.chunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n let usage:\n | {\n prompt_tokens: number | undefined;\n completion_tokens: number | undefined;\n total_tokens: number | undefined;\n }\n | undefined = undefined;\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof this.chunkSchema>>,\n LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n\n controller.enqueue({\n type: 'text-start',\n id: '0',\n });\n }\n\n if (value.usage != null) {\n usage = value.usage;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAICompatibleFinishReason(\n choice.finish_reason,\n );\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: choice.text,\n });\n }\n },\n\n flush(controller) {\n if (!isFirstChunk) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage: convertOpenAICompatibleCompletionUsage(usage),\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nconst usageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleCompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n }),\n ),\n usage: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst createOpenAICompatibleCompletionChunkSchema = <\n ERROR_SCHEMA extends z.core.$ZodType,\n>(\n errorSchema: ERROR_SCHEMA,\n) =>\n z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: usageSchema.nullish(),\n }),\n errorSchema,\n ]);\n","import { LanguageModelV3Usage 
} from '@ai-sdk/provider';\n\nexport function convertOpenAICompatibleCompletionUsage(\n usage:\n | {\n prompt_tokens?: number | null;\n completion_tokens?: number | null;\n }\n | undefined\n | null,\n): LanguageModelV3Usage {\n if (usage == null) {\n return {\n inputTokens: {\n total: undefined,\n noCache: undefined,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: undefined,\n text: undefined,\n reasoning: undefined,\n },\n raw: undefined,\n };\n }\n\n const promptTokens = usage.prompt_tokens ?? 0;\n const completionTokens = usage.completion_tokens ?? 0;\n\n return {\n inputTokens: {\n total: promptTokens,\n noCache: promptTokens,\n cacheRead: undefined,\n cacheWrite: undefined,\n },\n outputTokens: {\n total: completionTokens,\n text: completionTokens,\n reasoning: undefined,\n },\n raw: usage,\n };\n}\n","import {\n InvalidPromptError,\n LanguageModelV3Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompatibleCompletionPrompt({\n prompt,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV3Prompt;\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n }\n })\n .filter(Boolean)\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAICompatibleFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleCompletionModelId = string;\n\nexport const openaiCompatibleCompletionProviderOptions = z.object({\n /**\n * Echo back the prompt in addition to the completion.\n */\n echo: z.boolean().optional(),\n\n /**\n * Modify the likelihood of specified tokens appearing in the completion.\n *\n * Accepts a JSON object that maps tokens (specified by their token ID in\n * the GPT tokenizer) to an associated bias value from -100 to 100.\n */\n logitBias: z.record(z.string(), z.number()).optional(),\n\n /**\n * The suffix that comes after a completion of inserted text.\n */\n suffix: z.string().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleCompletionProviderOptions = z.infer<\n typeof openaiCompatibleCompletionProviderOptions\n>;\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n OpenAICompatibleEmbeddingModelId,\n openaiCompatibleEmbeddingProviderOptions,\n} from './openai-compatible-embedding-options';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\n\ntype OpenAICompatibleEmbeddingConfig = {\n /**\nOverride the maximum number of embeddings per call.\n */\n maxEmbeddingsPerCall?: number;\n\n /**\nOverride the parallelism of embedding calls.\n */\n supportsParallelCalls?: boolean;\n\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n};\n\nexport class OpenAICompatibleEmbeddingModel implements EmbeddingModelV3 {\n readonly specificationVersion = 'v3';\n readonly modelId: OpenAICompatibleEmbeddingModelId;\n\n private readonly config: OpenAICompatibleEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.config.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.config.supportsParallelCalls ?? 
true;\n }\n\n constructor(\n modelId: OpenAICompatibleEmbeddingModelId,\n config: OpenAICompatibleEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n private get providerOptionsName(): string {\n return this.config.provider.split('.')[0].trim();\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV3['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3['doEmbed']>>\n > {\n const compatibleOptions = Object.assign(\n (await parseProviderOptions({\n provider: 'openai-compatible',\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n (await parseProviderOptions({\n provider: this.providerOptionsName,\n providerOptions,\n schema: openaiCompatibleEmbeddingProviderOptions,\n })) ?? {},\n );\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: compatibleOptions.dimensions,\n user: compatibleOptions.user,\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n warnings: [],\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n providerMetadata: response.providerMetadata,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n providerMetadata: z\n .record(z.string(), z.record(z.string(), z.any()))\n .optional(),\n});\n","import { z } from 'zod/v4';\n\nexport type OpenAICompatibleEmbeddingModelId = string;\n\nexport const openaiCompatibleEmbeddingProviderOptions = z.object({\n /**\n * The number of dimensions the resulting output embeddings should have.\n * Only supported in text-embedding-3 and later models.\n */\n dimensions: z.number().optional(),\n\n /**\n * A unique identifier representing your end-user, which can help providers to\n * monitor and detect abuse.\n */\n user: z.string().optional(),\n});\n\nexport type OpenAICompatibleEmbeddingProviderOptions = z.infer<\n typeof openaiCompatibleEmbeddingProviderOptions\n>;\n","import {\n ImageModelV3,\n ImageModelV3File,\n SharedV3Warning,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n convertBase64ToUint8Array,\n convertToFormData,\n createJsonErrorResponseHandler,\n createJsonResponseHandler,\n downloadBlob,\n FetchFunction,\n postFormDataToApi,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport {\n defaultOpenAICompatibleErrorStructure,\n ProviderErrorStructure,\n} from '../openai-compatible-error';\nimport { OpenAICompatibleImageModelId } from './openai-compatible-image-settings';\n\nexport type OpenAICompatibleImageModelConfig = {\n provider: string;\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n errorStructure?: ProviderErrorStructure<any>;\n _internal?: {\n currentDate?: () => Date;\n };\n};\n\nexport class OpenAICompatibleImageModel implements ImageModelV3 {\n readonly specificationVersion = 'v3';\n readonly maxImagesPerCall = 10;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n readonly modelId: OpenAICompatibleImageModelId,\n private readonly config: OpenAICompatibleImageModelConfig,\n ) {}\n\n async doGenerate({\n prompt,\n n,\n size,\n aspectRatio,\n seed,\n providerOptions,\n headers,\n abortSignal,\n files,\n mask,\n }: Parameters<ImageModelV3['doGenerate']>[0]): Promise<\n Awaited<ReturnType<ImageModelV3['doGenerate']>>\n > {\n const warnings: Array<SharedV3Warning> = [];\n\n if (aspectRatio != null) {\n warnings.push({\n type: 'unsupported',\n feature: 'aspectRatio',\n details:\n 'This model does not support aspect ratio. Use `size` instead.',\n });\n }\n\n if (seed != null) {\n warnings.push({ type: 'unsupported', feature: 'seed' });\n }\n\n const currentDate = this.config._internal?.currentDate?.() ?? 
new Date();\n\n // Image editing mode - use form data and /images/edits endpoint\n if (files != null && files.length > 0) {\n const { value: response, responseHeaders } = await postFormDataToApi({\n url: this.config.url({\n path: '/images/edits',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n formData: convertToFormData<OpenAICompatibleFormDataInput>({\n model: this.modelId,\n prompt,\n image: await Promise.all(files.map(file => fileToBlob(file))),\n mask: mask != null ? await fileToBlob(mask) : undefined,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n }),\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n\n // Standard image generation mode - use JSON and /images/generations endpoint\n const { value: response, responseHeaders } = await postJsonToApi({\n url: this.config.url({\n path: '/images/generations',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n prompt,\n n,\n size,\n ...(providerOptions.openai ?? {}),\n response_format: 'b64_json',\n },\n failedResponseHandler: createJsonErrorResponseHandler(\n this.config.errorStructure ?? defaultOpenAICompatibleErrorStructure,\n ),\n successfulResponseHandler: createJsonResponseHandler(\n openaiCompatibleImageResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n images: response.data.map(item => item.b64_json),\n warnings,\n response: {\n timestamp: currentDate,\n modelId: this.modelId,\n headers: responseHeaders,\n },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompatibleImageResponseSchema = z.object({\n data: z.array(z.object({ b64_json: z.string() })),\n});\n\ntype OpenAICompatibleFormDataInput = {\n model: string;\n prompt: string | undefined;\n image: Blob | Blob[];\n mask?: Blob;\n n: number;\n size: `${number}x${number}` | undefined;\n [key: string]: unknown;\n};\n\nasync function fileToBlob(file: ImageModelV3File): Promise<Blob> {\n if (file.type === 'url') {\n return downloadBlob(file.url);\n }\n\n const data =\n file.data instanceof Uint8Array\n ? 
file.data\n : convertBase64ToUint8Array(file.data);\n\n return new Blob([data as BlobPart], { type: file.mediaType });\n}\n","import {\n EmbeddingModelV3,\n ImageModelV3,\n LanguageModelV3,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport {\n OpenAICompatibleChatConfig,\n OpenAICompatibleChatLanguageModel,\n} from './chat/openai-compatible-chat-language-model';\nimport { OpenAICompatibleCompletionLanguageModel } from './completion/openai-compatible-completion-language-model';\nimport { OpenAICompatibleEmbeddingModel } from './embedding/openai-compatible-embedding-model';\nimport { OpenAICompatibleImageModel } from './image/openai-compatible-image-model';\nimport { VERSION } from './version';\n\nexport interface OpenAICompatibleProvider<\n CHAT_MODEL_IDS extends string = string,\n COMPLETION_MODEL_IDS extends string = string,\n EMBEDDING_MODEL_IDS extends string = string,\n IMAGE_MODEL_IDS extends string = string,\n> extends Omit<ProviderV3, 'imageModel'> {\n (modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n languageModel(\n modelId: CHAT_MODEL_IDS,\n config?: Partial<OpenAICompatibleChatConfig>,\n ): LanguageModelV3;\n\n chatModel(modelId: CHAT_MODEL_IDS): LanguageModelV3;\n\n completionModel(modelId: COMPLETION_MODEL_IDS): LanguageModelV3;\n\n embeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n /**\n * @deprecated Use `embeddingModel` instead.\n */\n textEmbeddingModel(modelId: EMBEDDING_MODEL_IDS): EmbeddingModelV3;\n\n imageModel(modelId: IMAGE_MODEL_IDS): ImageModelV3;\n}\n\nexport interface OpenAICompatibleProviderSettings {\n /**\nBase URL for the API calls.\n */\n baseURL: string;\n\n /**\nProvider name.\n */\n name: string;\n\n /**\nAPI key for authenticating requests. If specified, adds an `Authorization`\nheader to request headers with the value `Bearer <apiKey>`. This will be added\nbefore any headers potentially specified in the `headers` option.\n */\n apiKey?: string;\n\n /**\nOptional custom headers to include in requests. These will be added to request headers\nafter any headers potentially added by use of the `apiKey` option.\n */\n headers?: Record<string, string>;\n\n /**\nOptional custom url query parameters to include in request urls.\n */\n queryParams?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nInclude usage information in streaming responses.\n */\n includeUsage?: boolean;\n\n /**\n * Whether the provider supports structured outputs in chat models.\n */\n supportsStructuredOutputs?: boolean;\n}\n\n/**\nCreate an OpenAICompatible provider instance.\n */\nexport function createOpenAICompatible<\n CHAT_MODEL_IDS extends string,\n COMPLETION_MODEL_IDS extends string,\n EMBEDDING_MODEL_IDS extends string,\n IMAGE_MODEL_IDS extends string,\n>(\n options: OpenAICompatibleProviderSettings,\n): OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n> {\n const baseURL = withoutTrailingSlash(options.baseURL);\n const providerName = options.name;\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const headers = {\n ...(options.apiKey && { Authorization: `Bearer ${options.apiKey}` }),\n ...options.headers,\n };\n\n const getHeaders = () =>\n withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION}`);\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `${providerName}.${modelType}`,\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createLanguageModel = (modelId: CHAT_MODEL_IDS) =>\n createChatModel(modelId);\n\n const createChatModel = (modelId: CHAT_MODEL_IDS) =>\n new OpenAICompatibleChatLanguageModel(modelId, {\n ...getCommonModelConfig('chat'),\n includeUsage: options.includeUsage,\n supportsStructuredOutputs: options.supportsStructuredOutputs,\n });\n\n const createCompletionModel = (modelId: COMPLETION_MODEL_IDS) =>\n new OpenAICompatibleCompletionLanguageModel(modelId, {\n ...getCommonModelConfig('completion'),\n includeUsage: options.includeUsage,\n });\n\n const createEmbeddingModel = (modelId: EMBEDDING_MODEL_IDS) =>\n new OpenAICompatibleEmbeddingModel(modelId, {\n ...getCommonModelConfig('embedding'),\n });\n\n const createImageModel = (modelId: IMAGE_MODEL_IDS) =>\n new OpenAICompatibleImageModel(modelId, getCommonModelConfig('image'));\n\n const provider = (modelId: CHAT_MODEL_IDS) => createLanguageModel(modelId);\n\n provider.specificationVersion = 'v3' as const;\n provider.languageModel = createLanguageModel;\n provider.chatModel = createChatModel;\n provider.completionModel = createCompletionModel;\n provider.embeddingModel = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n provider.imageModel = createImageModel;\n\n return provider as OpenAICompatibleProvider<\n CHAT_MODEL_IDS,\n COMPLETION_MODEL_IDS,\n EMBEDDING_MODEL_IDS,\n IMAGE_MODEL_IDS\n >;\n}\n","declare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? 
__PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";AAAA;AAAA,EAEE;AAAA,OAOK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OAEK;AACP,SAAS,KAAAA,UAAS;;;ACvBlB,SAAS,SAAkB;AAEpB,IAAM,kCAAkC,EAAE,OAAO;AAAA,EACtD,OAAO,EAAE,OAAO;AAAA,IACd,SAAS,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,EAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,EAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAYM,IAAM,wCACX;AAAA,EACE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC;;;AC3BK,SAAS,iCACd,OAasB;AAhBxB;AAiBE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AACpD,QAAM,mBAAkB,iBAAM,0BAAN,mBAA6B,kBAA7B,YAA8C;AACtE,QAAM,mBACJ,iBAAM,8BAAN,mBAAiC,qBAAjC,YAAqD;AAEvD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS,eAAe;AAAA,MACxB,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM,mBAAmB;AAAA,MACzB,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;ACtDA;AAAA,EAGE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEhC,SAAS,kBAAkB,SAExB;AAVH;AAWE,UAAO,8CAAS,oBAAT,mBAA0B,qBAA1B,YAA8C,CAAC;AACxD;AAEO,SAAS,sCACd,QAC4B;AAhB9B;AAiBE,QAAM,WAAuC,CAAC;AAC9C,aAAW,EAAE,MAAM,SAAS,GAAG,QAAQ,KAAK,QAAQ;AAClD,UAAM,WAAW,kBAAkB,EAAE,GAAG,QAAQ,CAAC;AACjD,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,GAAG,SAAS,CAAC;AACtD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,QAAQ,CAAC,EAAE;AAAA,YACpB,GAAG,kBAAkB,QAAQ,CAAC,CAAC;AAAA,UACjC,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,kBAAM,eAAe,kBAAkB,IAAI;AAC3C,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,MAAM,GAAG,aAAa;AAAA,cAC1D;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WAAW;AAAA,sBACT,KACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,oBAC9D;AAAA,oBACA,GAAG;AAAA,kBACL;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eAAe,wBAAwB,KAAK,SAAS;AAAA,kBACvD,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,UACD,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,gBAAM,eAAe,kBAAkB,IAAI;AAC3C,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,gBACA,GAAG;AAAA,cACL,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UAC/C,GAAG;AAAA,QACL,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AACH,8BAAe,YAAO,WAAP,YAAiB;AAChC;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,gBAAM,uBAAuB,kBAAkB,YAAY;AAC3D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,YACT,GAAG;AAAA,UACL,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC1JO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAI
G;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,gCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAIX,IAAM,kCAAkCA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtD,MAAMA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAK1B,iBAAiBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrC,eAAeA,GAAE,OAAO,EAAE,SAAS;AACrC,CAAC;;;ACpBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAsBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAAkC,CAAC;AAEzC,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,oBAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,YAAY;AAC5B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,SAAS,yBAAyB,KAAK,EAAE;AAAA,MAC3C,CAAC;AAAA,IACH,OAAO;AACL,wBAAkB,KAAK;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,GAAI,KAAK,UAAU,OAAO,EAAE,QAAQ,KAAK,OAAO,IAAI,CAAC;AAAA,QACvD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,mBAAmB,YAAY,QAAW,aAAa;AAAA,EACzE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,mBAAmB,YAAY,MAAM,aAAa;AAAA,IACpE,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,YAAY;AAAA,UACV,MAAM;AAAA,UACN,UAAU,EAAE,MAAM,WAAW,SAAS;AAAA,QACxC;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;APtCO,IAAM,oCAAN,MAAmE;AAAA;AAAA,EAUxE,YACE,SACA,QACA;AAZF,SAAS,uBAAuB;AA5DlC;AAyEI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwB,+BAA+B,cAAc;AAE1E,SAAK,6BAA4B,YAAO,8BAAP,YAAoC;AAAA,EACvE;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AA/FtB;AAgGI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAjHnD;AAkHI,UAAM,WAA8B,CAAC;AAGrC,UAAM,oBAAoB,OAAO;AAAA,OAC9B,WAAM,qBAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,WAAM,qBAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,SACE,iDAAgB,UAAS,UACzB,eAAe,UAAU,QACzB,CAAC,KAAK,2BACN;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB,kBACE,iDAAgB,UAAS,SACrB,KAAK,8BAA8B,QACnC,eAAe,UAAU,OACvB;AAAA,UACE,MAAM;AAAA,UACN,aAAa;AAAA,YACX,QAAQ,eAAe;AAAA,YACvB,OAAM,oBAAe,SAAf,YAAuB;AAAA,YAC7B,aAAa,eAAe;AAAA,UAC9B;AAAA,QACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA,QAEN,MAAM;AAAA,QACN;AAAA,QACA,GAAG,OAAO;AAAA,UACR,OAAO;AAAA,aACL,wDAAkB,KAAK,yBAAvB,YAA+C,CAAC;AAAA,UAClD,EAAE;AAAA,YACA,CAAC,CAAC,GAAG,MACH,CAAC,OAAO,KAAK,gCAAgC,KAAK,EAAE,SAAS,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,QAEA,kBAAkB,kBAAkB;AAAA,QACpC,WAAW,kBAAkB;AAAA;AAAA,QAG7B,UAAU,sCAAsC,MAAM;AAAA;AAAA,QAGtD,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU
,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AApNjE;AAqNI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,aAAa,QAAQ,CAAC;AACrC,UAAM,UAAyC,CAAC;AAGhD,UAAM,OAAO,OAAO,QAAQ;AAC5B,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,UAAM,aACJ,YAAO,QAAQ,sBAAf,YAAoC,OAAO,QAAQ;AACrD,QAAI,aAAa,QAAQ,UAAU,SAAS,GAAG;AAC7C,cAAQ,KAAK;AAAA,QACX,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,mBAA6C;AAAA,MACjD,CAAC,KAAK,mBAAmB,GAAG,CAAC;AAAA,MAC7B,GAAI,QAAM,gBAAK,OAAO,sBAAZ,mBAA+B,oBAA/B,4BAAiD;AAAA,QACzD,YAAY;AAAA,MACd;AAAA,IACF;AACA,UAAM,0BACJ,kBAAa,UAAb,mBAAoB;AACtB,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AACA,SAAI,iEAAwB,+BAA8B,MAAM;AAC9D,uBAAiB,KAAK,mBAAmB,EAAE,2BACzC,iEAAwB;AAAA,IAC5B;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,gCAAgC,OAAO,aAAa;AAAA,MAClE,OAAO,iCAAiC,aAAa,KAAK;AAAA,MAC1D;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,YAAY;AAAA,QACnC,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AA9S/D;AA+SI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,EAAE,GAAG,QAAQ,CAAC;AAE5D,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,qBACJ,UAAK,OAAO,sBAAZ,mBAA+B;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2B;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAQD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QACF;AACF,QAAI,eAAe;AACnB,UAAM,sBAAsB,KAAK;AACjC,QAAI,oBAAoB;AACxB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzWvC,gBAAAC,KAAA;AA2WY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,mEAAmB,aAAa,MAAM;AAGtC,gBAAI,WAAW,MAAM,OAAO;AAC1B,6BAAe;AACf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,OAAO,MAAM,MAAM,MAAM;AAAA,cAC3B,CAAC;AACD;AAAA,YACF;AAIA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAGrB,kBAAM,oBAAmBA,MAAA,MAAM,sBAAN,OAAAA,MAA2B,MAAM;AAC1D,gBAAI,kBAAkB;AACpB,kBAAI,CAAC,mBAAmB;AACtB,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AACD,oCAAoB;AAAA,cACtB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS;AACjB,kBAAI,CAAC,cAAc;AACjB,2BAAW,QAAQ,EAAE,
MAAM,cAAc,IAAI,QAAQ,CAAC;AACtD,+BAAe;AAAA,cACjB;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,MAAM;AAAA,cACf,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAE5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yBAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,cAAc;AAAA,oBAClB,UAAU,cAAc,SAAS;AAAA,kBACnC,CAAC;AAED,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,oBACA,aAAa;AAAA,kBACf;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,wBACb,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AAAA,oBACH;AAIA,wBAAI,eAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAIA,UAAS;AAAA,sBACf,CAAC;AAED,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,aAAY,KAAAA,UAAS,OAAT,YAAe,WAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,OAAOA,UAAS,SAAS;AAAA,sBAC3B,CAAC;AACD,sBAAAA,UAAS,cAAc;AAAA,oBACzB;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,oBAAI,SAAS,aAAa;AACxB;AAAA,gBACF;AAEA,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI,SAAS;AAAA,kBACb,QAAO,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBAC7C,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,QAChC,eAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,aAAY,cAAS,OAAT,YAAe,WAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,OAAO,SAAS,SAAS;AAAA,kBAC3B,CAAC;AACD,2BAAS,cAAc;AAAA,gBACzB;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAjjB5B,gBAAAD,KAAA;AAkjBY,gBAAI,mBAAmB;AACrB,yBAAW,QAAQ,EAAE,MAAM,iBAAiB,IAAI,cAAc,CAAC;AAAA,YACjE;AAEA,gBAAI,cAAc;AAChB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,QAAQ,CAAC;AAAA,YACtD;AAGA,uBAAW,YAAY,UAAU;AAAA,cAC/B,CAAAC,cAAY,CAACA,UAAS;AAAA,YACxB,GAAG;AACD,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI,SAAS;AAAA,cACf,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,aAAYD,MAAA,SAAS,OAAT,OAAAA,MAAe,WAAW;AAAA,gBACtC,UAAU,SAAS,SAAS;AAAA,gBAC5B,OAAO,SAAS,SAAS;AAAA,cAC3B,CAAC;AAAA,YACH;AAEA,kBAAM,mBAA6C;AAAA,cACjD,CAAC,mBAAmB,GAAG,CAAC;AAAA,cACxB,GAAG,uDAAmB;AAAA,YACxB;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AACA,kBACE,oCAAO,8BAAP,mBAAkC,+BAClC,MACA;AACA,+BAAiB,mBAAmB,EAAE,4BACpC,oCAAO,8BAAP,mBAAkC;AAAA,YACtC;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,iCAAiC,KAAK;AAAA,cAC7C;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,mCAAmCE,GACtC,OAAO;AAAA,EACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjC,uBAAuBA,GACpB,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2BA,GACxB,OAAO;AAAA,IACN,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC/C,4BAA4BA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACjD,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,qCAAqCA,GAAE,OAAO;AAAA,EAClD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS
A,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAED,IAAM,kBAAkBA,GAAE,OAAO;AAAA,EAC/B,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GACJ,OAAO;AAAA,QACN,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,QACpC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA;AAAA;AAAA,QAG5B,mBAAmBA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACtC,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC9B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,OAAOA,GAAE,OAAO;AAAA,YAChB,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,cACzB,WAAWA,GAAE,OAAO,EAAE,QAAQ;AAAA,YAChC,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC,EACA,QAAQ;AAAA,MACX,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wCAAwC,CAG5C,gBACGA,GAAE,MAAM,CAAC,iBAAiB,WAAW,CAAC;;;AQhsB3C;AAAA,EACE,kBAAAC;AAAA,EACA,oCAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,wBAAAC;AAAA,EAEA,iBAAAC;AAAA,OAEK;AACP,SAAS,KAAAC,UAAS;;;ACjBX,SAAS,uCACd,OAOsB;AAVxB;AAWE,MAAI,SAAS,MAAM;AACjB,WAAO;AAAA,MACL,aAAa;AAAA,QACX,OAAO;AAAA,QACP,SAAS;AAAA,QACT,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA,cAAc;AAAA,QACZ,OAAO;AAAA,QACP,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,MACA,KAAK;AAAA,IACP;AAAA,EACF;AAEA,QAAM,gBAAe,WAAM,kBAAN,YAAuB;AAC5C,QAAM,oBAAmB,WAAM,sBAAN,YAA2B;AAEpD,SAAO;AAAA,IACL,aAAa;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,IACd;AAAA,IACA,cAAc;AAAA,MACZ,OAAO;AAAA,MACP,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,IACA,KAAK;AAAA,EACP;AACF;;;AC7CA;AAAA,EACE;AAAA,EAEA,iCAAAC;AAAA,OACK;AAEA,SAAS,0CAA0C;AAAA,EACxD;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAOE;AAEA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,mBAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,UACF;AAAA,QACF,CAAC,EACA,OAAO,OAAO,EACd,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAIA,+BAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAIA,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;AC5FO,SAASC,qBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAASC,iCACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAA
K;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,SAAS,KAAAC,UAAS;AAIX,IAAM,4CAA4CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAIhE,MAAMA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQ3B,WAAWA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,OAAO,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKrD,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ALmBM,IAAM,0CAAN,MAEP;AAAA;AAAA,EAQE,YACE,SACA,QACA;AAVF,SAAS,uBAAuB;AAlDlC;AA6DI,SAAK,UAAU;AACf,SAAK,SAAS;AAGd,UAAM,kBACJ,YAAO,mBAAP,YAAyB;AAC3B,SAAK,cAAc;AAAA,MACjB,eAAe;AAAA,IACjB;AACA,SAAK,wBAAwBC,gCAA+B,cAAc;AAAA,EAC5E;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,IAAI,gBAAgB;AAjFtB;AAkFI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAnGnD;AAoGI,UAAM,WAA8B,CAAC;AAGrC,UAAM,qBACH,WAAMC,sBAAqB;AAAA,MAC1B,UAAU,KAAK;AAAA,MACf;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,QAAI,+BAAO,QAAQ;AACjB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,QAAQ,CAAC;AAAA,IACzD;AAEA,QAAI,cAAc,MAAM;AACtB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,aAAa,CAAC;AAAA,IAC9D;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,0CAA0C,EAAE,OAAO,CAAC;AAEtD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,WAAO;AAAA,MACL,MAAM;AAAA;AAAA,QAEJ,OAAO,KAAK;AAAA;AAAA,QAGZ,MAAM,kBAAkB;AAAA,QACxB,YAAY,kBAAkB;AAAA,QAC9B,QAAQ,kBAAkB;AAAA,QAC1B,MAAM,kBAAkB;AAAA;AAAA,QAGxB,YAAY;AAAA,QACZ;AAAA,QACA,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,QAClB;AAAA,QACA,GAAG,mDAAkB,KAAK;AAAA;AAAA,QAG1B,QAAQ;AAAA;AAAA,QAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,MACjC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,QAAQ,QAAQ,OAAO,KAAK,SAAS,GAAG;AACjD,cAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,IAClD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,OAAO,uCAAuC,SAAS,KAAK;AAAA,MAC5D,cAAcC,iCAAgC,OAAO,aAAa;AAAA,MAClE,SAAS,EAAE,MAAM,KAAK;AAAA,MACtB,UAAU;AAAA,QACR,GAAGC,qBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBAAgB,KAAK,OAAO,eACxB,EAAE,eAAe,KAAK,IACtB;AAAA,IACN;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMJ,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB,KAAK;AAAA,MAC5B,2BAA2BI;AAAA,QACzB,KAAK;AAAA,MACP;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,QAAI,QAMY;AAChB,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAC3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,
MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAGA,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAGD,qBAAoB,KAAK;AAAA,cAC9B,CAAC;AAED,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ,MAAM;AAAA,YAChB;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAeD;AAAA,gBACb,OAAO;AAAA,cACT;AAAA,YACF;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO,OAAO;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,CAAC,cAAc;AACjB,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA,OAAO,uCAAuC,KAAK;AAAA,YACrD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,IAAM,cAAcG,GAAE,OAAO;AAAA,EAC3B,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,2CAA2CA,GAAE,OAAO;AAAA,EACxD,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,MAAMA,GAAE,OAAO;AAAA,MACf,eAAeA,GAAE,OAAO;AAAA,IAC1B,CAAC;AAAA,EACH;AAAA,EACA,OAAO,YAAY,QAAQ;AAC7B,CAAC;AAID,IAAM,8CAA8C,CAGlD,gBAEAA,GAAE,MAAM;AAAA,EACNA,GAAE,OAAO;AAAA,IACP,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAASA,GAAE;AAAA,MACTA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,QACf,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAOA,GAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,YAAY,QAAQ;AAAA,EAC7B,CAAC;AAAA,EACD;AACF,CAAC;;;AM3XH;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACZlB,SAAS,KAAAC,UAAS;AAIX,IAAM,2CAA2CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAK/D,YAAYA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhC,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;;;ADwBM,IAAM,iCAAN,MAAiE;AAAA,EAkBtE,YACE,SACA,QACA;AApBF,SAAS,uBAAuB;AAqB9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAlBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAlDrC;AAmDI,YAAO,UAAK,OAAO,yBAAZ,YAAoC;AAAA,EAC7C;AAAA,EAEA,IAAI,wBAAiC;AAtDvC;AAuDI,YAAO,UAAK,OAAO,0BAAZ,YAAqC;AAAA,EAC9C;AAAA,EAUA,IAAY,sBAA8B;AACxC,WAAO,KAAK,OAAO,SAAS,MAAM,GAAG,EAAE,CAAC,EAAE,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA7EJ;AA8EI,UAAM,oBAAoB,OAAO;AAAA,OAC9B,WAAMC,sBAAqB;AAAA,QAC1B,UAAU;AAAA,QACV;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,OACP,WAAMA,sBAAqB;AAAA,QAC1B,UAAU,KAAK;AAAA,QACf;AAAA,QACA,QAAQ;AAAA,MACV,CAAC,MAJA,YAIM,CAAC;AAAA,IACV;AAEA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,kBAAkB;AAAA,QAC9B,MAAM,kBAAkB;AAAA,MAC1B;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,UAAU,CAAC;A
AAA,MACX,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,kBAAkB,SAAS;AAAA,MAC3B,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,oCAAoCC,GAAE,OAAO;AAAA,EACjD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACvD,kBAAkBA,GACf,OAAOA,GAAE,OAAO,GAAGA,GAAE,OAAOA,GAAE,OAAO,GAAGA,GAAE,IAAI,CAAC,CAAC,EAChD,SAAS;AACd,CAAC;;;AE9ID;AAAA,EACE,kBAAAC;AAAA,EACA;AAAA,EACA;AAAA,EACA,kCAAAC;AAAA,EACA,6BAAAC;AAAA,EACA;AAAA,EAEA;AAAA,EACA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAkBX,IAAM,6BAAN,MAAyD;AAAA,EAQ9D,YACW,SACQ,QACjB;AAFS;AACQ;AATnB,SAAS,uBAAuB;AAChC,SAAS,mBAAmB;AAAA,EASzB;AAAA,EAPH,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAOA,MAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AA5DJ;AA6DI,UAAM,WAAmC,CAAC;AAE1C,QAAI,eAAe,MAAM;AACvB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SACE;AAAA,MACJ,CAAC;AAAA,IACH;AAEA,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK,EAAE,MAAM,eAAe,SAAS,OAAO,CAAC;AAAA,IACxD;AAEA,UAAM,eAAc,sBAAK,OAAO,cAAZ,mBAAuB,gBAAvB,4CAA0C,oBAAI,KAAK;AAGvE,QAAI,SAAS,QAAQ,MAAM,SAAS,GAAG;AACrC,YAAM,EAAE,OAAOC,WAAU,iBAAAC,iBAAgB,IAAI,MAAM,kBAAkB;AAAA,QACnE,KAAK,KAAK,OAAO,IAAI;AAAA,UACnB,MAAM;AAAA,UACN,SAAS,KAAK;AAAA,QAChB,CAAC;AAAA,QACD,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,QACtD,UAAU,kBAAiD;AAAA,UACzD,OAAO,KAAK;AAAA,UACZ;AAAA,UACA,OAAO,MAAM,QAAQ,IAAI,MAAM,IAAI,UAAQ,WAAW,IAAI,CAAC,CAAC;AAAA,UAC5D,MAAM,QAAQ,OAAO,MAAM,WAAW,IAAI,IAAI;AAAA,UAC9C;AAAA,UACA;AAAA,UACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QACjC,CAAC;AAAA,QACD,uBAAuBC;AAAA,WACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,QAChC;AAAA,QACA,2BAA2BC;AAAA,UACzB;AAAA,QACF;AAAA,QACA;AAAA,QACA,OAAO,KAAK,OAAO;AAAA,MACrB,CAAC;AAED,aAAO;AAAA,QACL,QAAQJ,UAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,QAC/C;AAAA,QACA,UAAU;AAAA,UACR,WAAW;AAAA,UACX,SAAS,KAAK;AAAA,UACd,SAASC;AAAA,QACX;AAAA,MACF;AAAA,IACF;AAGA,UAAM,EAAE,OAAO,UAAU,gBAAgB,IAAI,MAAMI,eAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAASH,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,IAAI,qBAAgB,WAAhB,YAA0B,CAAC;AAAA,QAC/B,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuBC;AAAA,SACrB,UAAK,OAAO,mBAAZ,YAA8B;AAAA,MAChC;AAAA,MACA,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,QAAQ,SAAS,KAAK,IAAI,UAAQ,KAAK,QAAQ;AAAA,MAC/C;AAAA,MACA,UAAU;AAAA,QACR,WAAW;AAAA,QACX,SAAS,KAAK;AAAA,QACd,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,sCAAsCE,GAAE,OAAO;AAAA,EACnD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,UAAUA,GAAE,OAAO,EAAE,CAAC,CAAC;AAClD,CAAC;AAYD,eAAe,WAAW,MAAuC;AAC/D,MAAI,KAAK,SAAS,OAAO;AACvB,WAAO,aAAa,KAAK,GAAG;AAAA,EAC9B;AAEA,QAAM,OACJ,KAAK,gBAAgB,aACjB,KAAK,OACL,0BAA0B,KAAK,IAAI;AAEzC,SAAO,IAAI,KAAK,CAAC,IAAgB,GAAG,EAAE,MAAM,KAAK,UAAU,CAAC;AAC9D;;;AC9KA;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACTA,IAAM,UACX,OACI,kBACA;;;AD4FC,SAAS,uBAMd,SAMA;AACA,QAAM,UAAU,qBAAqB,QAAQ,OAAO;AACpD,QAAM,eAAe,QAAQ;AAS7B,QAAM,UAAU;AAAA,IACd,GAAI,QAAQ,UAAU,EAAE,eAAe,UAAU,QAAQ,MAAM,GAAG;AAAA,IAClE,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,aAAa,MACjB,oBAAoB,SAAS,4BAA4B,OAAO,EAAE;AAEpE,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,GAAG,YAAY,IAAI,SAAS;AAAA,IACtC,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QA
AM,sBAAsB,CAAC,YAC3B,gBAAgB,OAAO;AAEzB,QAAM,kBAAkB,CAAC,YACvB,IAAI,kCAAkC,SAAS;AAAA,IAC7C,GAAG,qBAAqB,MAAM;AAAA,IAC9B,cAAc,QAAQ;AAAA,IACtB,2BAA2B,QAAQ;AAAA,EACrC,CAAC;AAEH,QAAM,wBAAwB,CAAC,YAC7B,IAAI,wCAAwC,SAAS;AAAA,IACnD,GAAG,qBAAqB,YAAY;AAAA,IACpC,cAAc,QAAQ;AAAA,EACxB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,+BAA+B,SAAS;AAAA,IAC1C,GAAG,qBAAqB,WAAW;AAAA,EACrC,CAAC;AAEH,QAAM,mBAAmB,CAAC,YACxB,IAAI,2BAA2B,SAAS,qBAAqB,OAAO,CAAC;AAEvE,QAAM,WAAW,CAAC,YAA4B,oBAAoB,OAAO;AAEzE,WAAS,uBAAuB;AAChC,WAAS,gBAAgB;AACzB,WAAS,YAAY;AACrB,WAAS,kBAAkB;AAC3B,WAAS,iBAAiB;AAC1B,WAAS,qBAAqB;AAC9B,WAAS,aAAa;AAEtB,SAAO;AAMT;","names":["z","z","UnsupportedFunctionalityError","_a","toolCall","z","combineHeaders","createEventSourceResponseHandler","createJsonErrorResponseHandler","createJsonResponseHandler","parseProviderOptions","postJsonToApi","z","UnsupportedFunctionalityError","getResponseMetadata","mapOpenAICompatibleFinishReason","z","createJsonErrorResponseHandler","parseProviderOptions","postJsonToApi","combineHeaders","createJsonResponseHandler","mapOpenAICompatibleFinishReason","getResponseMetadata","createEventSourceResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","parseProviderOptions","postJsonToApi","z","z","parseProviderOptions","postJsonToApi","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","z","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z","response","responseHeaders","combineHeaders","createJsonErrorResponseHandler","createJsonResponseHandler","postJsonToApi","z"]}
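The bundled source above contains the new image-editing path: when `files` is passed to `OpenAICompatibleImageModel.doGenerate`, the request is sent as multipart form data to `/images/edits` (with an optional `mask`) instead of the JSON body sent to `/images/generations`. Below is a minimal sketch of exercising that path directly at the `ImageModelV3` spec level; the provider name, base URL, API key variable, model id, and the exact discriminator of the binary file value are illustrative assumptions, not part of this package, and in application code this call is normally wrapped by higher-level helpers rather than invoked directly.

```ts
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { readFile } from 'node:fs/promises';

async function main() {
  // Illustrative provider configuration; name, base URL, env var, and
  // model id are placeholders, not real endpoints.
  const provider = createOpenAICompatible({
    name: 'example',
    baseURL: 'https://api.example.com/v1',
    apiKey: process.env.EXAMPLE_API_KEY,
  });

  const model = provider.imageModel('example-image-model');

  // Passing `files` makes doGenerate post multipart form data to
  // /images/edits; without it, a JSON body goes to /images/generations.
  const result = await model.doGenerate({
    prompt: 'Replace the sky with a sunset',
    n: 1,
    size: '1024x1024',
    aspectRatio: undefined,
    seed: undefined,
    providerOptions: {},
    headers: undefined,
    abortSignal: undefined,
    files: [
      {
        // Assumed shape of the binary variant (data + mediaType); a
        // `{ type: 'url', url }` variant is also handled by the model.
        type: 'file',
        data: new Uint8Array(await readFile('input.png')),
        mediaType: 'image/png',
      },
    ],
    mask: undefined, // optional edit mask, same file shape as `files`
  });

  // The response schema maps images to base64 (`b64_json`) strings.
  console.log(`received ${result.images.length} image(s)`);
}

main().catch(console.error);
```

When `files` is empty or absent, behavior is unchanged: the model still posts JSON to `/images/generations` with `response_format: 'b64_json'`.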
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ai-sdk/openai-compatible",
3
- "version": "2.0.0-beta.54",
3
+ "version": "2.0.0-beta.56",
4
4
  "license": "Apache-2.0",
5
5
  "sideEffects": false,
6
6
  "main": "./dist/index.js",
@@ -27,8 +27,8 @@
27
27
  }
28
28
  },
29
29
  "dependencies": {
30
- "@ai-sdk/provider": "3.0.0-beta.27",
31
- "@ai-sdk/provider-utils": "4.0.0-beta.53"
30
+ "@ai-sdk/provider": "3.0.0-beta.28",
31
+ "@ai-sdk/provider-utils": "4.0.0-beta.55"
32
32
  },
33
33
  "devDependencies": {
34
34
  "@types/node": "20.17.24",