@ai-sdk/openai 1.0.0 → 1.0.2

This diff shows the changes between publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
@@ -574,42 +574,6 @@ var OpenAIChatLanguageModel = class {
     };
   }
   async doStream(options) {
-    if (isReasoningModel(this.modelId)) {
-      const result = await this.doGenerate(options);
-      const simulatedStream = new ReadableStream({
-        start(controller) {
-          controller.enqueue({ type: "response-metadata", ...result.response });
-          if (result.text) {
-            controller.enqueue({
-              type: "text-delta",
-              textDelta: result.text
-            });
-          }
-          if (result.toolCalls) {
-            for (const toolCall of result.toolCalls) {
-              controller.enqueue({
-                type: "tool-call",
-                ...toolCall
-              });
-            }
-          }
-          controller.enqueue({
-            type: "finish",
-            finishReason: result.finishReason,
-            usage: result.usage,
-            logprobs: result.logprobs,
-            providerMetadata: result.providerMetadata
-          });
-          controller.close();
-        }
-      });
-      return {
-        stream: simulatedStream,
-        rawCall: result.rawCall,
-        rawResponse: result.rawResponse,
-        warnings: result.warnings
-      };
-    }
     const { args, warnings } = this.getArgs(options);
     const body = {
       ...args,
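The block removed above was 1.0.0's workaround for reasoning models (`isReasoningModel` matches model ids starting with `o1-`): `doStream` performed a blocking `doGenerate` call and replayed the finished result as a simulated stream. With 1.0.2 that gate is gone, so `o1-*` models are requested through the same `stream: true` path as every other chat model. Below is a minimal sketch of the dropped fallback with simplified types; the `GenerateResult` shape and `simulateStream` name are illustrative only and are not part of the package's API.

```ts
// Sketch of the 1.0.0 fallback for reasoning models: run a one-shot generation
// and replay its result as a single-chunk stream. Types simplified for illustration.
type GenerateResult = {
  response?: Record<string, unknown>;
  text?: string;
  toolCalls?: Array<Record<string, unknown>>;
  finishReason: string;
  usage: { promptTokens: number; completionTokens: number };
  logprobs?: unknown;
  providerMetadata?: Record<string, unknown>;
};

function simulateStream(result: GenerateResult): ReadableStream<Record<string, unknown>> {
  return new ReadableStream({
    start(controller) {
      // Replay the metadata, text, and tool calls of the finished generation...
      controller.enqueue({ type: "response-metadata", ...result.response });
      if (result.text) {
        controller.enqueue({ type: "text-delta", textDelta: result.text });
      }
      for (const toolCall of result.toolCalls ?? []) {
        controller.enqueue({ type: "tool-call", ...toolCall });
      }
      // ...then close with a single finish event carrying usage and metadata.
      controller.enqueue({
        type: "finish",
        finishReason: result.finishReason,
        usage: result.usage,
        logprobs: result.logprobs,
        providerMetadata: result.providerMetadata,
      });
      controller.close();
    },
  });
}
```

Provided the upstream API streams these models, responses now arrive as genuine incremental parts instead of one replayed chunk at the end.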
@@ -646,7 +610,7 @@ var OpenAIChatLanguageModel = class {
       stream: response.pipeThrough(
         new TransformStream({
           transform(chunk, controller) {
-            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
             if (!chunk.success) {
               finishReason = "error";
               controller.enqueue({ type: "error", error: chunk.error });
@@ -670,12 +634,18 @@ var OpenAIChatLanguageModel = class {
                 promptTokens: (_a = value.usage.prompt_tokens) != null ? _a : void 0,
                 completionTokens: (_b = value.usage.completion_tokens) != null ? _b : void 0
               };
-              if (((_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null) {
-                providerMetadata = {
-                  openai: {
-                    cachedPromptTokens: (_d = value.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens
-                  }
-                };
+              const {
+                completion_tokens_details: completionTokenDetails,
+                prompt_tokens_details: promptTokenDetails
+              } = value.usage;
+              if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null || (promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
+                providerMetadata = { openai: {} };
+                if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
+                  providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
+                }
+                if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
+                  providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
+                }
               }
             }
             const choice = value.choices[0];
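The replacement block extends streaming usage reporting: besides `cachedPromptTokens`, a chunk's `completion_tokens_details.reasoning_tokens` is now surfaced as `providerMetadata.openai.reasoningTokens`, matching what `doGenerate` already reported, and the accumulated metadata is emitted on the final `finish` part. A small self-contained sketch of that mapping follows; the `StreamUsage` type and `toOpenAIProviderMetadata` helper are illustrative only, not exports of the package.

```ts
// Illustrative mapping from OpenAI's streamed usage details to the provider
// metadata shape added in 1.0.2. Names here are hypothetical, not package exports.
type StreamUsage = {
  completion_tokens_details?: { reasoning_tokens?: number | null } | null;
  prompt_tokens_details?: { cached_tokens?: number | null } | null;
};

function toOpenAIProviderMetadata(usage: StreamUsage) {
  const reasoningTokens = usage.completion_tokens_details?.reasoning_tokens;
  const cachedPromptTokens = usage.prompt_tokens_details?.cached_tokens;

  // Only create the metadata object when at least one detail is present,
  // mirroring the `!= null` guards in the diff above.
  if (reasoningTokens == null && cachedPromptTokens == null) return undefined;

  const openai: Record<string, number> = {};
  if (reasoningTokens != null) openai.reasoningTokens = reasoningTokens;
  if (cachedPromptTokens != null) openai.cachedPromptTokens = cachedPromptTokens;
  return { openai };
}

// Example chunk usage reporting both details (values are illustrative):
console.log(
  toOpenAIProviderMetadata({
    completion_tokens_details: { reasoning_tokens: 128 },
    prompt_tokens_details: { cached_tokens: 1024 },
  }),
); // -> { openai: { reasoningTokens: 128, cachedPromptTokens: 1024 } }
```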
@@ -723,7 +693,7 @@ var OpenAIChatLanguageModel = class {
                     message: `Expected 'id' to be a string.`
                   });
                 }
-                if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
+                if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
                   throw new import_provider3.InvalidResponseDataError({
                     data: toolCallDelta,
                     message: `Expected 'function.name' to be a string.`
@@ -734,11 +704,11 @@ var OpenAIChatLanguageModel = class {
                   type: "function",
                   function: {
                     name: toolCallDelta.function.name,
-                    arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
+                    arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
                   }
                 };
                 const toolCall2 = toolCalls[index];
-                if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
+                if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
                   if (toolCall2.function.arguments.length > 0) {
                     controller.enqueue({
                       type: "tool-call-delta",
@@ -752,7 +722,7 @@ var OpenAIChatLanguageModel = class {
                     controller.enqueue({
                       type: "tool-call",
                       toolCallType: "function",
-                      toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils3.generateId)(),
+                      toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
                       toolName: toolCall2.function.name,
                       args: toolCall2.function.arguments
                     });
@@ -761,21 +731,21 @@ var OpenAIChatLanguageModel = class {
                   continue;
                 }
                 const toolCall = toolCalls[index];
-                if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
-                  toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
+                if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
+                  toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
                 }
                 controller.enqueue({
                   type: "tool-call-delta",
                   toolCallType: "function",
                   toolCallId: toolCall.id,
                   toolName: toolCall.function.name,
-                  argsTextDelta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
+                  argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
                 });
-                if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+                if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
                   controller.enqueue({
                     type: "tool-call",
                     toolCallType: "function",
-                    toolCallId: (_p = toolCall.id) != null ? _p : (0, import_provider_utils3.generateId)(),
+                    toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
                     toolName: toolCall.function.name,
                     args: toolCall.function.arguments
                   });
@@ -1 +1 @@
[source map hunk omitted: the single-line minified JSON map ({"version":3,"sources":["../../src/internal/index.ts","../../src/openai-chat-language-model.ts",…],"sourcesContent":[…],"mappings":"…"}) that ties the bundled output back to the package's TypeScript sources changed in step with the bundle edit above; its multi-kilobyte content is truncated in this extract and not reproduced here]
AA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAE3D,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAClC,YAAM,SAAS,MAAM,KAAK,WAAW,OAAO;AAE5C,YAAM,kBAAkB,IAAI,eAA0C;AAAA,QACpE,MAAM,YAAY;AAChB,qBAAW,QAAQ,EAAE,MAAM,qBAAqB,GAAG,OAAO,SAAS,CAAC;AAEpE,cAAI,OAAO,MAAM;AACf,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN,WAAW,OAAO;AAAA,YACpB,CAAC;AAAA,UACH;AAEA,cAAI,OAAO,WAAW;AACpB,uBAAW,YAAY,OAAO,WAAW;AACvC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG;AAAA,cACL,CAAC;AAAA,YACH;AAAA,UACF;AAEA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,UAC3B,CAAC;AAED,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,OAAO;AAAA,QAChB,aAAa,OAAO;AAAA,QACpB,UAAU,OAAO;AAAA,MACnB;AAAA,IACF;AAEA,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,UAAM,EAAE,yBAAyB,IAAI,KAAK;AAE1C,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAxcvC;AA0cY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;AACA,oBAAI,WAAM,MAAM,0BAAZ,mBAAmC,kBAAiB,MAAM;AAC5D,mCAAmB;AAAA,kBACjB,QAAQ;AAAA,oBACN,qBACE,WAAM,MAAM,0BAAZ,mBAAmC;AAAA,kBACvC;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAEA,kBAAM,kBACJ,4BAA4B,MAAM,iBAAiB,OAC/C;AAAA,cACE;AAAA,gBACE,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,OAAO;AAAA,cACT;AAAA,YACF,IACA,MAAM;AAEZ,gBAAI,mBAAmB,MAAM;AAC3B,yBAAW,iBAAiB,iBAAiB;AAC3C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT
,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAhoB5B;AAioBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,yBAAyB,cAC5B,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,eAAe,cACZ,OAAO;AAAA,UACN,WAAW,cAAE,OAAO;AAAA,UACpB,MAAM,cAAE,OAAO;AAAA,QACjB,CAAC,EACA,QAAQ;AAAA,QACX,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,UAAU,cACP,OAAO;AAAA,QACN,SAAS,cACN;AAAA,UACC,cAAE,OAAO;AAAA,YACP,OAAO,cAAE,OAAO;AAAA,YAChB,SAAS,cAAE,OAAO;AAAA,YAClB,cAAc,cAAE;AAAA,cACd,cAAE,OAAO;AAAA,gBACP,OAAO,cAAE,OAAO;AAAA,gBAChB,SAAS,cAAE,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC,EACA,QAAQ;AAAA,MACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,EACpC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,eAAe,cACZ,OAAO;AAAA,YACN,MAAM,cAAE,OAAO,EAAE,SAAS;AAAA,YAC1B,WAAW,cAAE,OAAO,EAAE,SAAS;AAAA,UACjC,CAAC,EACA,QAAQ;AAAA,UACX,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,U
AAU,cACP,OAAO;AAAA,UACN,SAAS,cACN;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,SAAS,cAAE,OAAO;AAAA,cAClB,cAAc,cAAE;AAAA,gBACd,cAAE,OAAO;AAAA,kBACP,OAAO,cAAE,OAAO;AAAA,kBAChB,SAAS,cAAE,OAAO;AAAA,gBACpB,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH,EACC,SAAS;AAAA,QACd,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SAAO,QAAQ,WAAW,KAAK;AACjC;AAEA,SAAS,aAAa,SAAiB;AACrC,SAAO,QAAQ,WAAW,sBAAsB;AAClD;;;AOjyBA,IAAAC,mBAOO;AACP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;AChBlB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ACrGO,SAAS,4BACd,UACqC;AACrC,SAAO,qCAAU,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,IAC7C;AAAA,IACA,SAAS,SAAS,eAAe,KAAK;AAAA,IACtC,aAAa,SAAS,eAClB,OAAO,QAAQ,SAAS,aAAa,KAAK,CAAC,EAAE;AAAA,MAC3C,CAAC,CAACC,QAAO,OAAO,OAAO;AAAA,QACrB,OAAAA;AAAA,QACA;AAAA,MACF;AAAA,IACF,IACA,CAAC;AAAA,EACP;AACF;;;AFeO,IAAM,gCAAN,MAA+D;AAAA,EASpE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,QAAQ,YAAY,CAAC;AAEzD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAClC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACN,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA;AAAA,MAGA,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAA
M;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,MACxB,UAAU,cACP,OAAO;AAAA,QACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC
,EAAE,SAAS;AAAA,MACnE,CAAC,EACA,QAAQ;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,QAChB,UAAU,cACP,OAAO;AAAA,UACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,QACnE,CAAC,EACA,QAAQ;AAAA,MACb,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AG7XD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,uBAAN,MAA+D;AAAA,EAmBpE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AAwCI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;","names":["import_provider","import_provider_utils","import_zod","token","logprob","import_provider_utils","import_provider","type","_a","toolCall","import_provider","import_provider_utils","import_zod","import_provider","token","import_provider","import_provider_utils","import_zod"]}
1
+ {"version":3,"sources":["../../src/internal/index.ts","../../src/openai-chat-language-model.ts","../../src/convert-to-openai-chat-messages.ts","../../src/map-openai-chat-logprobs.ts","../../src/map-openai-finish-reason.ts","../../src/openai-error.ts","../../src/get-response-metadata.ts","../../src/openai-prepare-tools.ts","../../src/openai-completion-language-model.ts","../../src/convert-to-openai-completion-prompt.ts","../../src/map-openai-completion-logprobs.ts","../../src/openai-embedding-model.ts"],"sourcesContent":["export * from '../openai-chat-language-model';\nexport * from '../openai-chat-settings';\nexport * from '../openai-completion-language-model';\nexport * from '../openai-completion-settings';\nexport * from '../openai-embedding-model';\nexport * from '../openai-embedding-settings';\n","import {\n InvalidResponseDataError,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1LogProbs,\n LanguageModelV1ProviderMetadata,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n generateId,\n isParsableJson,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAIChatMessages } from './convert-to-openai-chat-messages';\nimport { mapOpenAIChatLogProbsOutput } from './map-openai-chat-logprobs';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport { OpenAIChatModelId, OpenAIChatSettings } from './openai-chat-settings';\nimport {\n openAIErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './openai-prepare-tools';\n\ntype OpenAIChatConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAIChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n\n readonly modelId: OpenAIChatModelId;\n readonly settings: OpenAIChatSettings;\n\n private readonly config: OpenAIChatConfig;\n\n constructor(\n modelId: OpenAIChatModelId,\n settings: OpenAIChatSettings,\n config: OpenAIChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get supportsStructuredOutputs(): boolean {\n return this.settings.structuredOutputs === true;\n }\n\n get defaultObjectGenerationMode() {\n // audio models don't support structured outputs:\n if (isAudioModel(this.modelId)) {\n return 'tool';\n }\n\n return this.supportsStructuredOutputs ? 
'json' : 'tool';\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportsImageUrls(): boolean {\n // image urls can be sent if downloadImages is disabled (default):\n return !this.settings.downloadImages;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerMetadata,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;\n\n if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {\n throw new UnsupportedFunctionalityError({\n functionality: 'useLegacyFunctionCalling with parallelToolCalls',\n });\n }\n\n if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {\n throw new UnsupportedFunctionalityError({\n functionality: 'structuredOutputs with useLegacyFunctionCalling',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n logit_bias: this.settings.logitBias,\n logprobs:\n this.settings.logprobs === true ||\n typeof this.settings.logprobs === 'number'\n ? true\n : undefined,\n top_logprobs:\n typeof this.settings.logprobs === 'number'\n ? this.settings.logprobs\n : typeof this.settings.logprobs === 'boolean'\n ? this.settings.logprobs\n ? 0\n : undefined\n : undefined,\n user: this.settings.user,\n parallel_tool_calls: this.settings.parallelToolCalls,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n stop: stopSequences,\n seed,\n\n // openai specific settings:\n max_completion_tokens:\n providerMetadata?.openai?.maxCompletionTokens ?? undefined,\n store: providerMetadata?.openai?.store ?? undefined,\n metadata: providerMetadata?.openai?.metadata ?? undefined,\n prediction: providerMetadata?.openai?.prediction ?? undefined,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // messages:\n messages: convertToOpenAIChatMessages({\n prompt,\n useLegacyFunctionCalling,\n }),\n };\n\n // reasoning models have fixed params, remove them if they are set:\n if (isReasoningModel(this.modelId)) {\n baseArgs.temperature = undefined;\n baseArgs.top_p = undefined;\n baseArgs.frequency_penalty = undefined;\n baseArgs.presence_penalty = undefined;\n }\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, functions, function_call, toolWarnings } =\n prepareTools({\n mode,\n useLegacyFunctionCalling,\n structuredOutputs: this.settings.structuredOutputs,\n });\n\n return {\n args: {\n ...baseArgs,\n tools,\n tool_choice,\n functions,\n function_call,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format:\n this.settings.structuredOutputs === true && mode.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: mode.schema,\n strict: true,\n name: mode.name ?? 
'response',\n description: mode.description,\n },\n }\n : { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: useLegacyFunctionCalling\n ? {\n ...baseArgs,\n function_call: {\n name: mode.tool.name,\n },\n functions: [\n {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n ],\n }\n : {\n ...baseArgs,\n tool_choice: {\n type: 'function',\n function: { name: mode.tool.name },\n },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n strict:\n this.settings.structuredOutputs === true\n ? true\n : undefined,\n },\n },\n ],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = body;\n const choice = response.choices[0];\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined;\n if (\n response.usage?.completion_tokens_details?.reasoning_tokens != null ||\n response.usage?.prompt_tokens_details?.cached_tokens != null\n ) {\n providerMetadata = { openai: {} };\n if (response.usage?.completion_tokens_details?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n response.usage?.completion_tokens_details?.reasoning_tokens;\n }\n if (response.usage?.prompt_tokens_details?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n response.usage?.prompt_tokens_details?.cached_tokens;\n }\n }\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls:\n this.settings.useLegacyFunctionCalling && choice.message.function_call\n ? [\n {\n toolCallType: 'function',\n toolCallId: generateId(),\n toolName: choice.message.function_call.name,\n args: choice.message.function_call.arguments,\n },\n ]\n : choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage?.prompt_tokens ?? NaN,\n completionTokens: response.usage?.completion_tokens ?? 
NaN,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n response: getResponseMetadata(response),\n warnings,\n logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),\n providerMetadata,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? { include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: {\n promptTokens: number | undefined;\n completionTokens: number | undefined;\n } = {\n promptTokens: undefined,\n completionTokens: undefined,\n };\n let logprobs: LanguageModelV1LogProbs;\n let isFirstChunk = true;\n\n const { useLegacyFunctionCalling } = this.settings;\n\n let providerMetadata: LanguageModelV1ProviderMetadata | undefined;\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens ?? undefined,\n completionTokens: value.usage.completion_tokens ?? 
undefined,\n };\n\n const {\n completion_tokens_details: completionTokenDetails,\n prompt_tokens_details: promptTokenDetails,\n } = value.usage;\n\n if (\n completionTokenDetails?.reasoning_tokens != null ||\n promptTokenDetails?.cached_tokens != null\n ) {\n providerMetadata = { openai: {} };\n if (completionTokenDetails?.reasoning_tokens != null) {\n providerMetadata.openai.reasoningTokens =\n completionTokenDetails?.reasoning_tokens;\n }\n if (promptTokenDetails?.cached_tokens != null) {\n providerMetadata.openai.cachedPromptTokens =\n promptTokenDetails?.cached_tokens;\n }\n }\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n const mappedLogprobs = mapOpenAIChatLogProbsOutput(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n\n const mappedToolCalls: typeof delta.tool_calls =\n useLegacyFunctionCalling && delta.function_call != null\n ? [\n {\n type: 'function',\n id: generateId(),\n function: delta.function_call,\n index: 0,\n },\n ]\n : delta.tool_calls;\n\n if (mappedToolCalls != null) {\n for (const toolCallDelta of mappedToolCalls) {\n const index = toolCallDelta.index;\n\n // Tool call start. OpenAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n };\n\n const toolCall = toolCalls[index];\n\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null\n ) {\n // send delta if the argument text has already started:\n if (toolCall.function.arguments.length > 0) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n }\n\n // check if tool call is complete\n // (some providers send the full tool call in one chunk):\n if (isParsableJson(toolCall.function.arguments)) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n\n continue;\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index];\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? 
'',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage: {\n promptTokens: usage.promptTokens ?? NaN,\n completionTokens: usage.completionTokens ?? NaN,\n },\n ...(providerMetadata != null ? { providerMetadata } : {}),\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n warnings,\n };\n }\n}\n\nconst openAITokenUsageSchema = z\n .object({\n prompt_tokens: z.number().nullish(),\n completion_tokens: z.number().nullish(),\n prompt_tokens_details: z\n .object({\n cached_tokens: z.number().nullish(),\n })\n .nullish(),\n completion_tokens_details: z\n .object({\n reasoning_tokens: z.number().nullish(),\n })\n .nullish(),\n })\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openAIChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant').nullish(),\n content: z.string().nullish(),\n function_call: z\n .object({\n arguments: z.string(),\n name: z.string(),\n })\n .nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().nullish(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullish(),\n finish_reason: z.string().nullish(),\n }),\n ),\n usage: openAITokenUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z\n .object({\n role: z.enum(['assistant']).nullish(),\n content: z.string().nullish(),\n function_call: z\n .object({\n name: z.string().optional(),\n arguments: z.string().optional(),\n })\n .nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n })\n .nullish(),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullish(),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: openAITokenUsageSchema,\n }),\n openAIErrorDataSchema,\n]);\n\nfunction isReasoningModel(modelId: string) {\n return 
modelId.startsWith('o1-');\n}\n\nfunction isAudioModel(modelId: string) {\n return modelId.startsWith('gpt-4o-audio-preview');\n}\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAIChatPrompt } from './openai-chat-prompt';\n\nexport function convertToOpenAIChatMessages({\n prompt,\n useLegacyFunctionCalling = false,\n}: {\n prompt: LanguageModelV1Prompt;\n useLegacyFunctionCalling?: boolean;\n}): OpenAIChatPrompt {\n const messages: OpenAIChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({ role: 'user', content: content[0].text });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n\n // OpenAI specific extension: image detail\n detail: part.providerMetadata?.openai?.imageDetail,\n },\n };\n }\n case 'file': {\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n \"'File content parts with URL data' functionality not supported.\",\n });\n }\n\n switch (part.mimeType) {\n case 'audio/wav': {\n return {\n type: 'input_audio',\n input_audio: { data: part.data, format: 'wav' },\n };\n }\n case 'audio/mp3':\n case 'audio/mpeg': {\n return {\n type: 'input_audio',\n input_audio: { data: part.data, format: 'mp3' },\n };\n }\n\n default: {\n throw new UnsupportedFunctionalityError({\n functionality: `File content part type ${part.mimeType} in user messages`,\n });\n }\n }\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n if (useLegacyFunctionCalling) {\n if (toolCalls.length > 1) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'useLegacyFunctionCalling with multiple tool calls in one message',\n });\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n function_call:\n toolCalls.length > 0 ? toolCalls[0].function : undefined,\n });\n } else {\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? 
toolCalls : undefined,\n });\n }\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n if (useLegacyFunctionCalling) {\n messages.push({\n role: 'function',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n } else {\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n });\n }\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1LogProbs } from '@ai-sdk/provider';\n\ntype OpenAIChatLogProbs = {\n content:\n | {\n token: string;\n logprob: number;\n top_logprobs:\n | {\n token: string;\n logprob: number;\n }[]\n | null;\n }[]\n | null;\n};\n\nexport function mapOpenAIChatLogProbsOutput(\n logprobs: OpenAIChatLogProbs | null | undefined,\n): LanguageModelV1LogProbs | undefined {\n return (\n logprobs?.content?.map(({ token, logprob, top_logprobs }) => ({\n token,\n logprob,\n topLogprobs: top_logprobs\n ? top_logprobs.map(({ token, logprob }) => ({\n token,\n logprob,\n }))\n : [],\n })) ?? undefined\n );\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const openAIErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n\n // The additional information below is handled loosely to support\n // OpenAI-compatible providers that have slightly different error\n // responses:\n type: z.string().nullish(),\n param: z.any().nullish(),\n code: z.union([z.string(), z.number()]).nullish(),\n }),\n});\n\nexport type OpenAIErrorData = z.infer<typeof openAIErrorDataSchema>;\n\nexport const openaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: openAIErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import {\n JSONSchema7,\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools({\n mode,\n useLegacyFunctionCalling = false,\n structuredOutputs = false,\n}: {\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n };\n useLegacyFunctionCalling?: boolean;\n structuredOutputs?: boolean;\n}): {\n tools?: {\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict?: boolean;\n };\n }[];\n tool_choice?:\n | 'auto'\n | 'none'\n | 'required'\n | { type: 'function'; function: { name: string } };\n\n // legacy support\n functions?: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n }[];\n function_call?: { name: string };\n\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const toolChoice = mode.toolChoice;\n\n if (useLegacyFunctionCalling) {\n const openaiFunctions: Array<{\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiFunctions.push({\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n functions: openaiFunctions,\n function_call: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case undefined:\n return {\n functions: openaiFunctions,\n function_call: undefined,\n toolWarnings,\n };\n case 'required':\n throw new UnsupportedFunctionalityError({\n functionality: 'useLegacyFunctionCalling and toolChoice: required',\n });\n default:\n return {\n functions: openaiFunctions,\n function_call: { name: toolChoice.toolName },\n toolWarnings,\n };\n }\n }\n\n const openaiTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: JSONSchema7;\n strict?: boolean;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n openaiTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n strict: structuredOutputs === true ? 
true : undefined,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: openaiTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: openaiTools, tool_choice: type, toolWarnings };\n case 'tool':\n return {\n tools: openaiTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1LogProbs,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';\nimport { mapOpenAICompletionLogProbs } from './map-openai-completion-logprobs';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAICompletionModelId,\n OpenAICompletionSettings,\n} from './openai-completion-settings';\nimport {\n openAIErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\nimport { getResponseMetadata } from './get-response-metadata';\n\ntype OpenAICompletionConfig = {\n provider: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n url: (options: { modelId: string; path: string }) => string;\n fetch?: FetchFunction;\n};\n\nexport class OpenAICompletionLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompletionModelId;\n readonly settings: OpenAICompletionSettings;\n\n private readonly config: OpenAICompletionConfig;\n\n constructor(\n modelId: OpenAICompletionModelId,\n settings: OpenAICompletionSettings,\n config: OpenAICompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences: userStopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (responseFormat != null && responseFormat.type !== 'text') {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format is not supported.',\n });\n }\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompletionPrompt({ prompt, inputFormat });\n\n const stop = [...(stopSequences ?? []), ...(userStopSequences ?? [])];\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n logprobs:\n typeof this.settings.logprobs === 'number'\n ? this.settings.logprobs\n : typeof this.settings.logprobs === 'boolean'\n ? 
this.settings.logprobs\n ? 0\n : undefined\n : undefined,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stop.length > 0 ? stop : undefined,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return { args: baseArgs, warnings };\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAICompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n logprobs: mapOpenAICompletionLogProbs(choice.logprobs),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n response: getResponseMetadata(response),\n warnings,\n request: { body: JSON.stringify(args) },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/completions',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiCompletionChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let logprobs: LanguageModelV1LogProbs;\n let isFirstChunk = true;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiCompletionChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n\n const mappedLogprobs = mapOpenAICompletionLogProbs(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings,\n request: { body: JSON.stringify(body) },\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openAICompletionResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n logprobs: z\n .object({\n tokens: z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullish(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionChunkSchema = z.union([\n z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n logprobs: z\n .object({\n tokens: 
z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullish(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n openAIErrorDataSchema,\n]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { LanguageModelV1LogProbs } from '@ai-sdk/provider';\n\ntype OpenAICompletionLogProps = {\n tokens: string[];\n token_logprobs: number[];\n top_logprobs: Record<string, number>[] | null;\n};\n\nexport function mapOpenAICompletionLogProbs(\n logprobs: OpenAICompletionLogProps | null | undefined,\n): LanguageModelV1LogProbs | undefined {\n return logprobs?.tokens.map((token, index) => ({\n token,\n logprob: logprobs.token_logprobs[index],\n topLogprobs: logprobs.top_logprobs\n ? 
Object.entries(logprobs.top_logprobs[index]).map(\n ([token, logprob]) => ({\n token,\n logprob,\n }),\n )\n : [],\n }));\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAIEmbeddingModelId,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-settings';\nimport { openaiFailedResponseHandler } from './openai-error';\n\ntype OpenAIEmbeddingConfig = {\n provider: string;\n url: (options: { modelId: string; path: string }) => string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class OpenAIEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAIEmbeddingModelId;\n\n private readonly config: OpenAIEmbeddingConfig;\n private readonly settings: OpenAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.settings.supportsParallelCalls ?? true;\n }\n\n constructor(\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings,\n config: OpenAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/embeddings',\n modelId: this.modelId,\n }),\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBASO;AACP,IAAAC,yBASO;AACP,IAAAC,cAAkB;;;ACpBlB,sBAGO;AACP,4BAA0C;AAGnC,SAAS,4BAA4B;AAAA,EAC1C;AAAA,EACA,2BAA2B;AAC7B,GAGqB;AACnB,QAAM,WAA6B,CAAC;AAEpC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC;AACxD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AA/BvC;AAgCY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA;AAAA,oBAGtD,SAAQ,gBAAK,qBAAL,mBAAuB,WAAvB,mBAA+B;AAAA,kBACzC;AAAA,gBACF;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,wBAAQ,KAAK,UAAU;AAAA,kBACrB,KAAK,aAAa;AAChB,2BAAO;AAAA,sBACL,MAAM;AAAA,sBACN,aAAa,EAAE,MAAM,KAAK,MAAM,QAAQ,MAAM;AAAA,oBAChD;AAAA,kBACF;AAAA,kBACA,KAAK;AAAA,kBACL,KAAK,cAAc;AACjB,2BAAO;AAAA,sBACL,MAAM;AAAA,sBACN,aAAa,EAAE,MAAM,KAAK,MAAM,QAAQ,MAAM;AAAA,oBAChD;AAAA,kBACF;AAAA,kBAEA,SAAS;AACP,0BAAM,IAAI,8CAA8B;AAAA,sBACtC,eAAe,0BAA0B,KAAK,QAAQ;AAAA,oBACxD,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,YAAI,0BAA0B;AAC5B,cAAI,UAAU,SAAS,GAAG;AACxB,kBAAM,IAAI,8CAA8B;AAAA,cACtC,eACE;AAAA,YACJ,CAAC;AAAA,UACH;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS;AAAA,YACT,eACE,UAAU,SAAS,IAAI,UAAU,CAAC,EAAE,WAAW;AAAA,UACnD,CAAC;AAAA,QACH,OAAO;AACL,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS;AAAA,YACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,UACjD,CAAC;AAAA,QACH;AAEA;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,cAAI,0BAA0B;AAC5B,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,MAAM,aAAa;AAAA,cACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC7C,CAAC;AAAA,UACH,OAAO;AACL,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,cAAc,aAAa;AAAA,cAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC7C,CAAC;AAAA,UACH;AAAA,QACF;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC5JO,SAAS,4BACd,UACqC;AAnBvC;AAoBE,UACE,gDAAU,YAAV,mBAAmB,IAAI,CAAC,EAAE,OAAO,SAAS,aAAa,OAAO;AAAA,IAC5D;AAAA,IACA;AAAA,IACA,aAAa,eACT,aAAa,IAAI,CAAC,EAAE,OAAAC,QAAO,SAAAC,SAAQ,OAAO;AAAA,MACxC,OAAAD;AAAA,MACA,SAAAC;AAAA,IACF,EAAE,IACF,CAAC;AAAA,EACP,QATA,YASO;AAEX;;;AC9BO,SAAS,sBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAA
O;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAAkB;AAClB,IAAAC,yBAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,IAKlB,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,IACzB,OAAO,aAAE,IAAI,EAAE,QAAQ;AAAA,IACvB,MAAM,aAAE,MAAM,CAAC,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,QAAQ;AAAA,EAClD,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ACrBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA,IAAAC,mBAKO;AAEA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA,2BAA2B;AAAA,EAC3B,oBAAoB;AACtB,GA+BE;AA1CF;AA4CE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,0BAA0B;AAC5B,UAAM,kBAID,CAAC;AAEN,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,oBAAoB;AACpC,qBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,MACtD,OAAO;AACL,wBAAgB,KAAK;AAAA,UACnB,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,QAAI,cAAc,MAAM;AACtB,aAAO;AAAA,QACL,WAAW;AAAA,QACX,eAAe;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAEA,UAAMC,QAAO,WAAW;AAExB,YAAQA,OAAM;AAAA,MACZ,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AACH,eAAO;AAAA,UACL,WAAW;AAAA,UACX,eAAe;AAAA,UACf;AAAA,QACF;AAAA,MACF,KAAK;AACH,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AACE,eAAO;AAAA,UACL,WAAW;AAAA,UACX,eAAe,EAAE,MAAM,WAAW,SAAS;AAAA,UAC3C;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AAEA,QAAM,cAQD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,kBAAY,KAAK;AAAA,QACf,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,UACjB,QAAQ,sBAAsB,OAAO,OAAO;AAAA,QAC9C;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,aAAa,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANxHO,IAAM,0BAAN,MAAyD;AAAA,EAQ9D,YACE,SACA,UACA,QACA;AAXF,SAAS,uBAAuB;AAY9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,4BAAqC;AACvC,WAAO,KAAK,SAAS,sBAAsB;AAAA,EAC7C;AAAA,EAEA,IAAI,8BAA8B;AAEhC,QAAI,aAAa,KAAK,OAAO,GAAG;AAC9B,aAAO;AAAA,IACT;AAEA,WAAO,KAAK,4BAA4B,SAAS;AAAA,EACnD;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,oBAA6B;AAE/B,WAAO,CAAC,KAAK,SAAS;AAAA,EACxB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7FnD;AA8FI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,2BAA2B,KAAK,SAAS;AAE/C,QAAI,4BAA4B,KAAK,SAAS,sBAAsB,MAAM;AACxE,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,QAAI,4BAA4B,KAAK,SAAS,sBAAsB,MAAM;AACxE,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe;AAAA,MACjB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,
KAAK,SAAS,aAAa,QAC3B,OAAO,KAAK,SAAS,aAAa,WAC9B,OACA;AAAA,MACN,cACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAClC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACN,MAAM,KAAK,SAAS;AAAA,MACpB,qBAAqB,KAAK,SAAS;AAAA;AAAA,MAGnC,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,MAAM;AAAA,MACN;AAAA;AAAA,MAGA,wBACE,gEAAkB,WAAlB,mBAA0B,wBAA1B,YAAiD;AAAA,MACnD,QAAO,gEAAkB,WAAlB,mBAA0B,UAA1B,YAAmC;AAAA,MAC1C,WAAU,gEAAkB,WAAlB,mBAA0B,aAA1B,YAAsC;AAAA,MAChD,aAAY,gEAAkB,WAAlB,mBAA0B,eAA1B,YAAwC;AAAA;AAAA,MAGpD,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,UAAU,4BAA4B;AAAA,QACpC;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAGA,QAAI,iBAAiB,KAAK,OAAO,GAAG;AAClC,eAAS,cAAc;AACvB,eAAS,QAAQ;AACjB,eAAS,oBAAoB;AAC7B,eAAS,mBAAmB;AAAA,IAC9B;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,WAAW,eAAe,aAAa,IACjE,aAAa;AAAA,UACX;AAAA,UACA;AAAA,UACA,mBAAmB,KAAK,SAAS;AAAA,QACnC,CAAC;AAEH,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,UACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBACE,KAAK,SAAS,sBAAsB,QAAQ,KAAK,UAAU,OACvD;AAAA,cACE,MAAM;AAAA,cACN,aAAa;AAAA,gBACX,QAAQ,KAAK;AAAA,gBACb,QAAQ;AAAA,gBACR,OAAM,UAAK,SAAL,YAAa;AAAA,gBACnB,aAAa,KAAK;AAAA,cACpB;AAAA,YACF,IACA,EAAE,MAAM,cAAc;AAAA,UAC9B;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM,2BACF;AAAA,YACE,GAAG;AAAA,YACH,eAAe;AAAA,cACb,MAAM,KAAK,KAAK;AAAA,YAClB;AAAA,YACA,WAAW;AAAA,cACT;AAAA,gBACE,MAAM,KAAK,KAAK;AAAA,gBAChB,aAAa,KAAK,KAAK;AAAA,gBACvB,YAAY,KAAK,KAAK;AAAA,cACxB;AAAA,YACF;AAAA,UACF,IACA;AAAA,YACE,GAAG;AAAA,YACH,aAAa;AAAA,cACX,MAAM;AAAA,cACN,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK;AAAA,YACnC;AAAA,YACA,OAAO;AAAA,cACL;AAAA,gBACE,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK,KAAK;AAAA,kBAChB,aAAa,KAAK,KAAK;AAAA,kBACvB,YAAY,KAAK,KAAK;AAAA,kBACtB,QACE,KAAK,SAAS,sBAAsB,OAChC,OACA;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UACJ;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxRjE;AAyRI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,QAAI;AACJ,UACE,oBAAS,UAAT,mBAAgB,8BAAhB,mBAA2C,qBAAoB,UAC/D,oBAAS,UAAT,mBAAgB,0BAAhB,mBAAuC,kBAAiB,MACxD;AACA,yBAAmB,EAAE,QAAQ,CAAC,EAAE;AAChC,YAAI,oBAAS,UAAT,mBAAgB,8BAAhB,mBAA2C,qBAAoB,MAAM;AACvE,yBAAiB,OAAO,mBACtB,oBAAS,UAAT,mBAAgB,8BAAhB,mBAA2C;AAAA,MAC/C;AACA,YAAI,oBAAS,UAAT,mBAAgB,0BAAhB,mBAAuC,kBAAiB,MAAM;AAChE,yBAAiB,OAAO,sBACtB,oBAAS,UAAT,mBAAgB,0BAAhB,mBAAuC;AAAA,MAC3C;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,WACE,KAAK,SAAS,4BAA4B,OAAO,QAAQ,gBACrD;AAAA,QACE;AAAA,UACE,cAAc;AAAA,UACd,gBAAY,mCAAW;AAAA,UACvB,UAAU,OAAO,QAAQ,cAAc;AAAA,UACvC,MAAM,OAAO,QAAQ,cAAc;AAAA,QACrC;AAAA,MACF,KACA,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AAzUpD,YAAAC;AAyUwD;AAAA,UAC1C,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACN,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,OAAO;AAAA,QACL,eAAc,oBAAS,UAAT,mBAAgB,kBAAhB,YAAiC;AAAA,QAC/C,mBAAkB,oBAAS,UAAT,mBAAgB,sBAAhB,YAAqC;AAAA,MACzD;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AA
AA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAGA;AAAA,MACF,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,UAAM,EAAE,yBAAyB,IAAI,KAAK;AAE1C,QAAI;AACJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA5ZvC;AA8ZY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,eAAc,WAAM,MAAM,kBAAZ,YAA6B;AAAA,gBAC3C,mBAAkB,WAAM,MAAM,sBAAZ,YAAiC;AAAA,cACrD;AAEA,oBAAM;AAAA,gBACJ,2BAA2B;AAAA,gBAC3B,uBAAuB;AAAA,cACzB,IAAI,MAAM;AAEV,mBACE,iEAAwB,qBAAoB,SAC5C,yDAAoB,kBAAiB,MACrC;AACA,mCAAmB,EAAE,QAAQ,CAAC,EAAE;AAChC,qBAAI,iEAAwB,qBAAoB,MAAM;AACpD,mCAAiB,OAAO,kBACtB,iEAAwB;AAAA,gBAC5B;AACA,qBAAI,yDAAoB,kBAAiB,MAAM;AAC7C,mCAAiB,OAAO,qBACtB,yDAAoB;AAAA,gBACxB;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAEA,kBAAM,kBACJ,4BAA4B,MAAM,iBAAiB,OAC/C;AAAA,cACE;AAAA,gBACE,MAAM;AAAA,gBACN,QAAI,mCAAW;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,OAAO;AAAA,cACT;AAAA,YACF,IACA,MAAM;AAEZ,gBAAI,mBAAmB,MAAM;AAC3B,yBAAW,iBAAiB,iBAAiB;AAC3C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,0CAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAEhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,MAChC;AAEA,wBAAIA,UAAS,SAAS,UAAU,SAAS,GAAG;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,YAAYA,UAAS;AAAA,wBACrB,UAAUA,UAAS,SAAS;AAAA,wBAC5B,eAAeA,UAAS,SAAS;AAAA,sBACnC,CAAC;AAAA,oBACH;AAIA,4BAAI,uCAAeA,UAAS,SAAS,SAAS,GAAG;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,cAAc;AAAA,wBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,wBACtC,UAAUA,UAAS,SAAS;AAAA,wBAC5B,MAAMA,UAAS,SAAS;AAAA,sBAC1B,CAAC;AAAA,oBACH;AAAA,kBACF;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AA
C7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAhmB5B;AAimBY,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA,OAAO;AAAA,gBACL,eAAc,WAAM,iBAAN,YAAsB;AAAA,gBACpC,mBAAkB,WAAM,qBAAN,YAA0B;AAAA,cAC9C;AAAA,cACA,GAAI,oBAAoB,OAAO,EAAE,iBAAiB,IAAI,CAAC;AAAA,YACzD,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,yBAAyB,cAC5B,OAAO;AAAA,EACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,mBAAmB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,uBAAuB,cACpB,OAAO;AAAA,IACN,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EACpC,CAAC,EACA,QAAQ;AAAA,EACX,2BAA2B,cACxB,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,CAAC,EACA,QAAQ;AACb,CAAC,EACA,QAAQ;AAIX,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW,EAAE,QAAQ;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,eAAe,cACZ,OAAO;AAAA,UACN,WAAW,cAAE,OAAO;AAAA,UACpB,MAAM,cAAE,OAAO;AAAA,QACjB,CAAC,EACA,QAAQ;AAAA,QACX,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,YACvB,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,UAAU,cACP,OAAO;AAAA,QACN,SAAS,cACN;AAAA,UACC,cAAE,OAAO;AAAA,YACP,OAAO,cAAE,OAAO;AAAA,YAChB,SAAS,cAAE,OAAO;AAAA,YAClB,cAAc,cAAE;AAAA,cACd,cAAE,OAAO;AAAA,gBACP,OAAO,cAAE,OAAO;AAAA,gBAChB,SAAS,cAAE,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC,EACA,QAAQ;AAAA,MACX,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,OAAO;AACT,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,EACpC,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cACJ,OAAO;AAAA,UACN,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,QAAQ;AAAA,UACpC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,eAAe,cACZ,OAAO;AAAA,YACN,MAAM,cAAE,OAAO,EAAE,SAAS;AAAA,YAC1B,WAAW,cAAE,OAAO,EAAE,SAAS;AAAA,UACjC,CAAC,EACA,QAAQ;AAAA,UACX,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC,EACA,QAAQ;AAAA,QACX,UAAU,cACP,OAAO;AAAA,UACN,SAAS,cACN;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,SAAS,cAAE,OAAO;AAAA,cAClB,cAAc,cAAE;AAAA,gBACd,cAAE,OAAO;AAAA,kBACP,OAAO,cAAE,OAAO;AAAA,kBAChB,SAAS,cAAE,OAAO;AAAA,gBACpB,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH,EACC,SAAS;AAAA,QACd,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,iBAAiB,SAAiB;AACzC,SAAO,QAAQ,WAAW,KAAK;AACjC;AAEA,SAAS,aAAa,SAAiB;AACrC,SAAO,QA
AQ,WAAW,sBAAsB;AAClD;;;AOjwBA,IAAAC,mBAOO;AACP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;AChBlB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ACrGO,SAAS,4BACd,UACqC;AACrC,SAAO,qCAAU,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,IAC7C;AAAA,IACA,SAAS,SAAS,eAAe,KAAK;AAAA,IACtC,aAAa,SAAS,eAClB,OAAO,QAAQ,SAAS,aAAa,KAAK,CAAC,EAAE;AAAA,MAC3C,CAAC,CAACC,QAAO,OAAO,OAAO;AAAA,QACrB,OAAAA;AAAA,QACA;AAAA,MACF;AAAA,IACF,IACA,CAAC;AAAA,EACP;AACF;;;AFeO,IAAM,gCAAN,MAA+D;AAAA,EASpE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,IACf;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,kBAAkB,QAAQ,eAAe,SAAS,QAAQ;AAC5D,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,QAAQ,YAAY,CAAC;AAEzD,UAAM,OAAO,CAAC,GAAI,wCAAiB,CAAC,GAAI,GAAI,gDAAqB,CAAC,CAAE;AAEpE,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAClC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACN,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA;AAAA,MAGA,QAAQ;AAAA;AAAA,MAGR,MAAM,KAAK,SAAS,IAAI,OAAO;AAAA,IACjC;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO,EAAE,MAAM,UAAU,SAAS;AAAA,MACpC;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA
,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO;AAAA,MACX,GAAG;AAAA,MACH,QAAQ;AAAA;AAAA,MAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,IACR;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI;AACJ,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa,OAAW,YAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC;AAAA,MACA,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,IACxC;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,MACxB,UAAU,cACP,OAAO;AAAA,QACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,MACnE,CAAC,EACA,QAAQ;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,IACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,IAC1B,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,QAChB,UAAU,cACP,OAAO;AAAA,UACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAC1
B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,QACnE,CAAC,EACA,QAAQ;AAAA,MACb,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;;;AG7XD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,uBAAN,MAA+D;AAAA,EAmBpE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AAwCI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;","names":["import_provider","import_provider_utils","import_zod","token","logprob","import_provider_utils","import_provider","type","_a","toolCall","import_provider","import_provider_utils","import_zod","import_provider","token","import_provider","import_provider_utils","import_zod"]}
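
Note: the `convertToOpenAICompletionPrompt` source embedded in the source map content above turns a chat-style `LanguageModelV1Prompt` into a single completion prompt string with `user:` / `assistant:` labels and a `\nuser:` stop sequence. The following is a minimal illustrative sketch of that behavior, not part of the package diff; the relative import path is internal to the package and shown only for orientation, and the message values are made up.

// Illustrative only: expected output shape of convertToOpenAICompletionPrompt
// (as defined in the sourcesContent above) for a system + user exchange
// using the default 'user' / 'assistant' labels.
import { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';

const { prompt, stopSequences } = convertToOpenAICompletionPrompt({
  inputFormat: 'messages',
  prompt: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: [{ type: 'text', text: 'Hello!' }] },
  ],
});

// prompt === 'You are a helpful assistant.\n\nuser:\nHello!\n\nassistant:\n'
// stopSequences === ['\nuser:']

A single-message text prompt with inputFormat 'prompt' is passed through unchanged, per the early return in the same function.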