@ai-sdk/openai 0.0.24 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -38,9 +38,12 @@ interface OpenAIChatSettings {
 
 type OpenAIChatConfig = {
     provider: string;
-    baseURL: string;
     compatibility: 'strict' | 'compatible';
     headers: () => Record<string, string | undefined>;
+    url: (options: {
+        modelId: string;
+        path: string;
+    }) => string;
 };
 declare class OpenAIChatLanguageModel implements LanguageModelV1 {
     readonly specificationVersion = "v1";
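In short, the breaking change to OpenAIChatConfig: the static baseURL string is replaced by a url callback that receives the model id and the endpoint path and returns the full request URL. A minimal sketch of a config object matching the new shape; the gateway host and routing scheme below are made-up examples, not part of the package:

const apiKey = 'my-api-key'; // placeholder value
const exampleChatConfig = {
  provider: 'openai.chat',
  compatibility: 'strict' as const,
  headers: () => ({ Authorization: `Bearer ${apiKey}` }),
  // The callback can route per model and per endpoint, e.g. through a gateway:
  url: ({ modelId, path }: { modelId: string; path: string }) =>
    `https://gateway.example.com/openai/${modelId}${path}`,
};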
package/dist/index.d.ts CHANGED
@@ -38,9 +38,12 @@ interface OpenAIChatSettings {
 
 type OpenAIChatConfig = {
     provider: string;
-    baseURL: string;
     compatibility: 'strict' | 'compatible';
     headers: () => Record<string, string | undefined>;
+    url: (options: {
+        modelId: string;
+        path: string;
+    }) => string;
 };
 declare class OpenAIChatLanguageModel implements LanguageModelV1 {
     readonly specificationVersion = "v1";
package/dist/index.js CHANGED
@@ -250,7 +250,10 @@ var OpenAIChatLanguageModel = class {
     var _a, _b;
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-      url: `${this.config.baseURL}/chat/completions`,
+      url: this.config.url({
+        path: "/chat/completions",
+        modelId: this.modelId
+      }),
       headers: this.config.headers(),
       body: args,
       failedResponseHandler: openaiFailedResponseHandler,
@@ -286,7 +289,10 @@ var OpenAIChatLanguageModel = class {
   async doStream(options) {
     const args = this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
-      url: `${this.config.baseURL}/chat/completions`,
+      url: this.config.url({
+        path: "/chat/completions",
+        modelId: this.modelId
+      }),
       headers: this.config.headers(),
       body: {
         ...args,
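Both doGenerate and doStream now resolve the endpoint through the config's url callback instead of concatenating onto baseURL. With the default callback added further down in this diff, the resulting request URL is unchanged; a small sketch of the equivalence (values are illustrative):

const baseURL = 'https://api.openai.com/v1';
const url = ({ path }: { modelId: string; path: string }) => `${baseURL}${path}`;
const requestUrl = url({ path: '/chat/completions', modelId: 'gpt-3.5-turbo' });
// requestUrl === 'https://api.openai.com/v1/chat/completions'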
@@ -929,7 +935,8 @@ var OpenAI = class {
     return new OpenAIChatLanguageModel(modelId, settings, {
       provider: "openai.chat",
       ...this.baseConfig,
-      compatibility: "strict"
+      compatibility: "strict",
+      url: ({ path }) => `${this.baseURL}${path}`
     });
   }
   completion(modelId, settings = {}) {
@@ -1025,7 +1032,7 @@ function createOpenAI(options = {}) {
   });
   const createChatModel = (modelId, settings = {}) => new OpenAIChatLanguageModel(modelId, settings, {
     provider: "openai.chat",
-    baseURL,
+    url: ({ path }) => `${baseURL}${path}`,
     headers: getHeaders,
     compatibility
   });
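For callers of createOpenAI nothing changes: baseURL (or the deprecated baseUrl) is still accepted and is wrapped into the url callback internally, so chat requests continue to go to baseURL plus '/chat/completions'. A usage sketch with a hypothetical gateway host:

import { createOpenAI } from '@ai-sdk/openai';

const openai = createOpenAI({
  baseURL: 'https://my-gateway.example.com/v1', // example value
  apiKey: 'my-api-key',                         // placeholder
});

// Behaves as in 0.0.24: requests go to https://my-gateway.example.com/v1/chat/completions
const model = openai.chat('gpt-3.5-turbo');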
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/openai-facade.ts","../src/openai-chat-language-model.ts", …],"sourcesContent":[…],"mappings":"…"}
+ {"version":3,"sources":["../src/index.ts","../src/openai-facade.ts","../src/openai-chat-language-model.ts", …],"sourcesContent":[…],"mappings":"…"}
[regenerated source map for dist/index.js: the minified mappings and embedded sourcesContent mirror the source changes shown above]
0\n : undefined\n : undefined,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // messages:\n messages: convertToOpenAIChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n return { ...baseArgs, ...prepareToolsAndToolChoice(mode) };\n }\n\n case 'object-json': {\n return {\n ...baseArgs,\n response_format: { type: 'json_object' },\n };\n }\n\n case 'object-tool': {\n return {\n ...baseArgs,\n tool_choice: { type: 'function', function: { name: mode.tool.name } },\n tools: [\n {\n type: 'function',\n function: {\n name: mode.tool.name,\n description: mode.tool.description,\n parameters: mode.tool.parameters,\n },\n },\n ],\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAIChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: this.config.url({\n path: '/chat/completions',\n modelId: this.modelId,\n }),\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: {\n name: string;\n arguments: string;\n };\n }> = [];\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let logprobs: LanguageModelV1LogProbs;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n const mappedLogprobs = mapOpenAIChatLogProbsOutput(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n\n if (delta.tool_calls != null) {\n for (const toolCallDelta of delta.tool_calls) {\n const index = toolCallDelta.index;\n\n // Tool call start. OpenAI returns all information except the arguments in the first chunk.\n if (toolCalls[index] == null) {\n if (toolCallDelta.type !== 'function') {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function' type.`,\n });\n }\n\n if (toolCallDelta.id == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'id' to be a string.`,\n });\n }\n\n if (toolCallDelta.function?.name == null) {\n throw new InvalidResponseDataError({\n data: toolCallDelta,\n message: `Expected 'function.name' to be a string.`,\n });\n }\n\n toolCalls[index] = {\n id: toolCallDelta.id,\n type: 'function',\n function: {\n name: toolCallDelta.function.name,\n arguments: toolCallDelta.function.arguments ?? '',\n },\n };\n\n const toolCall = toolCalls[index];\n\n // check if tool call is complete (some providers send the full tool call in one chunk)\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n\n // send tool call\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? 
generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n\n continue;\n }\n\n // existing tool call, merge\n const toolCall = toolCalls[index];\n\n if (toolCallDelta.function?.arguments != null) {\n toolCall.function!.arguments +=\n toolCallDelta.function?.arguments ?? '';\n }\n\n // send delta\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCallDelta.function.arguments ?? '',\n });\n\n // check if tool call is complete\n if (\n toolCall.function?.name != null &&\n toolCall.function?.arguments != null &&\n isParsableJson(toolCall.function.arguments)\n ) {\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id ?? generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openAIChatResponseSchema = z.object({\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n z.object({\n id: z.string().optional().nullable(),\n type: z.literal('function'),\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional(),\n }),\n index: z.number(),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullable()\n .optional(),\n finish_reason: z.string().optional().nullable(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiChatChunkSchema = z.union([\n z.object({\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n index: z.number(),\n id: z.string().nullish(),\n type: z.literal('function').optional(),\n function: z.object({\n name: z.string().nullish(),\n arguments: z.string().nullish(),\n }),\n }),\n )\n .nullish(),\n }),\n logprobs: z\n .object({\n content: z\n .array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n top_logprobs: z.array(\n z.object({\n token: z.string(),\n logprob: z.number(),\n }),\n ),\n }),\n )\n .nullable(),\n })\n .nullish(),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n }),\n openAIErrorDataSchema,\n]);\n\nfunction prepareToolsAndToolChoice(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n) {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined };\n }\n\n const mappedTools = tools.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n }));\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mappedTools, tool_choice: undefined };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n case 'required':\n return { tools: mappedTools, tool_choice: type };\n case 'tool':\n return {\n tools: mappedTools,\n tool_choice: {\n type: 'function',\n function: {\n name: toolChoice.toolName,\n },\n },\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);\n }\n }\n}\n","import { LanguageModelV1Prompt } from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { OpenAIChatPrompt } from './openai-chat-prompt';\n\nexport function convertToOpenAIChatMessages(\n prompt: LanguageModelV1Prompt,\n): OpenAIChatPrompt {\n const messages: OpenAIChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n if (content.length === 1 && content[0].type === 'text') {\n messages.push({ role: 'user', content: content[0].text });\n break;\n }\n\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url: {\n url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n },\n };\n }\n }\n }),\n });\n\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n tool_call_id: toolResponse.toolCallId,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1LogProbs } from '@ai-sdk/provider';\n\ntype OpenAIChatLogProbs = {\n content:\n | {\n token: string;\n logprob: number;\n top_logprobs:\n | {\n token: string;\n logprob: number;\n }[]\n | null;\n }[]\n | null;\n};\n\nexport function mapOpenAIChatLogProbsOutput(\n logprobs: OpenAIChatLogProbs | null | undefined,\n): LanguageModelV1LogProbs | undefined {\n return (\n logprobs?.content?.map(({ token, logprob, top_logprobs }) => ({\n token,\n logprob,\n topLogprobs: top_logprobs\n ? 
top_logprobs.map(({ token, logprob }) => ({\n token,\n logprob,\n }))\n : [],\n })) ?? undefined\n );\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapOpenAIFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n return 'length';\n case 'content_filter':\n return 'content-filter';\n case 'function_call':\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\nimport { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\n\nexport const openAIErrorDataSchema = z.object({\n error: z.object({\n message: z.string(),\n type: z.string(),\n param: z.any().nullable(),\n code: z.string().nullable(),\n }),\n});\n\nexport type OpenAIErrorData = z.infer<typeof openAIErrorDataSchema>;\n\nexport const openaiFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: openAIErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV1,\n LanguageModelV1FinishReason,\n LanguageModelV1LogProbs,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToOpenAICompletionPrompt } from './convert-to-openai-completion-prompt';\nimport { mapOpenAICompletionLogProbs } from './map-openai-completion-logprobs';\nimport { mapOpenAIFinishReason } from './map-openai-finish-reason';\nimport {\n OpenAICompletionModelId,\n OpenAICompletionSettings,\n} from './openai-completion-settings';\nimport {\n openAIErrorDataSchema,\n openaiFailedResponseHandler,\n} from './openai-error';\n\ntype OpenAICompletionConfig = {\n provider: string;\n baseURL: string;\n compatibility: 'strict' | 'compatible';\n headers: () => Record<string, string | undefined>;\n};\n\nexport class OpenAICompletionLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = undefined;\n\n readonly modelId: OpenAICompletionModelId;\n readonly settings: OpenAICompletionSettings;\n\n private readonly config: OpenAICompletionConfig;\n\n constructor(\n modelId: OpenAICompletionModelId,\n settings: OpenAICompletionSettings,\n config: OpenAICompletionConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n inputFormat,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const { prompt: completionPrompt, stopSequences } =\n convertToOpenAICompletionPrompt({ prompt, inputFormat });\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n echo: this.settings.echo,\n logit_bias: this.settings.logitBias,\n logprobs:\n typeof this.settings.logprobs === 'number'\n ? this.settings.logprobs\n : typeof this.settings.logprobs === 'boolean'\n ? this.settings.logprobs\n ? 
0\n : undefined\n : undefined,\n suffix: this.settings.suffix,\n user: this.settings.user,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n frequency_penalty: frequencyPenalty,\n presence_penalty: presencePenalty,\n seed,\n\n // prompt:\n prompt: completionPrompt,\n\n // stop sequences:\n stop: stopSequences,\n };\n\n switch (type) {\n case 'regular': {\n if (mode.tools?.length) {\n throw new UnsupportedFunctionalityError({\n functionality: 'tools',\n });\n }\n\n if (mode.toolChoice) {\n throw new UnsupportedFunctionalityError({\n functionality: 'toolChoice',\n });\n }\n\n return baseArgs;\n }\n\n case 'object-json': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-json mode',\n });\n }\n\n case 'object-tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-tool mode',\n });\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/completions`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openAICompletionResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.text,\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n finishReason: mapOpenAIFinishReason(choice.finish_reason),\n logprobs: mapOpenAICompletionLogProbs(choice.logprobs),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const args = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/completions`,\n headers: this.config.headers(),\n body: {\n ...this.getArgs(options),\n stream: true,\n\n // only include stream_options when in strict compatibility mode:\n stream_options:\n this.config.compatibility === 'strict'\n ? 
{ include_usage: true }\n : undefined,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n openaiCompletionChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { prompt: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let logprobs: LanguageModelV1LogProbs;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof openaiCompletionChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n // handle failed chunk parsing / validation:\n if (!chunk.success) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n // handle error chunks:\n if ('error' in value) {\n finishReason = 'error';\n controller.enqueue({ type: 'error', error: value.error });\n return;\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapOpenAIFinishReason(choice.finish_reason);\n }\n\n if (choice?.text != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: choice.text,\n });\n }\n\n const mappedLogprobs = mapOpenAICompletionLogProbs(\n choice?.logprobs,\n );\n if (mappedLogprobs?.length) {\n if (logprobs === undefined) logprobs = [];\n logprobs.push(...mappedLogprobs);\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n logprobs,\n usage,\n });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n warnings: [],\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openAICompletionResponseSchema = z.object({\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string(),\n logprobs: z\n .object({\n tokens: z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullable()\n .optional(),\n }),\n ),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiCompletionChunkSchema = z.union([\n z.object({\n choices: z.array(\n z.object({\n text: z.string(),\n finish_reason: z.string().nullish(),\n index: z.number(),\n logprobs: z\n .object({\n tokens: z.array(z.string()),\n token_logprobs: z.array(z.number()),\n top_logprobs: z.array(z.record(z.string(), z.number())).nullable(),\n })\n .nullable()\n .optional(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .optional()\n .nullable(),\n }),\n openAIErrorDataSchema,\n]);\n","import {\n InvalidPromptError,\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function convertToOpenAICompletionPrompt({\n prompt,\n inputFormat,\n user = 'user',\n assistant = 'assistant',\n}: {\n prompt: LanguageModelV1Prompt;\n inputFormat: 'prompt' | 
'messages';\n user?: string;\n assistant?: string;\n}): {\n prompt: string;\n stopSequences?: string[];\n} {\n // When the user supplied a prompt input, we don't transform it:\n if (\n inputFormat === 'prompt' &&\n prompt.length === 1 &&\n prompt[0].role === 'user' &&\n prompt[0].content.length === 1 &&\n prompt[0].content[0].type === 'text'\n ) {\n return { prompt: prompt[0].content[0].text };\n }\n\n // otherwise transform to a chat message format:\n let text = '';\n\n // if first message is a system message, add it to the text:\n if (prompt[0].role === 'system') {\n text += `${prompt[0].content}\\n\\n`;\n prompt = prompt.slice(1);\n }\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n throw new InvalidPromptError({\n message: 'Unexpected system message in prompt: ${content}',\n prompt,\n });\n }\n\n case 'user': {\n const userMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'images',\n });\n }\n }\n })\n .join('');\n\n text += `${user}:\\n${userMessage}\\n\\n`;\n break;\n }\n\n case 'assistant': {\n const assistantMessage = content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'tool-call': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool-call messages',\n });\n }\n }\n })\n .join('');\n\n text += `${assistant}:\\n${assistantMessage}\\n\\n`;\n break;\n }\n\n case 'tool': {\n throw new UnsupportedFunctionalityError({\n functionality: 'tool messages',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n // Assistant message prefix:\n text += `${assistant}:\\n`;\n\n return {\n prompt: text,\n stopSequences: [`\\n${user}:`],\n };\n}\n","import { LanguageModelV1LogProbs } from '@ai-sdk/provider';\n\ntype OpenAICompletionLogProps = {\n tokens: string[];\n token_logprobs: number[];\n top_logprobs: Record<string, number>[] | null;\n};\n\nexport function mapOpenAICompletionLogProbs(\n logprobs: OpenAICompletionLogProps | null | undefined,\n): LanguageModelV1LogProbs | undefined {\n return logprobs?.tokens.map((token, index) => ({\n token,\n logprob: logprobs.token_logprobs[index],\n topLogprobs: logprobs.top_logprobs\n ? 
Object.entries(logprobs.top_logprobs[index]).map(\n ([token, logprob]) => ({\n token,\n logprob,\n }),\n )\n : [],\n }));\n}\n","import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { OpenAIChatLanguageModel } from './openai-chat-language-model';\nimport { OpenAIChatModelId, OpenAIChatSettings } from './openai-chat-settings';\nimport { OpenAICompletionLanguageModel } from './openai-completion-language-model';\nimport {\n OpenAICompletionModelId,\n OpenAICompletionSettings,\n} from './openai-completion-settings';\nimport { OpenAIEmbeddingModel } from './openai-embedding-model';\nimport {\n OpenAIEmbeddingModelId,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-settings';\n\nexport interface OpenAIProvider {\n (\n modelId: 'gpt-3.5-turbo-instruct',\n settings?: OpenAICompletionSettings,\n ): OpenAICompletionLanguageModel;\n (\n modelId: OpenAIChatModelId,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an OpenAI chat model for text generation.\n */\n chat(\n modelId: OpenAIChatModelId,\n settings?: OpenAIChatSettings,\n ): OpenAIChatLanguageModel;\n\n /**\nCreates an OpenAI completion model for text generation.\n */\n completion(\n modelId: OpenAICompletionModelId,\n settings?: OpenAICompletionSettings,\n ): OpenAICompletionLanguageModel;\n\n /**\nCreates a model for text embeddings.\n */\n embedding(\n modelId: OpenAIEmbeddingModelId,\n settings?: OpenAIEmbeddingSettings,\n ): OpenAIEmbeddingModel;\n}\n\nexport interface OpenAIProviderSettings {\n /**\nBase URL for the OpenAI API calls.\n */\n baseURL?: string;\n\n /**\n@deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\nAPI key for authenticating requests.\n */\n apiKey?: string;\n\n /**\nOpenAI Organization.\n */\n organization?: string;\n\n /**\nOpenAI project.\n */\n project?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nOpenAI compatibility mode. Should be set to `strict` when using the OpenAI API,\nand `compatible` when using 3rd party providers. In `compatible` mode, newer\ninformation such as streamOptions are not being sent. Defaults to 'compatible'.\n */\n compatibility?: 'strict' | 'compatible';\n}\n\n/**\nCreate an OpenAI provider instance.\n */\nexport function createOpenAI(\n options: OpenAIProviderSettings = {},\n): OpenAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://api.openai.com/v1';\n\n // we default to compatible, because strict breaks providers like Groq:\n const compatibility = options.compatibility ?? 
'compatible';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'OPENAI_API_KEY',\n description: 'OpenAI',\n })}`,\n 'OpenAI-Organization': options.organization,\n 'OpenAI-Project': options.project,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: OpenAIChatModelId,\n settings: OpenAIChatSettings = {},\n ) =>\n new OpenAIChatLanguageModel(modelId, settings, {\n provider: 'openai.chat',\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n compatibility,\n });\n\n const createCompletionModel = (\n modelId: OpenAICompletionModelId,\n settings: OpenAICompletionSettings = {},\n ) =>\n new OpenAICompletionLanguageModel(modelId, settings, {\n provider: 'openai.completion',\n baseURL,\n headers: getHeaders,\n compatibility,\n });\n\n const createEmbeddingModel = (\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings = {},\n ) =>\n new OpenAIEmbeddingModel(modelId, settings, {\n provider: 'openai.embedding',\n baseURL,\n headers: getHeaders,\n });\n\n const provider = function (\n modelId: OpenAIChatModelId | OpenAICompletionModelId,\n settings?: OpenAIChatSettings | OpenAICompletionSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The OpenAI model function cannot be called with the new keyword.',\n );\n }\n\n if (modelId === 'gpt-3.5-turbo-instruct') {\n return createCompletionModel(\n modelId,\n settings as OpenAICompletionSettings,\n );\n }\n\n return createChatModel(modelId, settings as OpenAIChatSettings);\n };\n\n provider.chat = createChatModel;\n provider.completion = createCompletionModel;\n provider.embedding = createEmbeddingModel;\n\n return provider as OpenAIProvider;\n}\n\n/**\nDefault OpenAI provider instance. It uses 'strict' compatibility mode.\n */\nexport const openai = createOpenAI({\n compatibility: 'strict', // strict for OpenAI API\n});\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n OpenAIEmbeddingModelId,\n OpenAIEmbeddingSettings,\n} from './openai-embedding-settings';\nimport { openaiFailedResponseHandler } from './openai-error';\n\ntype OpenAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n};\n\nexport class OpenAIEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: OpenAIEmbeddingModelId;\n\n private readonly config: OpenAIEmbeddingConfig;\n private readonly settings: OpenAIEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 2048;\n }\n\n get supportsParallelCalls(): boolean {\n return this.settings.supportsParallelCalls ?? 
true;\n }\n\n constructor(\n modelId: OpenAIEmbeddingModelId,\n settings: OpenAIEmbeddingSettings,\n config: OpenAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: this.config.headers(),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n dimensions: this.settings.dimensions,\n user: this.settings.user,\n },\n failedResponseHandler: openaiFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n openaiTextEmbeddingResponseSchema,\n ),\n abortSignal,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst openaiTextEmbeddingResponseSchema = z.object({\n data: z.array(\n z.object({\n embedding: z.array(z.number()),\n }),\n ),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAAiD;;;ACAjD,sBAOO;AACP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACflB,4BAA0C;AAGnC,SAAS,4BACd,QACkB;AAClB,QAAM,WAA6B,CAAC;AAEpC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,YAAI,QAAQ,WAAW,KAAK,QAAQ,CAAC,EAAE,SAAS,QAAQ;AACtD,mBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC;AACxD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAxBvC;AAyBY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WAAW;AAAA,oBACT,KACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA,kBACxD;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,cAAc,aAAa;AAAA,YAC3B,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC5FO,SAAS,4BACd,UACqC;AAnBvC;AAoBE,UACE,gDAAU,YAAV,mBAAmB,IAAI,CAAC,EAAE,OAAO,SAAS,aAAa,OAAO;AAAA,IAC5D;AAAA,IACA;AAAA,IACA,aAAa,eACT,aAAa,IAAI,CAAC,EAAE,OAAAC,QAAO,SAAAC,SAAQ,OAAO;AAAA,MACxC,OAAAD;AAAA,MACA,SAAAC;AAAA,IACF,EAAE,IACF,CAAC;AAAA,EACP,QATA,YASO;AAEX;;;AC9BO,SAAS,sBACd,c
AC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AClBA,iBAAkB;AAClB,IAAAC,yBAA+C;AAExC,IAAM,wBAAwB,aAAE,OAAO;AAAA,EAC5C,OAAO,aAAE,OAAO;AAAA,IACd,SAAS,aAAE,OAAO;AAAA,IAClB,MAAM,aAAE,OAAO;AAAA,IACf,OAAO,aAAE,IAAI,EAAE,SAAS;AAAA,IACxB,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,uDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AJgBM,IAAM,0BAAN,MAAyD;AAAA,EAS9D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,KAAK,SAAS,aAAa,QAC3B,OAAO,KAAK,SAAS,aAAa;AAAA,MACpC,cACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAClC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACN,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA;AAAA,MAGA,UAAU,4BAA4B,MAAM;AAAA,IAC9C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,eAAO,EAAE,GAAG,UAAU,GAAG,0BAA0B,IAAI,EAAE;AAAA,MAC3D;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,aAAa,EAAE,MAAM,YAAY,UAAU,EAAE,MAAM,KAAK,KAAK,KAAK,EAAE;AAAA,UACpE,OAAO;AAAA,YACL;AAAA,cACE,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,KAAK,KAAK;AAAA,gBAChB,aAAa,KAAK,KAAK;AAAA,gBACvB,YAAY,KAAK,KAAK;AAAA,cACxB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,8CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA/IjE;AAgJI,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,cAAS;AArKzD,YAAAC;AAqK6D;AAAA,UACrD,cAAc;AAAA,UACd,aAAYA,MAAA,SAAS,OAAT,OAAAA,UAAe,mCAAW;AAAA,UACtC,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B;AAAA;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,MACX,UAAU,4BAA4B,OAAO,QAAQ;AAAA,IACvD;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,KAAK,OAAO,IAAI;AAAA,QACnB,MAAM;AAAA,QACN,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,MACD,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA;AAAA,QAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,MACR;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,UAAM,YAOD,CAAC;AAEN,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI;AAEJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AA3OvC;AA6OY,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM
,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa;AAAW,2BAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,iBAAiB,MAAM,YAAY;AAC5C,sBAAM,QAAQ,cAAc;AAG5B,oBAAI,UAAU,KAAK,KAAK,MAAM;AAC5B,sBAAI,cAAc,SAAS,YAAY;AACrC,0BAAM,IAAI,yCAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,sBAAI,cAAc,MAAM,MAAM;AAC5B,0BAAM,IAAI,yCAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,wBAAI,mBAAc,aAAd,mBAAwB,SAAQ,MAAM;AACxC,0BAAM,IAAI,yCAAyB;AAAA,sBACjC,MAAM;AAAA,sBACN,SAAS;AAAA,oBACX,CAAC;AAAA,kBACH;AAEA,4BAAU,KAAK,IAAI;AAAA,oBACjB,IAAI,cAAc;AAAA,oBAClB,MAAM;AAAA,oBACN,UAAU;AAAA,sBACR,MAAM,cAAc,SAAS;AAAA,sBAC7B,YAAW,mBAAc,SAAS,cAAvB,YAAoC;AAAA,oBACjD;AAAA,kBACF;AAEA,wBAAMC,YAAW,UAAU,KAAK;AAGhC,wBACE,KAAAA,UAAS,aAAT,mBAAmB,SAAQ,UAC3B,KAAAA,UAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAeA,UAAS,SAAS,SAAS,GAC1C;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,cAAc;AAAA,sBACd,YAAYA,UAAS;AAAA,sBACrB,UAAUA,UAAS,SAAS;AAAA,sBAC5B,eAAeA,UAAS,SAAS;AAAA,oBACnC,CAAC;AAGD,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,cAAc;AAAA,sBACd,aAAY,KAAAA,UAAS,OAAT,gBAAe,mCAAW;AAAA,sBACtC,UAAUA,UAAS,SAAS;AAAA,sBAC5B,MAAMA,UAAS,SAAS;AAAA,oBAC1B,CAAC;AAAA,kBACH;AAEA;AAAA,gBACF;AAGA,sBAAM,WAAW,UAAU,KAAK;AAEhC,sBAAI,mBAAc,aAAd,mBAAwB,cAAa,MAAM;AAC7C,2BAAS,SAAU,cACjB,yBAAc,aAAd,mBAAwB,cAAxB,YAAqC;AAAA,gBACzC;AAGA,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,gBAAe,mBAAc,SAAS,cAAvB,YAAoC;AAAA,gBACrD,CAAC;AAGD,sBACE,cAAS,aAAT,mBAAmB,SAAQ,UAC3B,cAAS,aAAT,mBAAmB,cAAa,YAChC,uCAAe,SAAS,SAAS,SAAS,GAC1C;AACA,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,aAAY,cAAS,OAAT,gBAAe,mCAAW;AAAA,oBACtC,UAAU,SAAS,SAAS;AAAA,oBAC5B,MAAM,SAAS,SAAS;AAAA,kBAC1B,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AACF;AAIA,IAAM,2BAA2B,cAAE,OAAO;AAAA,EACxC,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,YACnC,MAAM,cAAE,QAAQ,UAAU;AAAA,YAC1B,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,UAAU,cACP,OAAO;AAAA,QACN,SAAS,cACN;AAAA,UACC,cAAE,OAAO;AAAA,YACP,OAAO,cAAE,OAAO;AAAA,YAChB,SAAS,cAAE,OAAO;AAAA,YAClB,cAAc,cAAE;AAAA,cACd,cAAE,OAAO;AAAA,gBACP,OAAO,cAAE,OAAO;AAAA,gBAChB,SAAS,cAAE,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH,EACC,SAAS;AAAA,MACd,CAAC,EACA,SAAS,EACT,SAAS;AAAA,MACZ,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,wBAAwB,cAAE,MAAM;AAAA,
EACpC,cAAE,OAAO;AAAA,IACP,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,OAAO,cAAE,OAAO;AAAA,UACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,UACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,UAC5B,YAAY,cACT;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,cACvB,MAAM,cAAE,QAAQ,UAAU,EAAE,SAAS;AAAA,cACrC,UAAU,cAAE,OAAO;AAAA,gBACjB,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,gBACzB,WAAW,cAAE,OAAO,EAAE,QAAQ;AAAA,cAChC,CAAC;AAAA,YACH,CAAC;AAAA,UACH,EACC,QAAQ;AAAA,QACb,CAAC;AAAA,QACD,UAAU,cACP,OAAO;AAAA,UACN,SAAS,cACN;AAAA,YACC,cAAE,OAAO;AAAA,cACP,OAAO,cAAE,OAAO;AAAA,cAChB,SAAS,cAAE,OAAO;AAAA,cAClB,cAAc,cAAE;AAAA,gBACd,cAAE,OAAO;AAAA,kBACP,OAAO,cAAE,OAAO;AAAA,kBAChB,SAAS,cAAE,OAAO;AAAA,gBACpB,CAAC;AAAA,cACH;AAAA,YACF,CAAC;AAAA,UACH,EACC,SAAS;AAAA,QACd,CAAC,EACA,QAAQ;AAAA,QACX,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QAC9C,OAAO,cAAE,OAAO;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,QAAQ;AAAA,EACb,CAAC;AAAA,EACD;AACF,CAAC;AAED,SAAS,0BACP,MAGA;AAjgBF;AAmgBE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,OAAU;AAAA,EACpD;AAEA,QAAM,cAAc,MAAM,IAAI,WAAS;AAAA,IACrC,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,KAAK;AAAA,MACX,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,EAAE;AAEF,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,aAAa,aAAa,OAAU;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,aAAa,aAAa,KAAK;AAAA,IACjD,KAAK;AACH,aAAO;AAAA,QACL,OAAO;AAAA,QACP,aAAa;AAAA,UACX,MAAM;AAAA,UACN,UAAU;AAAA,YACR,MAAM,WAAW;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,MAAM,iCAAiC,gBAAgB,EAAE;AAAA,IACrE;AAAA,EACF;AACF;;;AK9iBA,IAAAC,mBAMO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACblB,IAAAC,mBAIO;AAEA,SAAS,gCAAgC;AAAA,EAC9C;AAAA,EACA;AAAA,EACA,OAAO;AAAA,EACP,YAAY;AACd,GAQE;AAEA,MACE,gBAAgB,YAChB,OAAO,WAAW,KAClB,OAAO,CAAC,EAAE,SAAS,UACnB,OAAO,CAAC,EAAE,QAAQ,WAAW,KAC7B,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,SAAS,QAC9B;AACA,WAAO,EAAE,QAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,KAAK;AAAA,EAC7C;AAGA,MAAI,OAAO;AAGX,MAAI,OAAO,CAAC,EAAE,SAAS,UAAU;AAC/B,YAAQ,GAAG,OAAO,CAAC,EAAE,OAAO;AAAA;AAAA;AAC5B,aAAS,OAAO,MAAM,CAAC;AAAA,EACzB;AAEA,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,cAAM,IAAI,oCAAmB;AAAA,UAC3B,SAAS;AAAA,UACT;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,cAAc,QACjB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,SAAS;AACZ,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,IAAI;AAAA,EAAM,WAAW;AAAA;AAAA;AAChC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,cAAM,mBAAmB,QACtB,IAAI,UAAQ;AACX,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,qBAAO,KAAK;AAAA,YACd;AAAA,YACA,KAAK,aAAa;AAChB,oBAAM,IAAI,+CAA8B;AAAA,gBACtC,eAAe;AAAA,cACjB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF,CAAC,EACA,KAAK,EAAE;AAEV,gBAAQ,GAAG,SAAS;AAAA,EAAM,gBAAgB;AAAA;AAAA;AAC1C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,GAAG,SAAS;AAAA;AAEpB,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,eAAe,CAAC;AAAA,EAAK,IAAI,GAAG;AAAA,EAC9B;AACF;;;ACrGO,SAAS,4BACd,UACqC;AACrC,SAAO,qCAAU,OAAO,IAAI,CAAC,OAAO,WAAW;AAAA,IAC7C;AAAA,IACA,SAAS,SAAS,eAAe,KAAK;AAAA,IACtC,aAAa,SAAS,eAClB,OAAO,QAAQ,SAAS,aAAa,KAAK,CAAC,EAAE;AAAA,MAC3C,CAAC,CAACC,QAAO,OAAO,OAAO;AAAA,QACrB,OAAAA;AAAA,QACA;AAAA,MACF;AAAA,IACF,IACA,CAAC;AAA
A,EACP;AACF;;;AFUO,IAAM,gCAAN,MAA+D;AAAA,EASpE,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAlEnD;AAmEI,UAAM,OAAO,KAAK;AAElB,UAAM,EAAE,QAAQ,kBAAkB,cAAc,IAC9C,gCAAgC,EAAE,QAAQ,YAAY,CAAC;AAEzD,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,MAAM,KAAK,SAAS;AAAA,MACpB,YAAY,KAAK,SAAS;AAAA,MAC1B,UACE,OAAO,KAAK,SAAS,aAAa,WAC9B,KAAK,SAAS,WACd,OAAO,KAAK,SAAS,aAAa,YAClC,KAAK,SAAS,WACZ,IACA,SACF;AAAA,MACN,QAAQ,KAAK,SAAS;AAAA,MACtB,MAAM,KAAK,SAAS;AAAA;AAAA,MAGpB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB;AAAA;AAAA,MAGA,QAAQ;AAAA;AAAA,MAGR,MAAM;AAAA,IACR;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,aAAI,UAAK,UAAL,mBAAY,QAAQ;AACtB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,YAAI,KAAK,YAAY;AACnB,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eAAe;AAAA,UACjB,CAAC;AAAA,QACH;AAEA,eAAO;AAAA,MACT;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,eAAe;AAClB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAC9C,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,cAAc,sBAAsB,OAAO,aAAa;AAAA,MACxD,UAAU,4BAA4B,OAAO,QAAQ;AAAA,MACrD,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,OAAO,KAAK,QAAQ,OAAO;AAEjC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG,KAAK,QAAQ,OAAO;AAAA,QACvB,QAAQ;AAAA;AAAA,QAGR,gBACE,KAAK,OAAO,kBAAkB,WAC1B,EAAE,eAAe,KAAK,IACtB;AAAA,MACR;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,QAAQ,WAAW,GAAG,YAAY,IAAI;AAE9C,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI;AAEJ,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAE3B,gBAAI,CAAC,MAAM,SAAS;AAClB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAGpB,gBAAI,WAAW,OAAO;AACpB,6BAAe;AACf,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,sBAAsB,OAAO,aAAa;AAAA,YAC3D;AAEA,iBAAI,iCAAQ,SAAQ,MAAM;AACxB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,OAAO;AAAA,cACpB,CAAC;AAAA,YACH;AAEA,kBAAM,iBAAiB;AAAA,cACrB,iCAAQ;AAAA,YACV;AACA,gBAAI,iDAAgB,QAAQ;AAC1B,kBAAI,aAAa;AAAW,2BAAW,CAAC;AACxC,uBAAS,KAAK,GAAG,cAAc;AAAA,YACjC;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAA
E,SAAS,gBAAgB;AAAA,MACxC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AACF;AAIA,IAAM,iCAAiC,cAAE,OAAO;AAAA,EAC9C,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,MAAM,cAAE,OAAO;AAAA,MACf,eAAe,cAAE,OAAO;AAAA,MACxB,UAAU,cACP,OAAO;AAAA,QACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,QAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,MACnE,CAAC,EACA,SAAS,EACT,SAAS;AAAA,IACd,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,8BAA8B,cAAE,MAAM;AAAA,EAC1C,cAAE,OAAO;AAAA,IACP,SAAS,cAAE;AAAA,MACT,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,QACf,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,QAClC,OAAO,cAAE,OAAO;AAAA,QAChB,UAAU,cACP,OAAO;AAAA,UACN,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAC1B,gBAAgB,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,UAClC,cAAc,cAAE,MAAM,cAAE,OAAO,cAAE,OAAO,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,QACnE,CAAC,EACA,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IACA,OAAO,cACJ,OAAO;AAAA,MACN,eAAe,cAAE,OAAO;AAAA,MACxB,mBAAmB,cAAE,OAAO;AAAA,IAC9B,CAAC,EACA,SAAS,EACT,SAAS;AAAA,EACd,CAAC;AAAA,EACD;AACF,CAAC;;;ANjUM,IAAM,SAAN,MAAa;AAAA;AAAA;AAAA;AAAA,EA+BlB,YAAY,UAAkC,CAAC,GAAG;AA5CpD;AA6CI,SAAK,WACH,uDAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AACF,SAAK,SAAS,QAAQ;AACtB,SAAK,eAAe,QAAQ;AAC5B,SAAK,UAAU,QAAQ;AACvB,SAAK,UAAU,QAAQ;AAAA,EACzB;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,cAAc,KAAK;AAAA,MACnB,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,eAAe,cAAU,mCAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,QACF,uBAAuB,KAAK;AAAA,QAC5B,kBAAkB,KAAK;AAAA,QACvB,GAAG,KAAK;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA4B,WAA+B,CAAC,GAAG;AAClE,WAAO,IAAI,wBAAwB,SAAS,UAAU;AAAA,MACpD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,eAAe;AAAA,MACf,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,KAAK,OAAO,GAAG,IAAI;AAAA,IAC3C,CAAC;AAAA,EACH;AAAA,EAEA,WACE,SACA,WAAqC,CAAC,GACtC;AACA,WAAO,IAAI,8BAA8B,SAAS,UAAU;AAAA,MAC1D,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;;;AS1FA,IAAAC,yBAAiD;;;ACAjD,IAAAC,mBAGO;AACP,IAAAC,yBAGO;AACP,IAAAC,cAAkB;AAaX,IAAM,uBAAN,MAA+D;AAAA,EAmBpE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAhCrC;AAiCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AApCvC;AAqCI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,QACjB,YAAY,KAAK,SAAS;AAAA,QAC1B,MAAM,KAAK,SAAS;AAAA,MACtB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACjD,MAAM,cAAE;AAAA,IACN,cAAE,OAAO;AAAA,MACP,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,IAC/B,CAAC;AAAA,EACH;AACF,CAAC;;;ADNM,SAAS,aACd,UAAkC,CAAC,GACnB;AA7FlB;AA8FE,QAAM,WACJ,uDAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AAGF,QAAM,iBAAgB,aAAQ,kBAAR,YAAyB;AAE/C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,uBAAuB,QAAQ;AAAA,IAC/B,kBAAkB,QAAQ;AAAA,IAC1B,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAA+B,CA
AC,MAEhC,IAAI,wBAAwB,SAAS,UAAU;AAAA,IAC7C,UAAU;AAAA,IACV,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAEH,QAAM,wBAAwB,CAC5B,SACA,WAAqC,CAAC,MAEtC,IAAI,8BAA8B,SAAS,UAAU;AAAA,IACnD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAoC,CAAC,MAErC,IAAI,qBAAqB,SAAS,UAAU;AAAA,IAC1C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,EACX,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,QAAI,YAAY,0BAA0B;AACxC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAA8B;AAAA,EAChE;AAEA,WAAS,OAAO;AAChB,WAAS,aAAa;AACtB,WAAS,YAAY;AAErB,SAAO;AACT;AAKO,IAAM,SAAS,aAAa;AAAA,EACjC,eAAe;AAAA;AACjB,CAAC;","names":["import_provider_utils","import_provider_utils","import_zod","token","logprob","import_provider_utils","_a","toolCall","import_provider","import_provider_utils","import_zod","import_provider","token","import_provider_utils","import_provider","import_provider_utils","import_zod"]}
package/dist/index.mjs CHANGED
@@ -231,7 +231,10 @@ var OpenAIChatLanguageModel = class {
231
231
  var _a, _b;
232
232
  const args = this.getArgs(options);
233
233
  const { responseHeaders, value: response } = await postJsonToApi({
234
- url: `${this.config.baseURL}/chat/completions`,
234
+ url: this.config.url({
235
+ path: "/chat/completions",
236
+ modelId: this.modelId
237
+ }),
235
238
  headers: this.config.headers(),
236
239
  body: args,
237
240
  failedResponseHandler: openaiFailedResponseHandler,
@@ -267,7 +270,10 @@ var OpenAIChatLanguageModel = class {
267
270
  async doStream(options) {
268
271
  const args = this.getArgs(options);
269
272
  const { responseHeaders, value: response } = await postJsonToApi({
270
- url: `${this.config.baseURL}/chat/completions`,
273
+ url: this.config.url({
274
+ path: "/chat/completions",
275
+ modelId: this.modelId
276
+ }),
271
277
  headers: this.config.headers(),
272
278
  body: {
273
279
  ...args,
@@ -919,7 +925,8 @@ var OpenAI = class {
919
925
  return new OpenAIChatLanguageModel(modelId, settings, {
920
926
  provider: "openai.chat",
921
927
  ...this.baseConfig,
922
- compatibility: "strict"
928
+ compatibility: "strict",
929
+ url: ({ path }) => `${this.baseURL}${path}`
923
930
  });
924
931
  }
925
932
  completion(modelId, settings = {}) {
@@ -1020,7 +1027,7 @@ function createOpenAI(options = {}) {
1020
1027
  });
1021
1028
  const createChatModel = (modelId, settings = {}) => new OpenAIChatLanguageModel(modelId, settings, {
1022
1029
  provider: "openai.chat",
1023
- baseURL,
1030
+ url: ({ path }) => `${baseURL}${path}`,
1024
1031
  headers: getHeaders,
1025
1032
  compatibility
1026
1033
  });