@ai-sdk/mistral 2.0.0-canary.6 → 2.0.0-canary.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
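The sources embedded in the source map below show the chat settings object being dropped: the chat factory now takes only the model id (`mistral(modelId)`), the embedding model moves from `EmbeddingModelV1` to `EmbeddingModelV2`, and `safePrompt` (plus `documentImageLimit` / `documentPageLimit`) is now parsed from `providerOptions.mistral` via `mistralProviderOptions`. A minimal sketch of the new call shape, assuming the matching `ai` canary's `generateText` and its `providerOptions` pass-through (neither is part of this package):

  import { mistral } from '@ai-sdk/mistral';
  import { generateText } from 'ai'; // assumed peer package, not included in this diff

  async function main() {
    const { text } = await generateText({
      // 2.0.0-canary.8: no second settings argument on the model factory
      model: mistral('mistral-small-latest'),
      prompt: 'Write a haiku about source maps.',
      // safePrompt moved from the chat settings to Mistral provider options
      providerOptions: {
        mistral: { safePrompt: true },
      },
    });
    console.log(text);
  }

  main();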
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV1,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV2 {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV1<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: MistralChatModelId,\n settings: MistralChatSettings = {},\n ) =>\n new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n supportsUrl(url: URL): boolean {\n return url.protocol === 'https:';\n }\n\n private getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty 
!= null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: providerOptions?.mistral?.documentImageLimit,\n document_page_limit: providerOptions?.mistral?.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n\n // extract text content.\n // image content or reference content is currently ignored.\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n return {\n text,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // extract text content.\n // image content or reference content is currently ignored.\n const textContent = extractTextContent(delta.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = body.messages[body.messages.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n textContent === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (textContent.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (textContent != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: trimLeadingSpace\n ? 
textContent.trimStart()\n : textContent,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n warnings,\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from 
'@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = 
{\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACHP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACflB,sBAGO;AAGA,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,kBAC7C;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtHO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,sDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,
KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ALzDO,IAAM,2BAAN,MAA0D;AAAA,EAU/D,YACE,SACA,UACA,QACA;AAbF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAY3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IAAI,aAAa;AAAA,EAC1B;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA3EnD;AA4EI,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,uBAAsB,wDAAiB,YAAjB,mBAA0B;AAAA,MAChD,sBAAqB,wDAAiB,YAAjB,mBAA0B;AAAA;AAAA,MAG/C,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AApKjE;AAqKI,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AAIjC,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,WAAO;AAAA,MACL;AAAA,MACA,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAC/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD
;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AAAA,YACnC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAIrB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAKpD,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAE1D,kBACE,YAAY,SAAS,eACrB,gBAAgB,YAAY,QAAQ,QAAQ,GAC5C;AAGA,oBAAI,YAAY,SAAS,YAAY,QAAQ,QAAQ;AACnD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,MAAM;AACvB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,mBACP,YAAY,UAAU,IACtB;AAAA,cACN,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,cAC1B,MAAM;AAAA,EACL,cAAE,OAAO;AAAA,EACT,cAAE;AAAA,IACA,cAAE,mBAAmB,QAAQ;AAAA,MAC3B,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,cAAE,MAAM;AAAA,UACjB,cAAE,OAAO;AAAA,UACT,cAAE,OAAO;AAAA,YACP,KAAK,cAAE,OAAO;AAAA,YACd,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAIX,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ
,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AM/dD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;APRM,SAAS,cACd,UAAmC,CAAC,GACnB;AAhGnB;AAiGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_zod","import_provider","import_provider","import_provider_utils","import_zod"]}
+ {"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n supportsUrl(url: URL): boolean {\n return url.protocol === 'https:';\n }\n\n private getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n }) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // extract text content.\n // image content or reference content is currently ignored.\n const textContent = extractTextContent(delta.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = body.messages[body.messages.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n textContent === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (textContent.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (textContent != null) {\n controller.enqueue({\n type: 'text',\n text: trimLeadingSpace ? textContent.trimStart() : textContent,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${part.data}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().nullish(),\n\n documentImageLimit: z.number().nullish(),\n documentPageLimit: z.number().nullish(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from 
'@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? 
false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBAQO;AACP,IAAAC,cAAkB;;;ACjBlB,sBAGO;AAGA,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,KAAK,IAAI;AAAA,kBAC7C;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtHO
,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,iBAAkB;AAkBX,IAAM,yBAAyB,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,aAAE,QAAQ,EAAE,QAAQ;AAAA,EAEhC,oBAAoB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,mBAAmB,aAAE,OAAO,EAAE,QAAQ;AACxC,CAAC;;;AC5BD,4BAA+C;AAC/C,IAAAC,cAAkB;AAElB,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,QAAQ,cAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,cAAE,OAAO;AAAA,EAClB,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,cAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,sDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANvDO,IAAM,2BAAN,MAA0D;AAAA,EAS/D,YAAY,SAA6B,QAA2B;AARpE,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAO3B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,IAAI,aAAa;AAAA,EAC1B;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAvEnD;AAwEI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACJ,sDAAqB;AAAA,MACnB,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJD,YAIM,CAAC;AAET,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAErD,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO
;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,MAAM,SAAS,SAAS;AAAA,QAC1B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,MAC/B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAC/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AAAA,YACnC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAIrB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAKpD,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAE1D,kBACE,YAAY,SAAS,eACrB,gBAAgB,YAAY,QAAQ,QAAQ,GAC5C;AAGA,oBAAI,YAAY,SAAS,YAAY,QAAQ,QAAQ;AACnD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,MAAM;AACvB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,MAAM,mBAAmB,YAAY,UAAU,IAAI;AAAA,cACrD,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,cAC1B,MAAM;AAAA,EACL,cAAE,OAAO;AAAA,EACT,cAAE;AAAA,IACA,cAAE,mBAAmB
,QAAQ;AAAA,MAC3B,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,cAAE,MAAM;AAAA,UACjB,cAAE,OAAO;AAAA,UACT,cAAE,OAAO;AAAA,YACP,KAAK,cAAE,OAAO;AAAA,YACd,QAAQ,cAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,cAAE,MAAM,cAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAIX,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AO9eD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARxBM,SAAS,cACd,UAAmC,CAAC,GACnB;AApFnB;AAqFE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;
AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_zod","import_zod","import_provider","import_provider","import_provider_utils","import_zod"]}
package/dist/index.mjs CHANGED
@@ -12,9 +12,10 @@ import {
  combineHeaders,
  createEventSourceResponseHandler,
  createJsonResponseHandler,
+ parseProviderOptions,
  postJsonToApi
  } from "@ai-sdk/provider-utils";
- import { z as z2 } from "zod";
+ import { z as z3 } from "zod";
 
  // src/convert-to-mistral-chat-messages.ts
  import {
@@ -139,15 +140,28 @@ function mapMistralFinishReason(finishReason) {
  }
  }
 
+ // src/mistral-chat-options.ts
+ import { z } from "zod";
+ var mistralProviderOptions = z.object({
+ /**
+ Whether to inject a safety prompt before all conversations.
+
+ Defaults to `false`.
+ */
+ safePrompt: z.boolean().nullish(),
+ documentImageLimit: z.number().nullish(),
+ documentPageLimit: z.number().nullish()
+ });
+
  // src/mistral-error.ts
  import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
- import { z } from "zod";
- var mistralErrorDataSchema = z.object({
- object: z.literal("error"),
- message: z.string(),
- type: z.string(),
- param: z.string().nullable(),
- code: z.string().nullable()
+ import { z as z2 } from "zod";
+ var mistralErrorDataSchema = z2.object({
+ object: z2.literal("error"),
+ message: z2.string(),
+ type: z2.string(),
+ param: z2.string().nullable(),
+ code: z2.string().nullable()
  });
  var mistralFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: mistralErrorDataSchema,
@@ -211,12 +225,11 @@ function prepareTools({
 
  // src/mistral-chat-language-model.ts
  var MistralChatLanguageModel = class {
- constructor(modelId, settings, config) {
+ constructor(modelId, config) {
  this.specificationVersion = "v2";
  this.defaultObjectGenerationMode = "json";
  this.supportsImageUrls = false;
  this.modelId = modelId;
- this.settings = settings;
  this.config = config;
  }
  get provider() {
@@ -240,8 +253,13 @@ var MistralChatLanguageModel = class {
  tools,
  toolChoice
  }) {
- var _a, _b;
+ var _a;
  const warnings = [];
+ const options = (_a = parseProviderOptions({
+ provider: "mistral",
+ providerOptions,
+ schema: mistralProviderOptions
+ })) != null ? _a : {};
  if (topK != null) {
  warnings.push({
  type: "unsupported-setting",
@@ -277,7 +295,7 @@ var MistralChatLanguageModel = class {
  // model id:
  model: this.modelId,
  // model specific settings:
- safe_prompt: this.settings.safePrompt,
+ safe_prompt: options.safePrompt,
  // standardized settings:
  max_tokens: maxOutputTokens,
  temperature,
@@ -286,8 +304,8 @@ var MistralChatLanguageModel = class {
  // response format:
  response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
  // mistral-specific provider options:
- document_image_limit: (_a = providerOptions == null ? void 0 : providerOptions.mistral) == null ? void 0 : _a.documentImageLimit,
- document_page_limit: (_b = providerOptions == null ? void 0 : providerOptions.mistral) == null ? void 0 : _b.documentPageLimit,
+ document_image_limit: options.documentImageLimit,
+ document_page_limit: options.documentPageLimit,
  // messages:
  messages: convertToMistralChatMessages(prompt)
  };
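The hunks above move safePrompt, documentImageLimit and documentPageLimit out of constructor settings and into per-call provider options parsed with parseProviderOptions. A minimal TypeScript sketch of the new call shape, assuming the AI SDK's generateText and illustrative prompt and limit values (not part of this diff):

import { generateText } from 'ai';
import { mistral } from '@ai-sdk/mistral';

// Sketch: options that used to live in chat settings are now read from
// providerOptions.mistral and mapped onto the request body.
const { text } = await generateText({
  model: mistral('mistral-small-latest'),
  prompt: 'Summarize this document.',
  providerOptions: {
    mistral: {
      safePrompt: true,       // mapped to safe_prompt
      documentImageLimit: 8,  // mapped to document_image_limit
      documentPageLimit: 64,  // mapped to document_page_limit
    },
  },
});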
@@ -309,7 +327,6 @@ var MistralChatLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a;
  const { args: body, warnings } = this.getArgs(options);
  const {
  responseHeaders,
@@ -327,19 +344,28 @@ var MistralChatLanguageModel = class {
  fetch: this.config.fetch
  });
  const choice = response.choices[0];
+ const content = [];
  let text = extractTextContent(choice.message.content);
  const lastMessage = body.messages[body.messages.length - 1];
  if (lastMessage.role === "assistant" && (text == null ? void 0 : text.startsWith(lastMessage.content))) {
  text = text.slice(lastMessage.content.length);
  }
+ if (text != null && text.length > 0) {
+ content.push({ type: "text", text });
+ }
+ if (choice.message.tool_calls != null) {
+ for (const toolCall of choice.message.tool_calls) {
+ content.push({
+ type: "tool-call",
+ toolCallType: "function",
+ toolCallId: toolCall.id,
+ toolName: toolCall.function.name,
+ args: toolCall.function.arguments
+ });
+ }
+ }
  return {
- text,
- toolCalls: (_a = choice.message.tool_calls) == null ? void 0 : _a.map((toolCall) => ({
- toolCallType: "function",
- toolCallId: toolCall.id,
- toolName: toolCall.function.name,
- args: toolCall.function.arguments
- })),
+ content,
  finishReason: mapMistralFinishReason(choice.finish_reason),
  usage: {
  inputTokens: response.usage.prompt_tokens,
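This hunk replaces the separate text and toolCalls fields of the doGenerate result with a single ordered content array of typed parts. A rough sketch of reading that array at the LanguageModelV2 layer, assuming a result obtained from model.doGenerate (the logging is illustrative):

// Sketch: iterate over the unified content array instead of result.text / result.toolCalls.
for (const part of result.content) {
  if (part.type === 'text') {
    process.stdout.write(part.text);
  } else if (part.type === 'tool-call') {
    // args is the raw JSON string exactly as Mistral returned it
    console.log(part.toolName, part.toolCallId, JSON.parse(part.args));
  }
}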
@@ -378,6 +404,9 @@ var MistralChatLanguageModel = class {
  return {
  stream: response.pipeThrough(
  new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
  transform(chunk, controller) {
  if (!chunk.success) {
  controller.enqueue({ type: "error", error: chunk.error });
@@ -415,8 +444,8 @@ var MistralChatLanguageModel = class {
  }
  if (textContent != null) {
  controller.enqueue({
- type: "text-delta",
- textDelta: trimLeadingSpace ? textContent.trimStart() : textContent
+ type: "text",
+ text: trimLeadingSpace ? textContent.trimStart() : textContent
  });
  trimLeadingSpace = false;
  }
@@ -445,8 +474,7 @@ var MistralChatLanguageModel = class {
  })
  ),
  request: { body },
- response: { headers: responseHeaders },
- warnings
+ response: { headers: responseHeaders }
  };
  }
  };
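The streaming hunks above emit a stream-start part carrying the call warnings (previously returned beside the stream) and rename the text-delta part to text. A hedged sketch of consuming the stream returned by model.doStream under the new part types (the handling shown is illustrative):

// Sketch: read the provider stream directly and switch on the renamed part types.
const reader = stream.getReader();
while (true) {
  const { done, value: part } = await reader.read();
  if (done) break;
  switch (part.type) {
    case 'stream-start':
      console.warn(part.warnings); // warnings now arrive as the first stream part
      break;
    case 'text':
      process.stdout.write(part.text); // was type 'text-delta' with a textDelta field
      break;
    case 'finish':
      console.log(part.finishReason, part.usage);
      break;
  }
}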
@@ -475,80 +503,80 @@ function extractTextContent(content) {
  }
  return textContent.length ? textContent.join("") : void 0;
  }
- var mistralContentSchema = z2.union([
- z2.string(),
- z2.array(
- z2.discriminatedUnion("type", [
- z2.object({
- type: z2.literal("text"),
- text: z2.string()
+ var mistralContentSchema = z3.union([
+ z3.string(),
+ z3.array(
+ z3.discriminatedUnion("type", [
+ z3.object({
+ type: z3.literal("text"),
+ text: z3.string()
  }),
- z2.object({
- type: z2.literal("image_url"),
- image_url: z2.union([
- z2.string(),
- z2.object({
- url: z2.string(),
- detail: z2.string().nullable()
+ z3.object({
+ type: z3.literal("image_url"),
+ image_url: z3.union([
+ z3.string(),
+ z3.object({
+ url: z3.string(),
+ detail: z3.string().nullable()
  })
  ])
  }),
- z2.object({
- type: z2.literal("reference"),
- reference_ids: z2.array(z2.number())
+ z3.object({
+ type: z3.literal("reference"),
+ reference_ids: z3.array(z3.number())
  })
  ])
  )
  ]).nullish();
- var mistralChatResponseSchema = z2.object({
- id: z2.string().nullish(),
- created: z2.number().nullish(),
- model: z2.string().nullish(),
- choices: z2.array(
- z2.object({
- message: z2.object({
- role: z2.literal("assistant"),
+ var mistralChatResponseSchema = z3.object({
+ id: z3.string().nullish(),
+ created: z3.number().nullish(),
+ model: z3.string().nullish(),
+ choices: z3.array(
+ z3.object({
+ message: z3.object({
+ role: z3.literal("assistant"),
  content: mistralContentSchema,
- tool_calls: z2.array(
- z2.object({
- id: z2.string(),
- function: z2.object({ name: z2.string(), arguments: z2.string() })
+ tool_calls: z3.array(
+ z3.object({
+ id: z3.string(),
+ function: z3.object({ name: z3.string(), arguments: z3.string() })
  })
  ).nullish()
  }),
- index: z2.number(),
- finish_reason: z2.string().nullish()
+ index: z3.number(),
+ finish_reason: z3.string().nullish()
  })
  ),
- object: z2.literal("chat.completion"),
- usage: z2.object({
- prompt_tokens: z2.number(),
- completion_tokens: z2.number()
+ object: z3.literal("chat.completion"),
+ usage: z3.object({
+ prompt_tokens: z3.number(),
+ completion_tokens: z3.number()
  })
  });
- var mistralChatChunkSchema = z2.object({
- id: z2.string().nullish(),
- created: z2.number().nullish(),
- model: z2.string().nullish(),
- choices: z2.array(
- z2.object({
- delta: z2.object({
- role: z2.enum(["assistant"]).optional(),
+ var mistralChatChunkSchema = z3.object({
+ id: z3.string().nullish(),
+ created: z3.number().nullish(),
+ model: z3.string().nullish(),
+ choices: z3.array(
+ z3.object({
+ delta: z3.object({
+ role: z3.enum(["assistant"]).optional(),
  content: mistralContentSchema,
- tool_calls: z2.array(
- z2.object({
- id: z2.string(),
- function: z2.object({ name: z2.string(), arguments: z2.string() })
+ tool_calls: z3.array(
+ z3.object({
+ id: z3.string(),
+ function: z3.object({ name: z3.string(), arguments: z3.string() })
  })
  ).nullish()
  }),
- finish_reason: z2.string().nullish(),
- index: z2.number()
+ finish_reason: z3.string().nullish(),
+ index: z3.number()
  })
  ),
- usage: z2.object({
- prompt_tokens: z2.number(),
- completion_tokens: z2.number()
+ usage: z3.object({
+ prompt_tokens: z3.number(),
+ completion_tokens: z3.number()
  }).nullish()
  });
 
@@ -561,10 +589,10 @@ import {
  createJsonResponseHandler as createJsonResponseHandler2,
  postJsonToApi as postJsonToApi2
  } from "@ai-sdk/provider-utils";
- import { z as z3 } from "zod";
+ import { z as z4 } from "zod";
  var MistralEmbeddingModel = class {
  constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
+ this.specificationVersion = "v2";
  this.modelId = modelId;
  this.settings = settings;
  this.config = config;
@@ -593,7 +621,11 @@ var MistralEmbeddingModel = class {
  values
  });
  }
- const { responseHeaders, value: response } = await postJsonToApi2({
+ const {
+ responseHeaders,
+ value: response,
+ rawValue
+ } = await postJsonToApi2({
  url: `${this.config.baseURL}/embeddings`,
  headers: combineHeaders2(this.config.headers(), headers),
  body: {
@@ -611,13 +643,13 @@ var MistralEmbeddingModel = class {
  return {
  embeddings: response.data.map((item) => item.embedding),
  usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
- rawResponse: { headers: responseHeaders }
+ response: { headers: responseHeaders, body: rawValue }
  };
  }
  };
- var MistralTextEmbeddingResponseSchema = z3.object({
- data: z3.array(z3.object({ embedding: z3.array(z3.number()) })),
- usage: z3.object({ prompt_tokens: z3.number() }).nullish()
+ var MistralTextEmbeddingResponseSchema = z4.object({
+ data: z4.array(z4.object({ embedding: z4.array(z4.number()) })),
+ usage: z4.object({ prompt_tokens: z4.number() }).nullish()
  });
 
  // src/mistral-provider.ts
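The embedding hunks above bump the model to specificationVersion v2 and expose the parsed response body under response instead of rawResponse. A small sketch of calling doEmbed directly, assuming the provider instance exported by this package and the mistral-embed model id (values shown are illustrative):

// Sketch: doEmbed now returns response.headers plus the raw parsed body.
const embeddingModel = mistral.textEmbeddingModel('mistral-embed');
const { embeddings, usage, response } = await embeddingModel.doEmbed({
  values: ['hello world'],
});
console.log(embeddings[0].length, usage?.tokens, response?.headers);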
@@ -632,7 +664,7 @@ function createMistral(options = {}) {
  })}`,
  ...options.headers
  });
- const createChatModel = (modelId, settings = {}) => new MistralChatLanguageModel(modelId, settings, {
+ const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
  provider: "mistral.chat",
  baseURL,
  headers: getHeaders,
@@ -644,13 +676,13 @@ function createMistral(options = {}) {
  headers: getHeaders,
  fetch: options.fetch
  });
- const provider = function(modelId, settings) {
+ const provider = function(modelId) {
  if (new.target) {
  throw new Error(
  "The Mistral model function cannot be called with the new keyword."
  );
  }
- return createChatModel(modelId, settings);
+ return createChatModel(modelId);
  };
  provider.languageModel = createChatModel;
  provider.chat = createChatModel;