@ai-sdk/google 2.0.0-alpha.13 → 2.0.0-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/dist/index.js +7 -7
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +7 -7
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +7 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +7 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,24 @@
 # @ai-sdk/google
 
+## 2.0.0-alpha.15
+
+### Patch Changes
+
+- Updated dependencies [48d257a]
+- Updated dependencies [8ba77a7]
+  - @ai-sdk/provider@2.0.0-alpha.15
+  - @ai-sdk/provider-utils@3.0.0-alpha.15
+
+## 2.0.0-alpha.14
+
+### Patch Changes
+
+- Updated dependencies [b5da06a]
+- Updated dependencies [63f9e9b]
+- Updated dependencies [2e13791]
+  - @ai-sdk/provider@2.0.0-alpha.14
+  - @ai-sdk/provider-utils@3.0.0-alpha.14
+
 ## 2.0.0-alpha.13
 
 ### Patch Changes
package/dist/index.js
CHANGED
@@ -330,7 +330,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
               return {
                 functionCall: {
                   name: part.toolName,
-                  args: part.args
+                  args: part.input
                 }
               };
             }
@@ -348,7 +348,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
           name: part.toolName,
           response: {
             name: part.toolName,
-            content: part.result
+            content: part.output
           }
         }
       }))
@@ -483,13 +483,13 @@ function prepareTools({
   }
   const functionDeclarations = [];
   for (const tool of tools) {
-    if (tool.type === "provider-defined") {
+    if (tool.type === "provider-defined-client" || tool.type === "provider-defined-server") {
       toolWarnings.push({ type: "unsupported-tool", tool });
     } else {
       functionDeclarations.push({
         name: tool.name,
         description: (_a = tool.description) != null ? _a : "",
-        parameters: convertJSONSchemaToOpenAPISchema(tool.parameters)
+        parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
       });
     }
   }
@@ -696,7 +696,7 @@ var GoogleGenerativeAILanguageModel = class {
           toolCallType: "function",
           toolCallId: this.config.generateId(),
           toolName: part.functionCall.name,
-          args: JSON.stringify(part.functionCall.args)
+          input: JSON.stringify(part.functionCall.args)
         });
       } else if ("inlineData" in part) {
         content.push({
@@ -829,14 +829,14 @@ var GoogleGenerativeAILanguageModel = class {
                   toolCallType: "function",
                   toolCallId: toolCall.toolCallId,
                   toolName: toolCall.toolName,
-                  argsTextDelta: toolCall.args
+                  inputTextDelta: toolCall.args
                 });
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
                   toolCallId: toolCall.toolCallId,
                   toolName: toolCall.toolName,
-                  args: toolCall.args
+                  input: toolCall.args
                 });
                 hasToolCalls = true;
               }
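Taken together, these hunks are a naming change in how the provider hands tool data to the AI SDK core: tool definitions are read from `inputSchema` instead of `parameters`, generated tool calls carry `input` instead of `args`, streamed deltas use `inputTextDelta` instead of `argsTextDelta`, tool results are read from `output` instead of `result`, and the `provider-defined` tool type check is split into `provider-defined-client` and `provider-defined-server`. The sketch below mirrors the mapping in the hunk at line 696 with stand-alone types; `GoogleFunctionCallPart`, `ToolCallContent`, and the example data are illustrative only, not types or values exported by the package.

```ts
// Illustrative sketch of the renamed mapping in dist/index.js (~line 696).
// These interfaces are stand-ins for the shapes visible in the diff above,
// not the package's public API.
interface GoogleFunctionCallPart {
  functionCall: { name: string; args: unknown };
}

interface ToolCallContent {
  type: 'tool-call';
  toolCallType: 'function';
  toolCallId: string;
  toolName: string;
  input: string; // was `args` in 2.0.0-alpha.13
}

function toToolCallContent(
  part: GoogleFunctionCallPart,
  generateId: () => string,
): ToolCallContent {
  return {
    type: 'tool-call',
    toolCallType: 'function',
    toolCallId: generateId(),
    toolName: part.functionCall.name,
    // The Google response still uses `functionCall.args`; only the field handed
    // to the SDK core is renamed, and it remains a JSON-serialized string.
    input: JSON.stringify(part.functionCall.args),
  };
}

// Example: a functionCall part from a Gemini response becomes a tool-call content part.
const example = toToolCallContent(
  { functionCall: { name: 'getWeather', args: { city: 'Berlin' } } },
  () => 'call_1',
);
console.log(example.input); // {"city":"Berlin"}
```

Consumers that previously read `args` from `tool-call` stream parts would read `input` instead, and `inputTextDelta` rather than `argsTextDelta` on the delta parts, per the hunk at line 829.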
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- [old source map: one minified JSON line covering the src/*.ts sources; omitted]
+ [regenerated source map: one minified JSON line; omitted]
C/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB,MAAM;AAAA,IACL,cAAE,OAAO;AAAA,MACP,0BAA0B,cAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,cAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,cAAE,OAAO;AAAA,EACzC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAa,cAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,yBAAyB,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJ9hBM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA;AAAA,UAGH,IAAI,OAAO,IAAI,OAAO,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider","import_provider_utils","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","import_provider","import_provider_utils","import_zod","import_provider","dynamicRetrievalConfig","generateId","generateId","_a"]}
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-embedding-model.ts","../src/google-error.ts","../src/google-generative-ai-embedding-options.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-generative-ai-options.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":["export type { GoogleErrorData } from './google-error';\nexport type { GoogleGenerativeAIProviderOptions } from './google-generative-ai-options';\nexport type { GoogleGenerativeAIProviderMetadata } from './google-generative-ai-prompt';\nexport { createGoogleGenerativeAI, google } from './google-provider';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport { GoogleGenerativeAIEmbeddingModelId } from './google-generative-ai-embedding-options';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport { GoogleGenerativeAIModelId } from './google-generative-ai-options';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV2 {\n (modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n languageModel(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n chat(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (modelId: GoogleGenerativeAIModelId) =>\n new GoogleGenerativeAILanguageModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n supportedUrls: () => ({\n '*': [\n // Only allow requests to the Google Generative Language \"files\" endpoint\n // e.g. https://generativelanguage.googleapis.com/v1beta/files/...\n new RegExp(`^${baseURL}/files/.*$`),\n ],\n }),\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: GoogleGenerativeAIEmbeddingModelId) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: GoogleGenerativeAIModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n googleGenerativeAIEmbeddingProviderOptions,\n} from './google-generative-ai-embedding-options';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n 
Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n // Parse provider options\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIEmbeddingProviderOptions,\n });\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: googleOptions?.outputDimensionality,\n taskType: googleOptions?.taskType,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIEmbeddingModelId =\n | 'text-embedding-004'\n | (string & {});\n\nexport const googleGenerativeAIEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. 
Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n});\n\nexport type GoogleGenerativeAIEmbeddingProviderOptions = z.infer<\n typeof googleGenerativeAIEmbeddingProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n googleGenerativeAIProviderOptions,\n} from './google-generative-ai-options';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: GoogleGenerativeAIModelId;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptions,\n });\n\n // Add warning if includeThoughts is used with a non-Vertex Google provider\n if (\n googleOptions?.thinkingConfig?.includeThoughts === true &&\n !this.config.provider.startsWith('google.vertex.')\n ) {\n warnings.push({\n type: 'other',\n message:\n \"The 'includeThoughts' option is only supported with the Google Vertex provider \" +\n 'and might not be supported or could behave unexpectedly with the current Google provider ' +\n `(${this.config.provider}).`,\n });\n }\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: googleOptions?.useSearchGrounding ?? false,\n dynamicRetrievalConfig: googleOptions?.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n // TODO convert into provider option\n (googleOptions?.structuredOutputs ?? true)\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(googleOptions?.audioTimestamp && {\n audioTimestamp: googleOptions.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n thinkingConfig: googleOptions?.thinkingConfig,\n },\n contents,\n systemInstruction,\n safetySettings: googleOptions?.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: googleOptions?.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? 
[]);\n\n const usageMetadata = response.usageMetadata;\n\n // Build content array from all parts\n for (const part of parts) {\n if ('text' in part && part.text != null && part.text.length > 0) {\n if (part.thought === true) {\n content.push({ type: 'reasoning', text: part.text });\n } else {\n content.push({ type: 'text', text: part.text });\n }\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n input: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n totalTokens: usageMetadata?.totalTokenCount ?? undefined,\n reasoningTokens: usageMetadata?.thoughtsTokenCount ?? undefined,\n cachedInputTokens: usageMetadata?.cachedContentTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? 
undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n usage.totalTokens = usageMetadata.totalTokenCount ?? undefined;\n usage.reasoningTokens =\n usageMetadata.thoughtsTokenCount ?? undefined;\n usage.cachedInputTokens =\n usageMetadata.cachedContentTokenCount ?? undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n // Process text parts individually to handle reasoning parts\n const parts = content.parts ?? [];\n for (const part of parts) {\n if (\n 'text' in part &&\n part.text != null &&\n part.text.length > 0\n ) {\n if (part.thought === true) {\n controller.enqueue({ type: 'reasoning', text: part.text });\n } else {\n controller.enqueue({ type: 'text', text: part.text });\n }\n }\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n inputTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n input: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? null,\n },\n };\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? 
undefined\n : textParts.map(part => part.text).join('');\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n parts: z\n .array(\n z.union([\n // note: order matters since text can be fully empty\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n z.object({\n text: z.string().nullish(),\n thought: z.boolean().nullish(),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string().nullish(),\n probability: z.string().nullish(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst usageSchema = z.object({\n cachedContentTokenCount: z.number().nullish(),\n thoughtsTokenCount: z.number().nullish(),\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits 
breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: usageSchema.nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport {\n convertToBase64,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.input,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.output,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIModelId =\n // Stable models\n // https://ai.google.dev/gemini-api/docs/models/gemini\n | 'gemini-1.5-flash'\n | 'gemini-1.5-flash-latest'\n | 'gemini-1.5-flash-001'\n | 'gemini-1.5-flash-002'\n | 'gemini-1.5-flash-8b'\n | 'gemini-1.5-flash-8b-latest'\n | 'gemini-1.5-flash-8b-001'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-pro-latest'\n | 'gemini-1.5-pro-001'\n | 'gemini-1.5-pro-002'\n | 'gemini-2.0-flash'\n | 'gemini-2.0-flash-001'\n | 'gemini-2.0-flash-live-001'\n | 'gemini-2.0-flash-lite'\n | 'gemini-2.0-pro-exp-02-05'\n | 'gemini-2.0-flash-thinking-exp-01-21'\n | 'gemini-2.0-flash-exp'\n // Experimental models\n // https://ai.google.dev/gemini-api/docs/models/experimental-models\n | 'gemini-2.5-pro-exp-03-25'\n | 'gemini-2.5-flash-preview-04-17'\n | 'gemini-exp-1206'\n | 'gemma-3-27b-it'\n | 'learnlm-1.5-pro-experimental'\n | (string & {});\n\nconst dynamicRetrievalConfig = z.object({\n /**\n * The mode of the predictor to be used in dynamic retrieval.\n */\n mode: z.enum(['MODE_UNSPECIFIED', 'MODE_DYNAMIC']).optional(),\n\n /**\n * The threshold to be used in dynamic retrieval. If not set, a system default\n * value is used.\n */\n dynamicThreshold: z.number().optional(),\n});\n\nexport type DynamicRetrievalConfig = z.infer<typeof dynamicRetrievalConfig>;\n\nexport const googleGenerativeAIProviderOptions = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).optional(),\n\n thinkingConfig: z\n .object({\n thinkingBudget: z.number().optional(),\n includeThoughts: z.boolean().optional(),\n })\n .optional(),\n\n /**\nOptional.\nThe name of the cached content used as context to serve the prediction.\nFormat: cachedContents/{cachedContent}\n */\n cachedContent: z.string().optional(),\n\n /**\n * Optional. Enable structured output. Default is true.\n *\n * This is useful when the JSON Schema contains elements that are\n * not supported by the OpenAPI schema version that\n * Google Generative AI uses. You can use this to disable\n * structured outputs if you need to.\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\nOptional. 
A list of unique safety settings for blocking unsafe content.\n */\n safetySettings: z\n .array(\n z.object({\n category: z.enum([\n 'HARM_CATEGORY_UNSPECIFIED',\n 'HARM_CATEGORY_HATE_SPEECH',\n 'HARM_CATEGORY_DANGEROUS_CONTENT',\n 'HARM_CATEGORY_HARASSMENT',\n 'HARM_CATEGORY_SEXUALLY_EXPLICIT',\n 'HARM_CATEGORY_CIVIC_INTEGRITY',\n ]),\n threshold: z.enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ]),\n }),\n )\n .optional(),\n\n threshold: z\n .enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ])\n .optional(),\n\n /**\n * Optional. Enables timestamp understanding for audio-only files.\n *\n * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding\n */\n audioTimestamp: z.boolean().optional(),\n\n /**\nOptional. When enabled, the model will use Google search to ground the response.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/overview\n */\n useSearchGrounding: z.boolean().optional(),\n\n /**\nOptional. Specifies the dynamic retrieval configuration.\n\n@note Dynamic retrieval is only compatible with Gemini 1.5 Flash.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-with-google-search#dynamic-retrieval\n */\n dynamicRetrievalConfig: dynamicRetrievalConfig.optional(),\n});\n\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptions\n>;\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-options';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? { googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? 
{}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (\n tool.type === 'provider-defined-client' ||\n tool.type === 'provider-defined-server'\n ) {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAKO;;;ACXP,sBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACZlB,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,cAAkB;AAMX,IAAM,6CAA6C,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,sBAAsB,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAU,cACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AACd,CAAC;;;AFXM,IAAM,mCAAN,MAEP;AAAA,EAWE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAW/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EATA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EASA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAEA,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,+CAAe;AAAA,UACrC,UAAU,+CAAe;AAAA,QAC3B,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AGrGD,IAAAC,yBAUO;AACP,IAAAC,cAAkB;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO
,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChIA,IAAAC,mBAGO;AAMP,IAAAC,yBAGO;AAEA,SAAS,oCACd,QAC0B;AAC1B,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC5JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,cAAkB;AAgClB,IAAM,yBAAyB,cAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItC,MAAM,cAAE,KAAK,CAAC,oBAAoB,cAAc,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5D,kBAAkB,cAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAIM,IAAM,oCAAoC,cAAE,OAAO;AAAA,EACxD,oBAAoB,cAAE,MAAM,cAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,EAEhE,gBAAgB,cACb,OAAO;AAAA,IACN,gBAAgB,cAAE,OAAO,EAAE,SAAS;AAAA,IACpC,iBAAiB,cAAE,QAAQ,EAAE,SAAS;AAAA,EACxC,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,eAAe,cAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUnC,mBAAmB,cAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,gBAAgB,cACb;AAAA,IACC,cAAE,OAAO;AAAA,MACP,UAAU,cAAE,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,WAAW,cAAE,KAAK;AAAA,QAChB;
AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,SAAS;AAAA,EAEZ,WAAW,cACR,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,gBAAgB,cAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrC,oBAAoB,cAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,wBAAwB,uBAAuB,SAAS;AAC1D,CAAC;;;ACrID,IAAAC,mBAIO;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAAC;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAACA,0BAC1B,CAAC,IACD,EAAE,wBAAAA,wBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QACE,KAAK,SAAS,6BACd,KAAK,SAAS,2BACd;AACA,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,WAAW;AAAA,MAC/D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC1IO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANkBO,IAAM,kCAAN,MAAiE;AAAA,EAOtE,YACE,SACA,QACA;AATF,SAAS,uBAAuB;AAU9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAlEtB;AAmEI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApFnD;AAqFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UACE,oDAAe,mBAAf,mBAA+B,qBAAoB,QACnD,CAAC,KAAK,OAAO,SAAS,WAAW,gBAAgB,GACjD;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SACE,4KAEI,KAAK,OAAO,QAAQ;AAAA,MAC5B,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,oDAAe,uBAAf,YAAqC;AAAA,MACzD,wBAAwB,+CAAe;AAAA,MACvC,SAAS,KAAK;AAAA,IAChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA;AAAA,YAIxB,oDAAe,sBAAf,YAAoC,QACjC,iCAAiC,eAAe,M
AAM,IACtD;AAAA,UACN,IAAI,+CAAe,mBAAkB;AAAA,YACnC,gBAAgB,cAAc;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,UACnC,gBAAgB,+CAAe;AAAA,QACjC;AAAA,QACA;AAAA,QACA;AAAA,QACA,gBAAgB,+CAAe;AAAA,QAC/B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,+CAAe;AAAA,MAChC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxKjE;AAyKI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,UAAM,gBAAgB,SAAS;AAG/B,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,QAAQ,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC/D,YAAI,KAAK,YAAY,MAAM;AACzB,kBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QACrD,OAAO;AACL,kBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,QAChD;AAAA,MACF,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,OAAO,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC9C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,QACrD,cAAa,oDAAe,oBAAf,YAAkC;AAAA,QAC/C,kBAAiB,oDAAe,uBAAf,YAAqC;AAAA,QACtD,oBAAmB,oDAAe,4BAAf,YAA0C;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,mBAAyD;AAE7D,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzTvC;AA0TY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AACxC,oBAAM,eAAc,mBAAc,oBAAd,YAAiC;AACrD,oBAAM,mBACJ,mBAAc,uBAAd,YAAoC;AACtC,oBAAM,qBACJ,mBAAc,4BAAd,YAAyC;AAAA,YAC7C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;A
AEnB,oBAAM,SAAQ,aAAQ,UAAR,YAAiB,CAAC;AAChC,yBAAW,QAAQ,OAAO;AACxB,oBACE,UAAU,QACV,KAAK,QAAQ,QACb,KAAK,KAAK,SAAS,GACnB;AACA,sBAAI,KAAK,YAAY,MAAM;AACzB,+BAAW,QAAQ,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,kBAC3D,OAAO;AACL,+BAAW,QAAQ,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,kBACtD;AAAA,gBACF;AAAA,cACF;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,gBAAgB,SAAS;AAAA,kBAC3B,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,OAAO,SAAS;AAAA,kBAClB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAYA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAC;AACF,GAGwC;AAjfxC;AAkfE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,OAAO,cACJ;AAAA,IACC,cAAE,MAAM;AAAA;AAAA,MAEN,cAAE,OAAO;AAAA,QACP,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,YAAY,cAAE,OAAO;AAAA,UACnB,UAAU,cAAE,OAAO;AAAA,UACnB,MAAM,cAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,QACzB,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuB,cAAE,OAAO;AAAA,EACpC,KAAK,cAAE,OAAO,EAAE,KAAK,cAAE,OAAO,GAAG,OAAO,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkB,cAAE,OAAO,EAAE,KAAK,cAAE,OAAO,GAAG,OAAO,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0B,cAAE,OAAO;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,OAAO,EAAE,iBAAiB,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiB,cAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmB,cAChB;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,cAAE,MAAM,cAAE,OAAO,CAAC,E
AAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB,MAAM;AAAA,IACL,cAAE,OAAO;AAAA,MACP,0BAA0B,cAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,cAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,cAAE,OAAO;AAAA,EACzC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAa,cAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,yBAAyB,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJ9hBM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA;AAAA,UAGH,IAAI,OAAO,IAAI,OAAO,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider","import_provider_utils","import_provider_utils","import_zod","import_zod","import_provider_utils","import_zod","import_provider","import_provider_utils","import_zod","import_provider","dynamicRetrievalConfig","generateId","generateId","_a"]}
package/dist/index.mjs
CHANGED
@@ -328,7 +328,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
   return {
     functionCall: {
       name: part.toolName,
-      args: part.
+      args: part.input
     }
   };
 }
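The hunk above mirrors the index.js change: the converter now reads the SDK tool-call part's payload from `input` (the removed line is truncated, so the old field name is not shown here) and forwards it as Gemini's functionCall arguments. A minimal TypeScript sketch of that mapping, using simplified stand-in types rather than the actual @ai-sdk/provider interfaces:

// Simplified stand-ins for illustration only; not the real SDK types.
interface ToolCallPart {
  type: 'tool-call';
  toolCallId: string;
  toolName: string;
  input: unknown; // field read by the converter after this change
}

interface GeminiFunctionCallPart {
  functionCall: { name: string; args: unknown };
}

// Maps an assistant tool-call part onto Gemini's functionCall shape.
function toFunctionCallPart(part: ToolCallPart): GeminiFunctionCallPart {
  return {
    functionCall: {
      name: part.toolName,
      args: part.input, // the SDK-side `input` still lands in Gemini's `args` field
    },
  };
}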
@@ -346,7 +346,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
       name: part.toolName,
       response: {
         name: part.toolName,
-        content: part.
+        content: part.output
       }
     }
   }))
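On the tool-result side, the converter now reads the part's payload from `output` (the removed line is truncated, so the old field name is not shown) and wraps it in Gemini's functionResponse. A sketch under the same simplified-type assumption:

// Simplified stand-ins for illustration only; not the real SDK types.
interface ToolResultPart {
  type: 'tool-result';
  toolCallId: string;
  toolName: string;
  output: unknown; // field read by the converter after this change
}

interface GeminiFunctionResponsePart {
  functionResponse: {
    name: string;
    response: { name: string; content: unknown };
  };
}

function toFunctionResponsePart(part: ToolResultPart): GeminiFunctionResponsePart {
  return {
    functionResponse: {
      name: part.toolName,
      response: { name: part.toolName, content: part.output },
    },
  };
}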
@@ -483,13 +483,13 @@ function prepareTools({
 }
 const functionDeclarations = [];
 for (const tool of tools) {
-  if (tool.type === "provider-defined") {
+  if (tool.type === "provider-defined-client" || tool.type === "provider-defined-server") {
     toolWarnings.push({ type: "unsupported-tool", tool });
   } else {
     functionDeclarations.push({
       name: tool.name,
       description: (_a = tool.description) != null ? _a : "",
-      parameters: convertJSONSchemaToOpenAPISchema(tool.
+      parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
     });
   }
 }
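In prepareTools, provider-defined tools are now matched against the split "provider-defined-client" / "provider-defined-server" type tags, and function tools contribute a declaration built from `inputSchema` (the removed line is truncated, so the old field name is not shown). A self-contained TypeScript sketch of that branching, with assumed simplified tool shapes and an identity stand-in for the schema converter:

type SdkTool =
  | { type: 'function'; name: string; description?: string; inputSchema: unknown }
  | { type: 'provider-defined-client' | 'provider-defined-server'; name: string };

interface FunctionDeclaration {
  name: string;
  description: string;
  parameters: unknown;
}

// Identity stand-in; the real helper rewrites JSON Schema into the
// OpenAPI-style schema that the Gemini API expects.
const convertJSONSchemaToOpenAPISchema = (schema: unknown): unknown => schema;

function prepareFunctionDeclarations(tools: SdkTool[]) {
  const functionDeclarations: FunctionDeclaration[] = [];
  const toolWarnings: Array<{ type: 'unsupported-tool'; tool: SdkTool }> = [];

  for (const tool of tools) {
    if (
      tool.type === 'provider-defined-client' ||
      tool.type === 'provider-defined-server'
    ) {
      // Provider-defined tools are not forwarded to Gemini; they only raise a warning.
      toolWarnings.push({ type: 'unsupported-tool', tool });
    } else {
      functionDeclarations.push({
        name: tool.name,
        description: tool.description ?? '',
        parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema),
      });
    }
  }

  return { functionDeclarations, toolWarnings };
}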
@@ -696,7 +696,7 @@ var GoogleGenerativeAILanguageModel = class {
     toolCallType: "function",
     toolCallId: this.config.generateId(),
     toolName: part.functionCall.name,
-
+    input: JSON.stringify(part.functionCall.args)
   });
 } else if ("inlineData" in part) {
   content.push({
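In the non-streaming response path, a Gemini functionCall part is surfaced as a tool-call content item whose `input` is the JSON-serialized argument object (the removed line is blank in this diff, so the old field name is not shown). A sketch with simplified types; `generateId` stands for whatever ID factory the model config supplies:

interface GeminiFunctionCall {
  functionCall: { name: string; args: unknown };
}

interface ToolCallContent {
  type: 'tool-call';
  toolCallType: 'function';
  toolCallId: string;
  toolName: string;
  input: string; // serialized arguments
}

function toToolCallContent(
  part: GeminiFunctionCall,
  generateId: () => string,
): ToolCallContent {
  return {
    type: 'tool-call',
    toolCallType: 'function',
    toolCallId: generateId(),
    toolName: part.functionCall.name,
    input: JSON.stringify(part.functionCall.args),
  };
}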
@@ -829,14 +829,14 @@ var GoogleGenerativeAILanguageModel = class {
     toolCallType: "function",
     toolCallId: toolCall.toolCallId,
     toolName: toolCall.toolName,
-
+    inputTextDelta: toolCall.args
   });
   controller.enqueue({
     type: "tool-call",
     toolCallType: "function",
     toolCallId: toolCall.toolCallId,
     toolName: toolCall.toolName,
-
+    input: toolCall.args
   });
   hasToolCalls = true;
 }
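In the streaming path, each parsed tool call is enqueued twice: once as a delta event carrying `inputTextDelta` and once as a completed tool call carrying `input` (both removed lines are blank in this diff). The delta event's `type` tag sits outside this hunk, so the value used below is an assumption; the stream-part shapes are likewise simplified stand-ins:

interface ParsedToolCall {
  toolCallId: string;
  toolName: string;
  args: string; // serialized arguments as received from Gemini
}

type StreamPart =
  | {
      type: 'tool-call-delta'; // assumed tag; not visible in the hunk above
      toolCallType: 'function';
      toolCallId: string;
      toolName: string;
      inputTextDelta: string;
    }
  | {
      type: 'tool-call';
      toolCallType: 'function';
      toolCallId: string;
      toolName: string;
      input: string;
    };

function emitToolCall(
  toolCall: ParsedToolCall,
  enqueue: (part: StreamPart) => void,
): void {
  enqueue({
    type: 'tool-call-delta',
    toolCallType: 'function',
    toolCallId: toolCall.toolCallId,
    toolName: toolCall.toolName,
    inputTextDelta: toolCall.args,
  });
  enqueue({
    type: 'tool-call',
    toolCallType: 'function',
    toolCallId: toolCall.toolCallId,
    toolName: toolCall.toolName,
    input: toolCall.args,
  });
}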