@ai-sdk/google 2.0.0-beta.4 → 2.0.0-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @ai-sdk/google
 
+ ## 2.0.0-beta.6
+
+ ### Patch Changes
+
+ - 878bf45: removes (unsupported) `additionalProperties` from the Schema sent in the request payloads to Google APIs
+
+ ## 2.0.0-beta.5
+
+ ### Patch Changes
+
+ - 42fcd32: feat(google): automatically handle system instructions for Gemma models
+
  ## 2.0.0-beta.4
 
  ### Patch Changes
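The beta.6 entry refers to the `convertJSONSchemaToOpenAPISchema` helper changed in `dist/index.js` below: `additionalProperties` is no longer copied into the schema embedded in the request payload, since the Google APIs do not support it. A minimal before/after sketch (the schema and field names here are illustrative, not taken from the package):

```ts
// Illustrative JSON Schema, e.g. from a tool definition or a JSON response format.
const jsonSchema = {
  type: 'object',
  properties: {
    city: { type: 'string' },
    population: { type: 'number' },
  },
  required: ['city'],
  additionalProperties: false, // not supported by the Google APIs
};

// Schema as embedded in the request payload from 2.0.0-beta.6 on (sketch):
const converted = {
  type: 'object',
  required: ['city'],
  properties: {
    city: { type: 'string' },
    population: { type: 'number' },
  },
  // `additionalProperties` is now dropped; beta.4/beta.5 passed it through.
};
```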
package/dist/index.js CHANGED
@@ -173,8 +173,7 @@ function convertJSONSchemaToOpenAPISchema(jsonSchema) {
  format,
  const: constValue,
  minLength,
- enum: enumValues,
- additionalProperties
+ enum: enumValues
  } = jsonSchema;
  const result = {};
  if (description)
@@ -245,11 +244,6 @@ function convertJSONSchemaToOpenAPISchema(jsonSchema) {
  if (minLength !== void 0) {
  result.minLength = minLength;
  }
- if (additionalProperties === true) {
- result.additionalProperties = true;
- } else if (additionalProperties) {
- result.additionalProperties = convertJSONSchemaToOpenAPISchema(additionalProperties);
- }
  return result;
  }
  function isEmptyObjectSchema(jsonSchema) {
@@ -259,10 +253,12 @@ function isEmptyObjectSchema(jsonSchema) {
  // src/convert-to-google-generative-ai-messages.ts
  var import_provider2 = require("@ai-sdk/provider");
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
- function convertToGoogleGenerativeAIMessages(prompt) {
+ function convertToGoogleGenerativeAIMessages(prompt, options) {
+ var _a;
  const systemInstructionParts = [];
  const contents = [];
  let systemMessagesAllowed = true;
+ const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
@@ -363,8 +359,12 @@ function convertToGoogleGenerativeAIMessages(prompt) {
  }
  }
  }
+ if (isGemmaModel && systemInstructionParts.length > 0 && contents.length > 0 && contents[0].role === "user") {
+ const systemText = systemInstructionParts.map((part) => part.text).join("\n\n");
+ contents[0].parts.unshift({ text: systemText + "\n\n" });
+ }
  return {
- systemInstruction: systemInstructionParts.length > 0 ? { parts: systemInstructionParts } : void 0,
+ systemInstruction: systemInstructionParts.length > 0 && !isGemmaModel ? { parts: systemInstructionParts } : void 0,
  contents
  };
  }
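Sketch of what the new `isGemmaModel` branch does to a prompt that starts with a system message (the prompt text is made up for illustration; the shapes follow the code above):

```ts
// Prompt as passed to the model (LanguageModelV2Prompt shape):
const prompt = [
  { role: 'system', content: 'Answer in French.' },
  { role: 'user', content: [{ type: 'text', text: 'Hello' }] },
];

// Expected result of convertToGoogleGenerativeAIMessages(prompt, { isGemmaModel: true }):
// {
//   systemInstruction: undefined,          // suppressed for Gemma models
//   contents: [
//     { role: 'user', parts: [
//       { text: 'Answer in French.\n\n' }, // joined system text, prepended via unshift
//       { text: 'Hello' },
//     ] },
//   ],
// }
```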
@@ -615,14 +615,11 @@ var GoogleGenerativeAILanguageModel = class {
  message: `The 'includeThoughts' option is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
  });
  }
- const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
- if (isGemmaModel && systemInstruction && systemInstruction.parts.length > 0) {
- warnings.push({
- type: "other",
- message: `GEMMA models do not support system instructions. System messages will be ignored. Consider including instructions in the first user message instead.`
- });
- }
+ const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
+ prompt,
+ { isGemmaModel }
+ );
  const {
  tools: googleTools,
  toolConfig: googleToolConfig,
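Net effect of the `getArgs` change above: for Gemma model ids the provider no longer emits the "system instructions will be ignored" warning; it threads `{ isGemmaModel }` into the message conversion, so the system prompt ends up in the first user turn. A usage sketch, assuming the companion `ai` package's `generateText` (not part of this diff); the model id is one the provider already lists:

```ts
import { generateText } from 'ai';
import { google } from '@ai-sdk/google';

const { text } = await generateText({
  model: google('gemma-3-27b-it'),
  // For Gemma models this system prompt is merged into the first user message
  // instead of being sent as systemInstruction (and instead of being dropped).
  system: 'You are a terse assistant.',
  prompt: 'Summarize the latest release notes.',
});
```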
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-embedding-model.ts","../src/google-error.ts","../src/google-generative-ai-embedding-options.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-generative-ai-options.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":["export type { GoogleErrorData } from './google-error';\nexport type { GoogleGenerativeAIProviderOptions } from './google-generative-ai-options';\nexport type { GoogleGenerativeAIProviderMetadata } from './google-generative-ai-prompt';\nexport { createGoogleGenerativeAI, google } from './google-provider';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport { GoogleGenerativeAIEmbeddingModelId } from './google-generative-ai-embedding-options';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport { GoogleGenerativeAIModelId } from './google-generative-ai-options';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV2 {\n (modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n languageModel(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n chat(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (modelId: GoogleGenerativeAIModelId) =>\n new GoogleGenerativeAILanguageModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n supportedUrls: () => ({\n '*': [\n // Only allow requests to the Google Generative Language \"files\" endpoint\n // e.g. https://generativelanguage.googleapis.com/v1beta/files/...\n new RegExp(`^${baseURL}/files/.*$`),\n ],\n }),\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: GoogleGenerativeAIEmbeddingModelId) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: GoogleGenerativeAIModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n googleGenerativeAIEmbeddingProviderOptions,\n} from './google-generative-ai-embedding-options';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n 
Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n // Parse provider options\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIEmbeddingProviderOptions,\n });\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: googleOptions?.outputDimensionality,\n taskType: googleOptions?.taskType,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { z } from 'zod/v4';\n\nexport type GoogleGenerativeAIEmbeddingModelId =\n | 'text-embedding-004'\n | (string & {});\n\nexport const googleGenerativeAIEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. 
Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n});\n\nexport type GoogleGenerativeAIEmbeddingProviderOptions = z.infer<\n typeof googleGenerativeAIEmbeddingProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n googleGenerativeAIProviderOptions,\n} from './google-generative-ai-options';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: GoogleGenerativeAIModelId;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptions,\n });\n\n // Add warning if includeThoughts is used with a non-Vertex Google provider\n if (\n googleOptions?.thinkingConfig?.includeThoughts === true &&\n !this.config.provider.startsWith('google.vertex.')\n ) {\n warnings.push({\n type: 'other',\n message:\n \"The 'includeThoughts' option is only supported with the Google Vertex provider \" +\n 'and might not be supported or could behave unexpectedly with the current Google provider ' +\n `(${this.config.provider}).`,\n });\n }\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n const isGemmaModel = this.modelId.toLowerCase().startsWith('gemma-');\n if (\n isGemmaModel &&\n systemInstruction &&\n systemInstruction.parts.length > 0\n ) {\n warnings.push({\n type: 'other',\n message:\n `GEMMA models do not support system instructions. System messages will be ignored. ` +\n `Consider including instructions in the first user message instead.`,\n });\n }\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: googleOptions?.useSearchGrounding ?? false,\n dynamicRetrievalConfig: googleOptions?.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n // TODO convert into provider option\n (googleOptions?.structuredOutputs ?? true)\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(googleOptions?.audioTimestamp && {\n audioTimestamp: googleOptions.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n thinkingConfig: googleOptions?.thinkingConfig,\n },\n contents,\n systemInstruction: isGemmaModel ? 
undefined : systemInstruction,\n safetySettings: googleOptions?.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: googleOptions?.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? []);\n\n const usageMetadata = response.usageMetadata;\n\n // Build content array from all parts\n for (const part of parts) {\n if ('text' in part && part.text != null && part.text.length > 0) {\n if (part.thought === true) {\n content.push({ type: 'reasoning', text: part.text });\n } else {\n content.push({ type: 'text', text: part.text });\n }\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n input: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n totalTokens: usageMetadata?.totalTokenCount ?? undefined,\n reasoningTokens: usageMetadata?.thoughtsTokenCount ?? undefined,\n cachedInputTokens: usageMetadata?.cachedContentTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n // Track active blocks to group consecutive parts of same type\n let currentTextBlockId: string | null = null;\n let currentReasoningBlockId: string | null = null;\n let blockCounter = 0;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n usage.totalTokens = usageMetadata.totalTokenCount ?? undefined;\n usage.reasoningTokens =\n usageMetadata.thoughtsTokenCount ?? undefined;\n usage.cachedInputTokens =\n usageMetadata.cachedContentTokenCount ?? undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n // Process text parts individually to handle reasoning parts\n const parts = content.parts ?? 
[];\n for (const part of parts) {\n if (\n 'text' in part &&\n part.text != null &&\n part.text.length > 0\n ) {\n if (part.thought === true) {\n // End any active text block before starting reasoning\n if (currentTextBlockId !== null) {\n controller.enqueue({\n type: 'text-end',\n id: currentTextBlockId,\n });\n currentTextBlockId = null;\n }\n\n // Start new reasoning block if not already active\n if (currentReasoningBlockId === null) {\n currentReasoningBlockId = String(blockCounter++);\n controller.enqueue({\n type: 'reasoning-start',\n id: currentReasoningBlockId,\n });\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: currentReasoningBlockId,\n delta: part.text,\n });\n } else {\n // End any active reasoning block before starting text\n if (currentReasoningBlockId !== null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: currentReasoningBlockId,\n });\n currentReasoningBlockId = null;\n }\n\n // Start new text block if not already active\n if (currentTextBlockId === null) {\n currentTextBlockId = String(blockCounter++);\n controller.enqueue({\n type: 'text-start',\n id: currentTextBlockId,\n });\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: currentTextBlockId,\n delta: part.text,\n });\n }\n }\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCall.toolCallId,\n toolName: toolCall.toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.toolCallId,\n delta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n input: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n };\n }\n },\n\n flush(controller) {\n // Close any open blocks before finishing\n if (currentTextBlockId !== null) {\n controller.enqueue({\n type: 'text-end',\n id: currentTextBlockId,\n });\n }\n if (currentReasoningBlockId !== null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: currentReasoningBlockId,\n });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? undefined\n : textParts.map(part => part.text).join('');\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n parts: z\n .array(\n z.union([\n // note: order matters since text can be fully empty\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n z.object({\n text: z.string().nullish(),\n thought: z.boolean().nullish(),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n 
supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string().nullish(),\n probability: z.string().nullish(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst usageSchema = z.object({\n cachedContentTokenCount: z.number().nullish(),\n thoughtsTokenCount: z.number().nullish(),\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: usageSchema.nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n additionalProperties,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n if (additionalProperties === true) {\n result.additionalProperties = true;\n } else if (additionalProperties) {\n result.additionalProperties =\n convertJSONSchemaToOpenAPISchema(additionalProperties);\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0) &&\n !jsonSchema.additionalProperties\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.input,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.output.value,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { z } from 'zod/v4';\n\nexport type GoogleGenerativeAIModelId =\n // Stable models\n // https://ai.google.dev/gemini-api/docs/models/gemini\n | 'gemini-1.5-flash'\n | 'gemini-1.5-flash-latest'\n | 'gemini-1.5-flash-001'\n | 'gemini-1.5-flash-002'\n | 'gemini-1.5-flash-8b'\n | 'gemini-1.5-flash-8b-latest'\n | 'gemini-1.5-flash-8b-001'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-pro-latest'\n | 'gemini-1.5-pro-001'\n | 'gemini-1.5-pro-002'\n | 'gemini-2.0-flash'\n | 'gemini-2.0-flash-001'\n | 'gemini-2.0-flash-live-001'\n | 'gemini-2.0-flash-lite'\n | 'gemini-2.0-pro-exp-02-05'\n | 'gemini-2.0-flash-thinking-exp-01-21'\n | 'gemini-2.0-flash-exp'\n | 'gemini-2.5-pro'\n | 'gemini-2.5-flash'\n // Experimental models\n // https://ai.google.dev/gemini-api/docs/models/experimental-models\n | 'gemini-2.5-pro-exp-03-25'\n | 'gemini-2.5-flash-preview-04-17'\n | 'gemini-exp-1206'\n | 'gemma-3-12b-it'\n | 'gemma-3-27b-it'\n | (string & {});\n\nconst dynamicRetrievalConfig = z.object({\n /**\n * The mode of the predictor to be used in dynamic retrieval.\n */\n mode: z.enum(['MODE_UNSPECIFIED', 'MODE_DYNAMIC']).optional(),\n\n /**\n * The threshold to be used in dynamic retrieval. If not set, a system default\n * value is used.\n */\n dynamicThreshold: z.number().optional(),\n});\n\nexport type DynamicRetrievalConfig = z.infer<typeof dynamicRetrievalConfig>;\n\nexport const googleGenerativeAIProviderOptions = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).optional(),\n\n thinkingConfig: z\n .object({\n thinkingBudget: z.number().optional(),\n includeThoughts: z.boolean().optional(),\n })\n .optional(),\n\n /**\nOptional.\nThe name of the cached content used as context to serve the prediction.\nFormat: cachedContents/{cachedContent}\n */\n cachedContent: z.string().optional(),\n\n /**\n * Optional. Enable structured output. Default is true.\n *\n * This is useful when the JSON Schema contains elements that are\n * not supported by the OpenAPI schema version that\n * Google Generative AI uses. You can use this to disable\n * structured outputs if you need to.\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\nOptional. 
A list of unique safety settings for blocking unsafe content.\n */\n safetySettings: z\n .array(\n z.object({\n category: z.enum([\n 'HARM_CATEGORY_UNSPECIFIED',\n 'HARM_CATEGORY_HATE_SPEECH',\n 'HARM_CATEGORY_DANGEROUS_CONTENT',\n 'HARM_CATEGORY_HARASSMENT',\n 'HARM_CATEGORY_SEXUALLY_EXPLICIT',\n 'HARM_CATEGORY_CIVIC_INTEGRITY',\n ]),\n threshold: z.enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ]),\n }),\n )\n .optional(),\n\n threshold: z\n .enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ])\n .optional(),\n\n /**\n * Optional. Enables timestamp understanding for audio-only files.\n *\n * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding\n */\n audioTimestamp: z.boolean().optional(),\n\n /**\nOptional. When enabled, the model will use Google search to ground the response.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/overview\n */\n useSearchGrounding: z.boolean().optional(),\n\n /**\nOptional. Specifies the dynamic retrieval configuration.\n\n@note Dynamic retrieval is only compatible with Gemini 1.5 Flash.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-with-google-search#dynamic-retrieval\n */\n dynamicRetrievalConfig: dynamicRetrievalConfig.optional(),\n});\n\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptions\n>;\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-options';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? { googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? 
{}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAKO;;;ACXP,sBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,4BAA+C;AAC/C,gBAAkB;AAElB,IAAM,wBAAwB,YAAE,OAAO;AAAA,EACrC,OAAO,YAAE,OAAO;AAAA,IACd,MAAM,YAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,YAAE,OAAO;AAAA,IAClB,QAAQ,YAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,aAAkB;AAMX,IAAM,6CAA6C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,sBAAsB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAU,aACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AACd,CAAC;;;AFXM,IAAM,mCAAN,MAEP;AAAA,EAWE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAW/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EATA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EASA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAEA,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,+CAAe;AAAA,UACrC,UAAU,+CAAe;AAAA,QAC3B,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,aAAE,OAAO;AAAA,EAC7D,YAAY,aAAE,MAAM,aAAE,OAAO,EAAE,QAAQ,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AGrGD,IAAAC,yBAUO;AACP,IAAAC,aAAkB;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,IACN;AAAA,EACF,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO
;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,MAAI,yBAAyB,MAAM;AACjC,WAAO,uBAAuB;AAAA,EAChC,WAAW,sBAAsB;AAC/B,WAAO,uBACL,iCAAiC,oBAAoB;AAAA,EACzD;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW,MAChD,CAAC,WAAW;AAEhB;;;ACzIA,IAAAC,mBAGO;AAMP,IAAAC,yBAAgC;AAEzB,SAAS,oCACd,QAC0B;AAC1B,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK,OAAO;AAAA,cACvB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;ACzJO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,aAAkB;AAkClB,IAAM,yBAAyB,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItC,MAAM,aAAE,KAAK,CAAC,oBAAoB,cAAc,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5D,kBAAkB,aAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAIM,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACxD,oBAAoB,aAAE,MAAM,aAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,EAEhE,gBAAgB,aACb,OAAO;AAAA,IACN,gBAAgB,aAAE,OAAO,EAAE,SAAS;AAAA,IACpC,iBAAiB,aAAE,QAAQ,EAAE,SAAS;AAAA,EACxC,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,eAAe,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUnC,mBAAmB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,gBAAgB,aACb;AAAA,IACC,aAAE,OAA
O;AAAA,MACP,UAAU,aAAE,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,WAAW,aAAE,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,SAAS;AAAA,EAEZ,WAAW,aACR,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,gBAAgB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrC,oBAAoB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,wBAAwB,uBAAuB,SAAS;AAC1D,CAAC;;;ACvID,IAAAC,mBAIO;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAAC;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAACA,0BAC1B,CAAC,IACD,EAAE,wBAAAA,wBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,WAAW;AAAA,MAC/D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACvIO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANkBO,IAAM,kCAAN,MAAiE;AAAA,EAOtE,YACE,SACA,QACA;AATF,SAAS,uBAAuB;AAU9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAlEtB;AAmEI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApFnD;AAqFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UACE,oDAAe,mBAAf,mBAA+B,qBAAoB,QACnD,CAAC,KAAK,OAAO,SAAS,WAAW,gBAAgB,GACjD;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SACE,4KAEI,KAAK,OAAO,QAAQ;AAAA,MAC5B,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,UAAM,eAAe,KAAK,QAAQ,YAAY,EAAE,WAAW,QAAQ;AACnE,QACE,gBACA,qBACA,kBAAkB,MAAM,SAAS,GACjC;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SACE;AAAA,MAEJ,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,oDAAe,uBAAf,YAAqC;AAAA,MACzD,wBAAwB,+CAAe;AAAA,MACvC,SAAS,KAAK;AAAA,IA
ChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA;AAAA,YAIxB,oDAAe,sBAAf,YAAoC,QACjC,iCAAiC,eAAe,MAAM,IACtD;AAAA,UACN,IAAI,+CAAe,mBAAkB;AAAA,YACnC,gBAAgB,cAAc;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,UACnC,gBAAgB,+CAAe;AAAA,QACjC;AAAA,QACA;AAAA,QACA,mBAAmB,eAAe,SAAY;AAAA,QAC9C,gBAAgB,+CAAe;AAAA,QAC/B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,+CAAe;AAAA,MAChC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAtLjE;AAuLI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,UAAM,gBAAgB,SAAS;AAG/B,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,QAAQ,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC/D,YAAI,KAAK,YAAY,MAAM;AACzB,kBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QACrD,OAAO;AACL,kBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,QAChD;AAAA,MACF,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,OAAO,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC9C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,QACrD,cAAa,oDAAe,oBAAf,YAAkC;AAAA,QAC/C,kBAAiB,oDAAe,uBAAf,YAAqC;AAAA,QACtD,oBAAmB,oDAAe,4BAAf,YAA0C;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,mBAAyD;AAE7D,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAGnB,QAAI,qBAAoC;AACxC,QAAI,0BAAyC;AAC7C,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AA3UvC;AA4UY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,
gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AACxC,oBAAM,eAAc,mBAAc,oBAAd,YAAiC;AACrD,oBAAM,mBACJ,mBAAc,uBAAd,YAAoC;AACtC,oBAAM,qBACJ,mBAAc,4BAAd,YAAyC;AAAA,YAC7C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;AAEnB,oBAAM,SAAQ,aAAQ,UAAR,YAAiB,CAAC;AAChC,yBAAW,QAAQ,OAAO;AACxB,oBACE,UAAU,QACV,KAAK,QAAQ,QACb,KAAK,KAAK,SAAS,GACnB;AACA,sBAAI,KAAK,YAAY,MAAM;AAEzB,wBAAI,uBAAuB,MAAM;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AACD,2CAAqB;AAAA,oBACvB;AAGA,wBAAI,4BAA4B,MAAM;AACpC,gDAA0B,OAAO,cAAc;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO,KAAK;AAAA,oBACd,CAAC;AAAA,kBACH,OAAO;AAEL,wBAAI,4BAA4B,MAAM;AACpC,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AACD,gDAA0B;AAAA,oBAC5B;AAGA,wBAAI,uBAAuB,MAAM;AAC/B,2CAAqB,OAAO,cAAc;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO,KAAK;AAAA,oBACd,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,oBACb,UAAU,SAAS;AAAA,kBACrB,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,oBACb,OAAO,SAAS;AAAA,kBAClB,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,OAAO,SAAS;AAAA,kBAClB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAEhB,gBAAI,uBAAuB,MAAM;AAC/B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,4BAA4B,MAAM;AACpC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAYA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAC;AACF,GAGwC;AApkBxC;AAqkBE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgB,aAAE,OAAO;AAAA,EAC7B,OAAO,aACJ;AAAA,IACC,aAAE,MAAM;AAAA;AAAA,MAEN,aAAE,OAAO;AAAA,QACP,cAAc,aAAE,OAAO;AAAA,UACrB,MAAM,aAAE,OAAO;AAAA,UACf,MAAM,aAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAA
A,MACD,aAAE,OAAO;AAAA,QACP,YAAY,aAAE,OAAO;AAAA,UACnB,UAAU,aAAE,OAAO;AAAA,UACnB,MAAM,aAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,QACzB,SAAS,aAAE,QAAQ,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,KAAK,aAAE,OAAO,EAAE,KAAK,aAAE,OAAO,GAAG,OAAO,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkB,aAAE,OAAO,EAAE,KAAK,aAAE,OAAO,GAAG,OAAO,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0B,aAAE,OAAO;AAAA,EAC9C,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,aAAE,OAAO,EAAE,iBAAiB,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiB,aAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmB,aAChB;AAAA,IACC,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,YAAY,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,aAChB,MAAM;AAAA,IACL,aAAE,OAAO;AAAA,MACP,0BAA0B,aAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,aAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,aAAE,OAAO;AAAA,EACzC,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAa,aAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,aAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,yBAAyB,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiB,aAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiB,aAAE,OAAO;AAAA,EAC9B,YAAY,aAAE;AAAA,IACZ,aAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,aAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,YAAY,aACT;AAAA,IACC,aAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,aAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJjnBM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA;AAAA,UAGH,IAAI,OAAO,IAAI,OAAO,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_v4","import_provider_utils","import_
v4","import_provider","import_provider_utils","import_v4","import_provider","dynamicRetrievalConfig","generateId","generateId","_a"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/google-provider.ts","../src/google-generative-ai-embedding-model.ts","../src/google-error.ts","../src/google-generative-ai-embedding-options.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-generative-ai-options.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":["export type { GoogleErrorData } from './google-error';\nexport type { GoogleGenerativeAIProviderOptions } from './google-generative-ai-options';\nexport type { GoogleGenerativeAIProviderMetadata } from './google-generative-ai-prompt';\nexport { createGoogleGenerativeAI, google } from './google-provider';\nexport type {\n GoogleGenerativeAIProvider,\n GoogleGenerativeAIProviderSettings,\n} from './google-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport { GoogleGenerativeAIEmbeddingModelId } from './google-generative-ai-embedding-options';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport { GoogleGenerativeAIModelId } from './google-generative-ai-options';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV2 {\n (modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n languageModel(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n chat(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (modelId: GoogleGenerativeAIModelId) =>\n new GoogleGenerativeAILanguageModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? generateId,\n supportedUrls: () => ({\n '*': [\n // Only allow requests to the Google Generative Language \"files\" endpoint\n // e.g. https://generativelanguage.googleapis.com/v1beta/files/...\n new RegExp(`^${baseURL}/files/.*$`),\n ],\n }),\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: GoogleGenerativeAIEmbeddingModelId) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: GoogleGenerativeAIModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n googleGenerativeAIEmbeddingProviderOptions,\n} from './google-generative-ai-embedding-options';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n 
Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n // Parse provider options\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIEmbeddingProviderOptions,\n });\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: googleOptions?.outputDimensionality,\n taskType: googleOptions?.taskType,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { z } from 'zod/v4';\n\nexport type GoogleGenerativeAIEmbeddingModelId =\n | 'text-embedding-004'\n | (string & {});\n\nexport const googleGenerativeAIEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. 
Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n});\n\nexport type GoogleGenerativeAIEmbeddingProviderOptions = z.infer<\n typeof googleGenerativeAIEmbeddingProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n googleGenerativeAIProviderOptions,\n} from './google-generative-ai-options';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: GoogleGenerativeAIModelId;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? 
{};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptions,\n });\n\n // Add warning if includeThoughts is used with a non-Vertex Google provider\n if (\n googleOptions?.thinkingConfig?.includeThoughts === true &&\n !this.config.provider.startsWith('google.vertex.')\n ) {\n warnings.push({\n type: 'other',\n message:\n \"The 'includeThoughts' option is only supported with the Google Vertex provider \" +\n 'and might not be supported or could behave unexpectedly with the current Google provider ' +\n `(${this.config.provider}).`,\n });\n }\n\n const isGemmaModel = this.modelId.toLowerCase().startsWith('gemma-');\n\n const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(\n prompt,\n { isGemmaModel },\n );\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: googleOptions?.useSearchGrounding ?? false,\n dynamicRetrievalConfig: googleOptions?.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n // TODO convert into provider option\n (googleOptions?.structuredOutputs ?? true)\n ? convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(googleOptions?.audioTimestamp && {\n audioTimestamp: googleOptions.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n thinkingConfig: googleOptions?.thinkingConfig,\n },\n contents,\n systemInstruction: isGemmaModel ? 
undefined : systemInstruction,\n safetySettings: googleOptions?.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: googleOptions?.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? []);\n\n const usageMetadata = response.usageMetadata;\n\n // Build content array from all parts\n for (const part of parts) {\n if ('text' in part && part.text != null && part.text.length > 0) {\n if (part.thought === true) {\n content.push({ type: 'reasoning', text: part.text });\n } else {\n content.push({ type: 'text', text: part.text });\n }\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n input: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n totalTokens: usageMetadata?.totalTokenCount ?? undefined,\n reasoningTokens: usageMetadata?.thoughtsTokenCount ?? undefined,\n cachedInputTokens: usageMetadata?.cachedContentTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n // Track active blocks to group consecutive parts of same type\n let currentTextBlockId: string | null = null;\n let currentReasoningBlockId: string | null = null;\n let blockCounter = 0;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n usage.totalTokens = usageMetadata.totalTokenCount ?? undefined;\n usage.reasoningTokens =\n usageMetadata.thoughtsTokenCount ?? undefined;\n usage.cachedInputTokens =\n usageMetadata.cachedContentTokenCount ?? undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n // Process text parts individually to handle reasoning parts\n const parts = content.parts ?? 
[];\n for (const part of parts) {\n if (\n 'text' in part &&\n part.text != null &&\n part.text.length > 0\n ) {\n if (part.thought === true) {\n // End any active text block before starting reasoning\n if (currentTextBlockId !== null) {\n controller.enqueue({\n type: 'text-end',\n id: currentTextBlockId,\n });\n currentTextBlockId = null;\n }\n\n // Start new reasoning block if not already active\n if (currentReasoningBlockId === null) {\n currentReasoningBlockId = String(blockCounter++);\n controller.enqueue({\n type: 'reasoning-start',\n id: currentReasoningBlockId,\n });\n }\n\n controller.enqueue({\n type: 'reasoning-delta',\n id: currentReasoningBlockId,\n delta: part.text,\n });\n } else {\n // End any active reasoning block before starting text\n if (currentReasoningBlockId !== null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: currentReasoningBlockId,\n });\n currentReasoningBlockId = null;\n }\n\n // Start new text block if not already active\n if (currentTextBlockId === null) {\n currentTextBlockId = String(blockCounter++);\n controller.enqueue({\n type: 'text-start',\n id: currentTextBlockId,\n });\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: currentTextBlockId,\n delta: part.text,\n });\n }\n }\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCall.toolCallId,\n toolName: toolCall.toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCall.toolCallId,\n delta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCall.toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n input: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n };\n }\n },\n\n flush(controller) {\n // Close any open blocks before finishing\n if (currentTextBlockId !== null) {\n controller.enqueue({\n type: 'text-end',\n id: currentTextBlockId,\n });\n }\n if (currentReasoningBlockId !== null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: currentReasoningBlockId,\n });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? undefined\n : textParts.map(part => part.text).join('');\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n parts: z\n .array(\n z.union([\n // note: order matters since text can be fully empty\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n z.object({\n text: z.string().nullish(),\n thought: z.boolean().nullish(),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n 
supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string().nullish(),\n probability: z.string().nullish(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst usageSchema = z.object({\n cachedContentTokenCount: z.number().nullish(),\n thoughtsTokenCount: z.number().nullish(),\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: usageSchema.nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0) &&\n !jsonSchema.additionalProperties\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n options?: { isGemmaModel?: boolean },\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n const isGemmaModel = options?.isGemmaModel ?? false;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.input,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.output.value,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n if (\n isGemmaModel &&\n systemInstructionParts.length > 0 &&\n contents.length > 0 &&\n contents[0].role === 'user'\n ) {\n const systemText = systemInstructionParts\n .map(part => part.text)\n .join('\\n\\n');\n\n contents[0].parts.unshift({ text: systemText + '\\n\\n' });\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0 && !isGemmaModel\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { z } from 'zod/v4';\n\nexport type GoogleGenerativeAIModelId =\n // Stable models\n // https://ai.google.dev/gemini-api/docs/models/gemini\n | 'gemini-1.5-flash'\n | 'gemini-1.5-flash-latest'\n | 'gemini-1.5-flash-001'\n | 'gemini-1.5-flash-002'\n | 'gemini-1.5-flash-8b'\n | 'gemini-1.5-flash-8b-latest'\n | 'gemini-1.5-flash-8b-001'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-pro-latest'\n | 'gemini-1.5-pro-001'\n | 'gemini-1.5-pro-002'\n | 'gemini-2.0-flash'\n | 'gemini-2.0-flash-001'\n | 'gemini-2.0-flash-live-001'\n | 'gemini-2.0-flash-lite'\n | 'gemini-2.0-pro-exp-02-05'\n | 'gemini-2.0-flash-thinking-exp-01-21'\n | 'gemini-2.0-flash-exp'\n | 'gemini-2.5-pro'\n | 'gemini-2.5-flash'\n // Experimental models\n // https://ai.google.dev/gemini-api/docs/models/experimental-models\n | 'gemini-2.5-pro-exp-03-25'\n | 'gemini-2.5-flash-preview-04-17'\n | 'gemini-exp-1206'\n | 'gemma-3-12b-it'\n | 'gemma-3-27b-it'\n | (string & {});\n\nconst dynamicRetrievalConfig = z.object({\n /**\n * The mode of the predictor to be used in dynamic retrieval.\n */\n mode: z.enum(['MODE_UNSPECIFIED', 'MODE_DYNAMIC']).optional(),\n\n /**\n * The threshold to be used in dynamic retrieval. If not set, a system default\n * value is used.\n */\n dynamicThreshold: z.number().optional(),\n});\n\nexport type DynamicRetrievalConfig = z.infer<typeof dynamicRetrievalConfig>;\n\nexport const googleGenerativeAIProviderOptions = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).optional(),\n\n thinkingConfig: z\n .object({\n thinkingBudget: z.number().optional(),\n includeThoughts: z.boolean().optional(),\n })\n .optional(),\n\n /**\nOptional.\nThe name of the cached content used as context to serve the prediction.\nFormat: cachedContents/{cachedContent}\n */\n cachedContent: z.string().optional(),\n\n /**\n * Optional. Enable structured output. 
Default is true.\n *\n * This is useful when the JSON Schema contains elements that are\n * not supported by the OpenAPI schema version that\n * Google Generative AI uses. You can use this to disable\n * structured outputs if you need to.\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\nOptional. A list of unique safety settings for blocking unsafe content.\n */\n safetySettings: z\n .array(\n z.object({\n category: z.enum([\n 'HARM_CATEGORY_UNSPECIFIED',\n 'HARM_CATEGORY_HATE_SPEECH',\n 'HARM_CATEGORY_DANGEROUS_CONTENT',\n 'HARM_CATEGORY_HARASSMENT',\n 'HARM_CATEGORY_SEXUALLY_EXPLICIT',\n 'HARM_CATEGORY_CIVIC_INTEGRITY',\n ]),\n threshold: z.enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ]),\n }),\n )\n .optional(),\n\n threshold: z\n .enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ])\n .optional(),\n\n /**\n * Optional. Enables timestamp understanding for audio-only files.\n *\n * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding\n */\n audioTimestamp: z.boolean().optional(),\n\n /**\nOptional. When enabled, the model will use Google search to ground the response.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/overview\n */\n useSearchGrounding: z.boolean().optional(),\n\n /**\nOptional. Specifies the dynamic retrieval configuration.\n\n@note Dynamic retrieval is only compatible with Gemini 1.5 Flash.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-with-google-search#dynamic-retrieval\n */\n dynamicRetrievalConfig: dynamicRetrievalConfig.optional(),\n});\n\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptions\n>;\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-options';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? 
{ googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? {}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAKO;;;ACXP,sBAGO;AACP,IAAAC,yBAOO;AACP,IAAAC,aAAkB;;;ACZlB,4BAA+C;AAC/C,gBAAkB;AAElB,IAAM,wBAAwB,YAAE,OAAO;AAAA,EACrC,OAAO,YAAE,OAAO;AAAA,IACd,MAAM,YAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,YAAE,OAAO;AAAA,IAClB,QAAQ,YAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,IAAAC,aAAkB;AAMX,IAAM,6CAA6C,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,sBAAsB,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAU,aACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AACd,CAAC;;;AFXM,IAAM,mCAAN,MAEP;AAAA,EAWE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAW/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EATA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EASA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAEA,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,+CAAe;AAAA,UACrC,UAAU,+CAAe;AAAA,QAC3B,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,aAAE,OAAO;AAAA,EAC7D,YAAY,aAAE,MAAM,aAAE,OAAO,EAAE,QAAQ,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AGrGD,IAAAC,yBAUO;AACP,IAAAC,aAAkB;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO
,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW,MAChD,CAAC,WAAW;AAEhB;;;ACjIA,IAAAC,mBAGO;AAMP,IAAAC,yBAAgC;AAEzB,SAAS,oCACd,QACA,SAC0B;AAd5B;AAeE,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAC5B,QAAM,gBAAe,wCAAS,iBAAT,YAAyB;AAE9C,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK,OAAO;AAAA,cACvB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MACE,gBACA,uBAAuB,SAAS,KAChC,SAAS,SAAS,KAClB,SAAS,CAAC,EAAE,SAAS,QACrB;AACA,UAAM,aAAa,uBAChB,IAAI,UAAQ,KAAK,IAAI,EACrB,KAAK,MAAM;AAEd,aAAS,CAAC,EAAE,MAAM,QAAQ,EAAE,MAAM,aAAa,OAAO,CAAC;AAAA,EACzD;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,KAAK,CAAC,eAClC,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;ACxKO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,aAAkB;AAkClB,IAAM,yBAAyB,aAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItC,MAAM,aAAE,KAAK,CAAC,oBAAoB,cAAc,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5D,kBAAkB,aAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAIM,IAAM,oCAAoC,aAAE,OAAO;AAAA,EACxD,oBAAoB,aAAE,MAAM,aAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,EAEhE,gBAAgB,aACb,OAAO;AAAA,IACN,gBAAgB,aAAE,OAAO,EAAE,SAAS;AAAA,IACpC,iBAAiB,aAAE,QAAQ,EAAE,SAAS;AAAA,EACxC,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,eAAe,aAAE,OAAO,EAAE
,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUnC,mBAAmB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,gBAAgB,aACb;AAAA,IACC,aAAE,OAAO;AAAA,MACP,UAAU,aAAE,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,WAAW,aAAE,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,SAAS;AAAA,EAEZ,WAAW,aACR,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,gBAAgB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrC,oBAAoB,aAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,wBAAwB,uBAAuB,SAAS;AAC1D,CAAC;;;ACvID,IAAAC,mBAIO;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAAC;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAACA,0BAC1B,CAAC,IACD,EAAE,wBAAAA,wBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,WAAW;AAAA,MAC/D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACvIO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANkBO,IAAM,kCAAN,MAAiE;AAAA,EAOtE,YACE,SACA,QACA;AATF,SAAS,uBAAuB;AAU9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAlEtB;AAmEI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApFnD;AAqFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,gBAAgB,UAAM,6CAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UACE,oDAAe,mBAAf,mBAA+B,qBAAoB,QACnD,CAAC,KAAK,OAAO,SAAS,WAAW,gBAAgB,GACjD;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SACE,4KAEI,KAAK,OAAO,QAAQ;AAAA,MAC5B,CAAC;AAAA,IACH;AAEA,UAAM,eAAe,KAAK,QAAQ,YAAY,EAAE,WAAW,QAAQ;AAEnE,UAAM,EAAE,UAAU,kBAAkB,IAAI;AAAA,MACtC;AAAA,MACA,EAAE,aAAa;AAAA,IACjB;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,oDAAe,uBAAf,YAAqC
;AAAA,MACzD,wBAAwB,+CAAe;AAAA,MACvC,SAAS,KAAK;AAAA,IAChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA;AAAA,YAIxB,oDAAe,sBAAf,YAAoC,QACjC,iCAAiC,eAAe,MAAM,IACtD;AAAA,UACN,IAAI,+CAAe,mBAAkB;AAAA,YACnC,gBAAgB,cAAc;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,UACnC,gBAAgB,+CAAe;AAAA,QACjC;AAAA,QACA;AAAA,QACA,mBAAmB,eAAe,SAAY;AAAA,QAC9C,gBAAgB,+CAAe;AAAA,QAC/B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,+CAAe;AAAA,MAChC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA5KjE;AA6KI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,UAAM,gBAAgB,SAAS;AAG/B,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,QAAQ,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC/D,YAAI,KAAK,YAAY,MAAM;AACzB,kBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QACrD,OAAO;AACL,kBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,QAChD;AAAA,MACF,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,OAAO,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC9C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,QACrD,cAAa,oDAAe,oBAAf,YAAkC;AAAA,QAC/C,kBAAiB,oDAAe,uBAAf,YAAqC;AAAA,QACtD,oBAAmB,oDAAe,4BAAf,YAA0C;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,mBAAyD;AAE7D,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAGnB,QAAI,qBAAoC;AACxC,QAAI,0BAAyC;AAC7C,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAjUvC;AAkUY,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,C
AAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AACxC,oBAAM,eAAc,mBAAc,oBAAd,YAAiC;AACrD,oBAAM,mBACJ,mBAAc,uBAAd,YAAoC;AACtC,oBAAM,qBACJ,mBAAc,4BAAd,YAAyC;AAAA,YAC7C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;AAEnB,oBAAM,SAAQ,aAAQ,UAAR,YAAiB,CAAC;AAChC,yBAAW,QAAQ,OAAO;AACxB,oBACE,UAAU,QACV,KAAK,QAAQ,QACb,KAAK,KAAK,SAAS,GACnB;AACA,sBAAI,KAAK,YAAY,MAAM;AAEzB,wBAAI,uBAAuB,MAAM;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AACD,2CAAqB;AAAA,oBACvB;AAGA,wBAAI,4BAA4B,MAAM;AACpC,gDAA0B,OAAO,cAAc;AAC/C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO,KAAK;AAAA,oBACd,CAAC;AAAA,kBACH,OAAO;AAEL,wBAAI,4BAA4B,MAAM;AACpC,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AACD,gDAA0B;AAAA,oBAC5B;AAGA,wBAAI,uBAAuB,MAAM;AAC/B,2CAAqB,OAAO,cAAc;AAC1C,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AAEA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO,KAAK;AAAA,oBACd,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,oBACb,UAAU,SAAS;AAAA,kBACrB,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,oBACb,OAAO,SAAS;AAAA,kBAClB,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI,SAAS;AAAA,kBACf,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,OAAO,SAAS;AAAA,kBAClB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAEhB,gBAAI,uBAAuB,MAAM;AAC/B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,4BAA4B,MAAM;AACpC,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAYA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAC;AACF,GAGwC;AA1jBxC;AA2jBE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgB,aAAE,OAAO;AAAA,EAC7B,OAAO,aACJ;AAAA,IACC,aAAE,MAAM;AAAA;AAAA,MAEN,aAAE,OAAO;AAAA,QACP,cAAc,aAAE,OAAO;AAAA,UACrB,MAAM,aAAE,OAAO;AAAA,U
ACf,MAAM,aAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,YAAY,aAAE,OAAO;AAAA,UACnB,UAAU,aAAE,OAAO;AAAA,UACnB,MAAM,aAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,QACzB,SAAS,aAAE,QAAQ,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuB,aAAE,OAAO;AAAA,EACpC,KAAK,aAAE,OAAO,EAAE,KAAK,aAAE,OAAO,GAAG,OAAO,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkB,aAAE,OAAO,EAAE,KAAK,aAAE,OAAO,GAAG,OAAO,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0B,aAAE,OAAO;AAAA,EAC9C,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,aAAE,OAAO,EAAE,iBAAiB,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiB,aAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmB,aAChB;AAAA,IACC,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,YAAY,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,aAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,aAChB,MAAM;AAAA,IACL,aAAE,OAAO;AAAA,MACP,0BAA0B,aAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,aAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,aAAE,OAAO;AAAA,EACzC,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAa,aAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,aAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,yBAAyB,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsB,aAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiB,aAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiB,aAAE,OAAO;AAAA,EAC9B,YAAY,aAAE;AAAA,IACZ,aAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAG,aAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,aAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAc,aAAE,OAAO;AAAA,EAC3B,YAAY,aACT;AAAA,IACC,aAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,aAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,aAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJvmBM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA;AAAA,UAGH,IAAI,OAAO,IAAI,OAAO,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider","import_provider_utils","import_provider_utils","im
port_v4","import_v4","import_provider_utils","import_v4","import_provider","import_provider_utils","import_v4","import_provider","dynamicRetrievalConfig","generateId","generateId","_a"]}
package/dist/index.mjs CHANGED
@@ -167,8 +167,7 @@ function convertJSONSchemaToOpenAPISchema(jsonSchema) {
  format,
  const: constValue,
  minLength,
- enum: enumValues,
- additionalProperties
+ enum: enumValues
  } = jsonSchema;
  const result = {};
  if (description)
@@ -239,11 +238,6 @@ function convertJSONSchemaToOpenAPISchema(jsonSchema) {
  if (minLength !== void 0) {
  result.minLength = minLength;
  }
- if (additionalProperties === true) {
- result.additionalProperties = true;
- } else if (additionalProperties) {
- result.additionalProperties = convertJSONSchemaToOpenAPISchema(additionalProperties);
- }
  return result;
  }
  function isEmptyObjectSchema(jsonSchema) {
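The two hunks above carry the same change over to the ESM build: `additionalProperties` is no longer destructured from the incoming JSON Schema, and the block that copied it into the converted schema is gone, since the keyword is not accepted by the Google APIs. A minimal sketch of the observable effect, using an assumed tool schema (the converter itself is internal and not exported, so shapes below are illustrative only):

    // Illustrative schema only; names and values are assumptions, not package API.
    const toolInputSchema = {
      type: "object",
      properties: { city: { type: "string" } },
      additionalProperties: true,
    };
    // 2.0.0-beta.4 forwarded roughly:
    //   { type: "object", properties: { city: { type: "string" } }, additionalProperties: true }
    // 2.0.0-beta.6 now forwards roughly:
    //   { type: "object", properties: { city: { type: "string" } } }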
@@ -255,10 +249,12 @@ import {
  UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
  import { convertToBase64 } from "@ai-sdk/provider-utils";
- function convertToGoogleGenerativeAIMessages(prompt) {
+ function convertToGoogleGenerativeAIMessages(prompt, options) {
+ var _a;
  const systemInstructionParts = [];
  const contents = [];
  let systemMessagesAllowed = true;
+ const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
  for (const { role, content } of prompt) {
  switch (role) {
  case "system": {
@@ -359,8 +355,12 @@ function convertToGoogleGenerativeAIMessages(prompt) {
  }
  }
  }
+ if (isGemmaModel && systemInstructionParts.length > 0 && contents.length > 0 && contents[0].role === "user") {
+ const systemText = systemInstructionParts.map((part) => part.text).join("\n\n");
+ contents[0].parts.unshift({ text: systemText + "\n\n" });
+ }
  return {
- systemInstruction: systemInstructionParts.length > 0 ? { parts: systemInstructionParts } : void 0,
+ systemInstruction: systemInstructionParts.length > 0 && !isGemmaModel ? { parts: systemInstructionParts } : void 0,
  contents
  };
  }
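These hunks port the Gemma system-instruction handling to the ESM build: when `isGemmaModel` is true, the collected system parts are joined with blank lines and prepended to the first user message, and `systemInstruction` is left undefined. A rough sketch of the resulting conversion; the prompt shape below is assumed for illustration:

    // Assumed prompt shape, for illustration only.
    const prompt = [
      { role: "system", content: "Answer in one sentence." },
      { role: "user", content: [{ type: "text", text: "What is Gemma?" }] },
    ];
    // convertToGoogleGenerativeAIMessages(prompt, { isGemmaModel: true }) now returns roughly:
    //   {
    //     systemInstruction: undefined,               // suppressed for Gemma models
    //     contents: [{ role: "user", parts: [
    //       { text: "Answer in one sentence.\n\n" },  // system text folded into the first user turn
    //       { text: "What is Gemma?" },
    //     ] }],
    //   }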
@@ -613,14 +613,11 @@ var GoogleGenerativeAILanguageModel = class {
  message: `The 'includeThoughts' option is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
  });
  }
- const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
- if (isGemmaModel && systemInstruction && systemInstruction.parts.length > 0) {
- warnings.push({
- type: "other",
- message: `GEMMA models do not support system instructions. System messages will be ignored. Consider including instructions in the first user message instead.`
- });
- }
+ const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
+ prompt,
+ { isGemmaModel }
+ );
  const {
  tools: googleTools,
  toolConfig: googleToolConfig,
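With this hunk the language model passes `{ isGemmaModel }` into the message converter instead of emitting the former warning that system messages would be ignored. A hypothetical usage sketch, assuming a Gemma model id such as gemma-3-27b-it is available to the API key and that `generateText` from the separate ai package is used:

    // Hypothetical example; the model id and env var name are assumptions.
    import { generateText } from "ai";
    import { createGoogleGenerativeAI } from "@ai-sdk/google";

    const google = createGoogleGenerativeAI({ apiKey: process.env.GOOGLE_API_KEY });

    const { text } = await generateText({
      model: google("gemma-3-27b-it"),
      // As of 2.0.0-beta.5/6, this system prompt is prepended to the first
      // user message for Gemma models instead of being dropped with a warning.
      system: "Answer in one sentence.",
      prompt: "Summarize what Gemma models are.",
    });
    console.log(text);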