@ai-sdk/google 2.0.0-alpha.1 → 2.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -382,7 +382,8 @@ var dynamicRetrievalConfig = z4.object({
 var googleGenerativeAIProviderOptions = z4.object({
   responseModalities: z4.array(z4.enum(["TEXT", "IMAGE"])).optional(),
   thinkingConfig: z4.object({
-    thinkingBudget: z4.number().optional()
+    thinkingBudget: z4.number().optional(),
+    includeThoughts: z4.boolean().optional()
   }).optional(),
   /**
   Optional.
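
The new `includeThoughts` flag sits next to `thinkingBudget` under `thinkingConfig` in the Google provider options. A minimal usage sketch, assuming the AI SDK alpha's `generateText` call shape; the model id and budget value are illustrative:

import { google } from '@ai-sdk/google';
import { generateText } from 'ai';

// Sketch only: model id and thinkingBudget are illustrative values.
const { text } = await generateText({
  model: google('gemini-2.5-flash-preview-04-17'),
  prompt: 'How many prime numbers are there below 100?',
  providerOptions: {
    google: {
      thinkingConfig: {
        thinkingBudget: 1024,
        includeThoughts: true
      }
    }
  }
});
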
@@ -595,13 +596,19 @@ var GoogleGenerativeAILanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a, _b;
+    var _a, _b, _c;
     const warnings = [];
     const googleOptions = await parseProviderOptions2({
       provider: "google",
       providerOptions,
       schema: googleGenerativeAIProviderOptions
     });
+    if (((_a = googleOptions == null ? void 0 : googleOptions.thinkingConfig) == null ? void 0 : _a.includeThoughts) === true && !this.config.provider.startsWith("google.vertex.")) {
+      warnings.push({
+        type: "other",
+        message: `The 'includeThoughts' option is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
+      });
+    }
     const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
     const {
       tools: googleTools,
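
The guard above only emits a call warning; it does not block the request. A readable sketch of the same condition with the compiled `_a` temporaries expanded (the provider ids are shown for illustration; `google.generative-ai` is the id this package sets):

// Warn when thought summaries are requested outside the Vertex provider family.
const shouldWarnAboutIncludeThoughts = (
  providerId: string,
  thinkingConfig?: { includeThoughts?: boolean }
) =>
  thinkingConfig?.includeThoughts === true &&
  !providerId.startsWith('google.vertex.');

shouldWarnAboutIncludeThoughts('google.generative-ai', { includeThoughts: true }); // true  -> warning pushed
shouldWarnAboutIncludeThoughts('google.vertex.chat', { includeThoughts: true });   // false -> no warning
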
@@ -610,7 +617,7 @@ var GoogleGenerativeAILanguageModel = class {
     } = prepareTools({
       tools,
       toolChoice,
-      useSearchGrounding: (_a = googleOptions == null ? void 0 : googleOptions.useSearchGrounding) != null ? _a : false,
+      useSearchGrounding: (_b = googleOptions == null ? void 0 : googleOptions.useSearchGrounding) != null ? _b : false,
       dynamicRetrievalConfig: googleOptions == null ? void 0 : googleOptions.dynamicRetrievalConfig,
       modelId: this.modelId
     });
@@ -631,7 +638,7 @@ var GoogleGenerativeAILanguageModel = class {
         responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
         // so this is needed as an escape hatch:
         // TODO convert into provider option
-        ((_b = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _b : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
+        ((_c = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _c : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
         ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
           audioTimestamp: googleOptions.audioTimestamp
         },
@@ -675,9 +682,14 @@ var GoogleGenerativeAILanguageModel = class {
     const candidate = response.candidates[0];
     const content = [];
     const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : (_a = candidate.content.parts) != null ? _a : [];
+    const usageMetadata = response.usageMetadata;
     for (const part of parts) {
-      if ("text" in part && part.text.length > 0) {
-        content.push({ type: "text", text: part.text });
+      if ("text" in part && part.text != null && part.text.length > 0) {
+        if (part.thought === true) {
+          content.push({ type: "reasoning", text: part.text });
+        } else {
+          content.push({ type: "text", text: part.text });
+        }
       } else if ("functionCall" in part) {
         content.push({
           type: "tool-call",
@@ -701,7 +713,6 @@ var GoogleGenerativeAILanguageModel = class {
     for (const source of sources) {
       content.push(source);
     }
-    const usageMetadata = response.usageMetadata;
     return {
       content,
       finishReason: mapGoogleGenerativeAIFinishReason({
@@ -764,7 +775,7 @@ var GoogleGenerativeAILanguageModel = class {
           controller.enqueue({ type: "stream-start", warnings });
         },
         transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
           if (!chunk.success) {
             controller.enqueue({ type: "error", error: chunk.error });
             return;
@@ -784,9 +795,15 @@ var GoogleGenerativeAILanguageModel = class {
           }
           const content = candidate.content;
           if (content != null) {
-            const deltaText = getTextFromParts(content.parts);
-            if (deltaText != null) {
-              controller.enqueue(deltaText);
+            const parts = (_g = content.parts) != null ? _g : [];
+            for (const part of parts) {
+              if ("text" in part && part.text != null && part.text.length > 0) {
+                if (part.thought === true) {
+                  controller.enqueue({ type: "reasoning", text: part.text });
+                } else {
+                  controller.enqueue({ type: "text", text: part.text });
+                }
+              }
             }
             const inlineDataParts = getInlineDataParts(content.parts);
             if (inlineDataParts != null) {
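
On the wire this means a response stream can now interleave `reasoning` and `text` parts instead of a single joined text delta. A hedged consumer sketch, assuming the alpha's `streamText` forwards these provider stream parts through `fullStream` with the same `type` and `text` fields (verify against the SDK version actually installed; the model id is illustrative):

import { google } from '@ai-sdk/google';
import { streamText } from 'ai';

const result = streamText({
  model: google('gemini-2.5-flash-preview-04-17'), // illustrative model id
  prompt: 'Walk through the birthday paradox.',
  providerOptions: {
    google: { thinkingConfig: { thinkingBudget: 2048, includeThoughts: true } }
  }
});

for await (const part of result.fullStream) {
  // Assumed part shape: { type: 'reasoning' | 'text', text: string }, mirroring the provider parts above.
  if (part.type === 'reasoning') console.error('[thought]', part.text);
  else if (part.type === 'text') process.stdout.write(part.text);
}
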
@@ -827,17 +844,17 @@ var GoogleGenerativeAILanguageModel = class {
              finishReason: candidate.finishReason,
              hasToolCalls
            });
-           const sources = (_g = extractSources({
+           const sources = (_h = extractSources({
              groundingMetadata: candidate.groundingMetadata,
              generateId: generateId2
-           })) != null ? _g : [];
+           })) != null ? _h : [];
            for (const source of sources) {
              controller.enqueue(source);
            }
            providerMetadata = {
              google: {
-               groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
-               safetyRatings: (_i = candidate.safetyRatings) != null ? _i : null
+               groundingMetadata: (_i = candidate.groundingMetadata) != null ? _i : null,
+               safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null
              }
            };
          }
@@ -872,13 +889,6 @@ function getToolCallsFromParts({
     args: JSON.stringify(part.functionCall.args)
   }));
 }
-function getTextFromParts(parts) {
-  const textParts = parts == null ? void 0 : parts.filter((part) => "text" in part);
-  return textParts == null || textParts.length === 0 ? void 0 : {
-    type: "text",
-    text: textParts.map((part) => part.text).join("")
-  };
-}
 function getInlineDataParts(parts) {
   return parts == null ? void 0 : parts.filter(
     (part) => "inlineData" in part
@@ -900,12 +910,9 @@ function extractSources({
   }));
 }
 var contentSchema = z5.object({
-  role: z5.string(),
   parts: z5.array(
     z5.union([
-      z5.object({
-        text: z5.string()
-      }),
+      // note: order matters since text can be fully empty
      z5.object({
        functionCall: z5.object({
          name: z5.string(),
@@ -917,6 +924,10 @@ var contentSchema = z5.object({
          mimeType: z5.string(),
          data: z5.string()
        })
+      }),
+      z5.object({
+        text: z5.string().nullish(),
+        thought: z5.boolean().nullish()
      })
    ])
  ).nullish()
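
The "order matters" note is about zod union semantics: every field on the relocated text-part schema is nullish, so it matches almost any object, and if it came first it would swallow `functionCall` and `inlineData` parts. A small standalone illustration of that behavior (not code from the package):

import { z } from 'zod';

const textPart = z.object({ text: z.string().nullish(), thought: z.boolean().nullish() });
const functionCallPart = z.object({ functionCall: z.object({ name: z.string(), args: z.unknown() }) });

// z.union tries options in order and returns the first successful parse.
const looseFirst = z.union([textPart, functionCallPart]);
const strictFirst = z.union([functionCallPart, textPart]);

const payload = { functionCall: { name: 'lookup', args: { q: 'x' } } };
console.log(looseFirst.parse(payload));  // {} — matched the permissive text part, functionCall stripped
console.log(strictFirst.parse(payload)); // { functionCall: { name: 'lookup', args: { q: 'x' } } }
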
@@ -1010,8 +1021,9 @@ function createGoogleGenerativeAI(options = {}) {
     generateId: (_a2 = options.generateId) != null ? _a2 : generateId,
     supportedUrls: () => ({
       "*": [
-        // HTTP URLs:
-        /^https?:\/\/.*$/
+        // Only allow requests to the Google Generative Language "files" endpoint
+        // e.g. https://generativelanguage.googleapis.com/v1beta/files/...
+        new RegExp(`^${baseURL}/files/.*$`)
       ]
     }),
     fetch: options.fetch
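
The effect is that only Files API URLs under the configured base URL are reported as natively supported; other URLs are expected to be fetched and inlined by the SDK rather than passed through. A quick check of the new pattern with the package's default base URL (values illustrative):

const baseURL = 'https://generativelanguage.googleapis.com/v1beta';
const supported = new RegExp(`^${baseURL}/files/.*$`);

console.log(supported.test('https://generativelanguage.googleapis.com/v1beta/files/abc-123')); // true
console.log(supported.test('https://example.com/image.png')); // false — previously matched by /^https?:\/\/.*$/
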
@@ -1 +1 @@
- {"version":3,"sources":["../src/google-provider.ts","../src/google-generative-ai-embedding-model.ts","../src/google-error.ts","../src/google-generative-ai-embedding-options.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-generative-ai-options.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport { GoogleGenerativeAIEmbeddingModelId } from './google-generative-ai-embedding-options';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport { GoogleGenerativeAIModelId } from './google-generative-ai-options';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV2 {\n (modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n languageModel(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n chat(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (modelId: GoogleGenerativeAIModelId) =>\n new GoogleGenerativeAILanguageModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? 
generateId,\n supportedUrls: () => ({\n '*': [\n // HTTP URLs:\n /^https?:\\/\\/.*$/,\n ],\n }),\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: GoogleGenerativeAIEmbeddingModelId) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: GoogleGenerativeAIModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n googleGenerativeAIEmbeddingProviderOptions,\n} from './google-generative-ai-embedding-options';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n // Parse provider options\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIEmbeddingProviderOptions,\n });\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: googleOptions?.outputDimensionality,\n taskType: googleOptions?.taskType,\n })),\n },\n failedResponseHandler: 
googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIEmbeddingModelId =\n | 'text-embedding-004'\n | (string & {});\n\nexport const googleGenerativeAIEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n});\n\nexport type GoogleGenerativeAIEmbeddingProviderOptions = z.infer<\n typeof googleGenerativeAIEmbeddingProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n googleGenerativeAIProviderOptions,\n} from 
'./google-generative-ai-options';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: GoogleGenerativeAIModelId;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptions,\n });\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: googleOptions?.useSearchGrounding ?? false,\n dynamicRetrievalConfig: googleOptions?.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n // TODO convert into provider option\n (googleOptions?.structuredOutputs ?? true)\n ? 
convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(googleOptions?.audioTimestamp && {\n audioTimestamp: googleOptions.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n thinkingConfig: googleOptions?.thinkingConfig,\n },\n contents,\n systemInstruction,\n safetySettings: googleOptions?.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: googleOptions?.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? []);\n\n for (const part of parts) {\n if ('text' in part && part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n // sources\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n const usageMetadata = response.usageMetadata;\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n totalTokens: usageMetadata?.totalTokenCount ?? undefined,\n reasoningTokens: usageMetadata?.thoughtsTokenCount ?? undefined,\n cachedInputTokens: usageMetadata?.cachedContentTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n usage.totalTokens = usageMetadata.totalTokenCount ?? undefined;\n usage.reasoningTokens =\n usageMetadata.thoughtsTokenCount ?? undefined;\n usage.cachedInputTokens =\n usageMetadata.cachedContentTokenCount ?? 
undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue(deltaText);\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? null,\n },\n };\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? 
undefined\n : {\n type: 'text' as const,\n text: textParts.map(part => part.text).join(''),\n };\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z\n .array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string().nullish(),\n probability: z.string().nullish(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst usageSchema = z.object({\n cachedContentTokenCount: z.number().nullish(),\n thoughtsTokenCount: z.number().nullish(),\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases 
efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: usageSchema.nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport {\n convertToBase64,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIModelId =\n // Stable models\n // https://ai.google.dev/gemini-api/docs/models/gemini\n | 'gemini-1.5-flash'\n | 'gemini-1.5-flash-latest'\n | 'gemini-1.5-flash-001'\n | 'gemini-1.5-flash-002'\n | 'gemini-1.5-flash-8b'\n | 'gemini-1.5-flash-8b-latest'\n | 'gemini-1.5-flash-8b-001'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-pro-latest'\n | 'gemini-1.5-pro-001'\n | 'gemini-1.5-pro-002'\n | 'gemini-2.0-flash'\n | 'gemini-2.0-flash-001'\n | 'gemini-2.0-flash-live-001'\n | 'gemini-2.0-flash-lite'\n | 'gemini-2.0-pro-exp-02-05'\n | 'gemini-2.0-flash-thinking-exp-01-21'\n | 'gemini-2.0-flash-exp'\n // Experimental models\n // https://ai.google.dev/gemini-api/docs/models/experimental-models\n | 'gemini-2.5-pro-exp-03-25'\n | 'gemini-2.5-flash-preview-04-17'\n | 'gemini-exp-1206'\n | 'gemma-3-27b-it'\n | 'learnlm-1.5-pro-experimental'\n | (string & {});\n\nconst dynamicRetrievalConfig = z.object({\n /**\n * The mode of the predictor to be used in dynamic retrieval.\n */\n mode: z.enum(['MODE_UNSPECIFIED', 'MODE_DYNAMIC']).optional(),\n\n /**\n * The threshold to be used in dynamic retrieval. If not set, a system default\n * value is used.\n */\n dynamicThreshold: z.number().optional(),\n});\n\nexport type DynamicRetrievalConfig = z.infer<typeof dynamicRetrievalConfig>;\n\nexport const googleGenerativeAIProviderOptions = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).optional(),\n\n thinkingConfig: z\n .object({\n thinkingBudget: z.number().optional(),\n })\n .optional(),\n\n /**\nOptional.\nThe name of the cached content used as context to serve the prediction.\nFormat: cachedContents/{cachedContent}\n */\n cachedContent: z.string().optional(),\n\n /**\n * Optional. Enable structured output. Default is true.\n *\n * This is useful when the JSON Schema contains elements that are\n * not supported by the OpenAPI schema version that\n * Google Generative AI uses. You can use this to disable\n * structured outputs if you need to.\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\nOptional. 
A list of unique safety settings for blocking unsafe content.\n */\n safetySettings: z\n .array(\n z.object({\n category: z.enum([\n 'HARM_CATEGORY_UNSPECIFIED',\n 'HARM_CATEGORY_HATE_SPEECH',\n 'HARM_CATEGORY_DANGEROUS_CONTENT',\n 'HARM_CATEGORY_HARASSMENT',\n 'HARM_CATEGORY_SEXUALLY_EXPLICIT',\n 'HARM_CATEGORY_CIVIC_INTEGRITY',\n ]),\n threshold: z.enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ]),\n }),\n )\n .optional(),\n\n threshold: z\n .enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ])\n .optional(),\n\n /**\n * Optional. Enables timestamp understanding for audio-only files.\n *\n * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding\n */\n audioTimestamp: z.boolean().optional(),\n\n /**\nOptional. When enabled, the model will use Google search to ground the response.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/overview\n */\n useSearchGrounding: z.boolean().optional(),\n\n /**\nOptional. Specifies the dynamic retrieval configuration.\n\n@note Dynamic retrieval is only compatible with Gemini 1.5 Flash.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-with-google-search#dynamic-retrieval\n */\n dynamicRetrievalConfig: dynamicRetrievalConfig.optional(),\n});\n\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptions\n>;\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-options';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? { googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? 
{}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACXP;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACZlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,wBAAwB,EAAE,OAAO;AAAA,EACrC,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,EAAE,OAAO;AAAA,IAClB,QAAQ,EAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,8BAA8B,+BAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,SAAS,KAAAC,UAAS;AAMX,IAAM,6CAA6CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,sBAAsBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAUA,GACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AACd,CAAC;;;AFXM,IAAM,mCAAN,MAEP;AAAA,EAWE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAW/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EATA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EASA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAEA,UAAM,gBAAgB,MAAM,qBAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,gBAAgB;AAAA,MACpB,MAAM,QAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,+CAAe;AAAA,UACrC,UAAU,+CAAe;AAAA,QAC3B,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgDC,GAAE,OAAO;AAAA,EAC7D,YAAYA,GAAE,MAAMA,GAAE,OAAO,EAAE,QAAQA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AGrGD;AAAA,EAIE,kBAAAC;AAAA,EACA;AAAA,EACA,6BAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,EACA,WAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;A
AAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChIA;AAAA,EAEE;AAAA,OACK;AAMP;AAAA,EACE;AAAA,OAEK;AAEA,SAAS,oCACd,QAC0B;AAC1B,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8BAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,MAAM,gBAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,gBAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC5JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,SAAS,KAAAC,UAAS;AAgClB,IAAM,yBAAyBA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItC,MAAMA,GAAE,KAAK,CAAC,oBAAoB,cAAc,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5D,kBAAkBA,GAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAIM,IAAM,oCAAoCA,GAAE,OAAO;AAAA,EACxD,oBAAoBA,GAAE,MAAMA,GAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,EAEhE,gBAAgBA,GACb,OAAO;AAAA,IACN,gBAAgBA,GAAE,OAAO,EAAE,SAAS;AAAA,EACtC,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,eAAeA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUnC,mBAAmBA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA,EAKxC,gBAAgBA,GAC
b;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,UAAUA,GAAE,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,WAAWA,GAAE,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,SAAS;AAAA,EAEZ,WAAWA,GACR,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,gBAAgBA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrC,oBAAoBA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,wBAAwB,uBAAuB,SAAS;AAC1D,CAAC;;;ACpID;AAAA,EAGE,iCAAAC;AAAA,OACK;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAAC;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAACA,0BAC1B,CAAC,IACD,EAAE,wBAAAA,wBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIC,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACvIO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANkBO,IAAM,kCAAN,MAAiE;AAAA,EAOtE,YACE,SACA,QACA;AATF,SAAS,uBAAuB;AAU9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAlEtB;AAmEI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApFnD;AAqFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,gBAAgB,MAAMC,sBAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,oDAAe,uBAAf,YAAqC;AAAA,MACzD,wBAAwB,+CAAe;AAAA,MACvC,SAAS,KAAK;AAAA,IAChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA;AAAA,YAIxB,oDAAe,sBAAf,YAAoC,QACjC,iCAAiC,eAAe,MAAM,I
ACtD;AAAA,UACN,IAAI,+CAAe,mBAAkB;AAAA,YACnC,gBAAgB,cAAc;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,UACnC,gBAAgB,+CAAe;AAAA,QACjC;AAAA,QACA;AAAA,QACA;AAAA,QACA,gBAAgB,+CAAe;AAAA,QAC/B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,+CAAe;AAAA,MAChC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1JjE;AA2JI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,gBAAgBC;AAAA,MACpB,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2BC,2BAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC1C,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,MAChD,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC7C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,QACrD,cAAa,oDAAe,oBAAf,YAAkC;AAAA,QAC/C,kBAAiB,oDAAe,uBAAf,YAAqC;AAAA,QACtD,oBAAmB,oDAAe,4BAAf,YAA0C;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,UAAUH;AAAA,MACd,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B,iCAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,mBAAyD;AAE7D,UAAME,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAvSvC;AAwSY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AACxC,oBAAM,eAAc,mBAAc,oBAAd,YAAiC;AACrD,oBAAM,mBACJ,mBAAc,uBAAd,YAAoC;AACtC,oBAAM,qBACJ,mBAAc,4BAAd,YAAyC;AAAA,YAC7C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;AACnB,oBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,kBAAI,aAAa,MAAM;AACrB,2BAAW,QAAQ,SAAS;AAAA,cAC9B;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;A
AAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,eAAe,SAAS;AAAA,kBAC1B,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,MAAM,SAAS;AAAA,kBACjB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,+BAAO,OAAO,UAAQ,UAAU;AAIlD,SAAO,aAAa,QAAQ,UAAU,WAAW,IAC7C,SACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAAA,EAChD;AACN;AAEA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAA;AACF,GAGwC;AAndxC;AAodE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgBC,GAAE,OAAO;AAAA,EAC7B,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GACJ;AAAA,IACCA,GAAE,MAAM;AAAA,MACNA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,cAAcA,GAAE,OAAO;AAAA,UACrB,MAAMA,GAAE,OAAO;AAAA,UACf,MAAMA,GAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,YAAYA,GAAE,OAAO;AAAA,UACnB,UAAUA,GAAE,OAAO;AAAA,UACnB,MAAMA,GAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EACpC,KAAKA,GAAE,OAAO,EAAE,KAAKA,GAAE,OAAO,GAAG,OAAOA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkBA,GAAE,OAAO,EAAE,KAAKA,GAAE,OAAO,GAAG,OAAOA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0BA,GAAE,OAAO;AAAA,EAC9C,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkBA,GAAE,OAAO,EAAE,iBAAiBA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiBA,GAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmBA,GAChB;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmBA,GAChB,MAAM;AAAA,IACLA,GAAE,OAAO;AAAA,MACP,0BAA0BA,GAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACDA,GAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,
IAAM,qBAAqBA,GAAE,OAAO;AAAA,EACzC,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAaA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAASA,GAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAcA,GAAE,OAAO;AAAA,EAC3B,yBAAyBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiBA,GAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiBA,GAAE,OAAO;AAAA,EAC9B,YAAYA,GAAE;AAAA,IACZA,GAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAGA,GAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAeA,GAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAcA,GAAE,OAAO;AAAA,EAC3B,YAAYA,GACT;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAeA,GAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJ/fM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,kBAAkB,WAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA,UAEH;AAAA,QACF;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["z","z","z","combineHeaders","createJsonResponseHandler","parseProviderOptions","postJsonToApi","resolve","z","z","UnsupportedFunctionalityError","dynamicRetrievalConfig","UnsupportedFunctionalityError","parseProviderOptions","combineHeaders","resolve","postJsonToApi","createJsonResponseHandler","generateId","z","_a"]}
1
+ {"version":3,"sources":["../src/google-provider.ts","../src/google-generative-ai-embedding-model.ts","../src/google-error.ts","../src/google-generative-ai-embedding-options.ts","../src/google-generative-ai-language-model.ts","../src/convert-json-schema-to-openapi-schema.ts","../src/convert-to-google-generative-ai-messages.ts","../src/get-model-path.ts","../src/google-generative-ai-options.ts","../src/google-prepare-tools.ts","../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { GoogleGenerativeAIEmbeddingModel } from './google-generative-ai-embedding-model';\nimport { GoogleGenerativeAIEmbeddingModelId } from './google-generative-ai-embedding-options';\nimport { GoogleGenerativeAILanguageModel } from './google-generative-ai-language-model';\nimport { GoogleGenerativeAIModelId } from './google-generative-ai-options';\n\nexport interface GoogleGenerativeAIProvider extends ProviderV2 {\n (modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n languageModel(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n chat(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n * @deprecated Use `chat()` instead.\n */\n generativeAI(modelId: GoogleGenerativeAIModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n\n textEmbeddingModel(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n ): EmbeddingModelV2<string>;\n}\n\nexport interface GoogleGenerativeAIProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://generativelanguage.googleapis.com/v1beta`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `x-goog-api-key` header.\nIt defaults to the `GOOGLE_GENERATIVE_AI_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string | undefined>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n /**\nOptional function to generate a unique ID for each request.\n */\n generateId?: () => string;\n}\n\n/**\nCreate a Google Generative AI provider instance.\n */\nexport function createGoogleGenerativeAI(\n options: GoogleGenerativeAIProviderSettings = {},\n): GoogleGenerativeAIProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ??\n 'https://generativelanguage.googleapis.com/v1beta';\n\n const getHeaders = () => ({\n 'x-goog-api-key': loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'GOOGLE_GENERATIVE_AI_API_KEY',\n description: 'Google Generative AI',\n }),\n ...options.headers,\n });\n\n const createChatModel = (modelId: GoogleGenerativeAIModelId) =>\n new GoogleGenerativeAILanguageModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n generateId: options.generateId ?? 
generateId,\n supportedUrls: () => ({\n '*': [\n // Only allow requests to the Google Generative Language \"files\" endpoint\n // e.g. https://generativelanguage.googleapis.com/v1beta/files/...\n new RegExp(`^${baseURL}/files/.*$`),\n ],\n }),\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: GoogleGenerativeAIEmbeddingModelId) =>\n new GoogleGenerativeAIEmbeddingModel(modelId, {\n provider: 'google.generative-ai',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: GoogleGenerativeAIModelId) {\n if (new.target) {\n throw new Error(\n 'The Google Generative AI model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.generativeAI = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Google Generative AI provider instance.\n */\nexport const google = createGoogleGenerativeAI();\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { googleFailedResponseHandler } from './google-error';\nimport {\n GoogleGenerativeAIEmbeddingModelId,\n googleGenerativeAIEmbeddingProviderOptions,\n} from './google-generative-ai-embedding-options';\n\ntype GoogleGenerativeAIEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class GoogleGenerativeAIEmbeddingModel\n implements EmbeddingModelV2<string>\n{\n readonly specificationVersion = 'v2';\n readonly modelId: GoogleGenerativeAIEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 2048;\n readonly supportsParallelCalls = true;\n\n private readonly config: GoogleGenerativeAIEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n constructor(\n modelId: GoogleGenerativeAIEmbeddingModelId,\n config: GoogleGenerativeAIEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n headers,\n abortSignal,\n providerOptions,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n // Parse provider options\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIEmbeddingProviderOptions,\n });\n\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n 
outputDimensionality: googleOptions?.outputDimensionality,\n taskType: googleOptions?.taskType,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIEmbeddingModelId =\n | 'text-embedding-004'\n | (string & {});\n\nexport const googleGenerativeAIEmbeddingProviderOptions = z.object({\n /**\n * Optional. Optional reduced dimension for the output embedding.\n * If set, excessive values in the output embedding are truncated from the end.\n */\n outputDimensionality: z.number().optional(),\n\n /**\n * Optional. Specifies the task type for generating embeddings.\n * Supported task types:\n * - SEMANTIC_SIMILARITY: Optimized for text similarity.\n * - CLASSIFICATION: Optimized for text classification.\n * - CLUSTERING: Optimized for clustering texts based on similarity.\n * - RETRIEVAL_DOCUMENT: Optimized for document retrieval.\n * - RETRIEVAL_QUERY: Optimized for query-based retrieval.\n * - QUESTION_ANSWERING: Optimized for answering questions.\n * - FACT_VERIFICATION: Optimized for verifying factual information.\n * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.\n */\n taskType: z\n .enum([\n 'SEMANTIC_SIMILARITY',\n 'CLASSIFICATION',\n 'CLUSTERING',\n 'RETRIEVAL_DOCUMENT',\n 'RETRIEVAL_QUERY',\n 'QUESTION_ANSWERING',\n 'FACT_VERIFICATION',\n 'CODE_RETRIEVAL_QUERY',\n ])\n .optional(),\n});\n\nexport type GoogleGenerativeAIEmbeddingProviderOptions = z.infer<\n typeof googleGenerativeAIEmbeddingProviderOptions\n>;\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from 
'./google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n googleGenerativeAIProviderOptions,\n} from './google-generative-ai-options';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n\n /**\n * The supported URLs for the model.\n */\n supportedUrls?: () => LanguageModelV2['supportedUrls'];\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: GoogleGenerativeAIModelId;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get supportedUrls() {\n return this.config.supportedUrls?.() ?? {};\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = await parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptions,\n });\n\n // Add warning if includeThoughts is used with a non-Vertex Google provider\n if (\n googleOptions?.thinkingConfig?.includeThoughts === true &&\n !this.config.provider.startsWith('google.vertex.')\n ) {\n warnings.push({\n type: 'other',\n message:\n \"The 'includeThoughts' option is only supported with the Google Vertex provider \" +\n 'and might not be supported or could behave unexpectedly with the current Google provider ' +\n `(${this.config.provider}).`,\n });\n }\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: googleOptions?.useSearchGrounding ?? false,\n dynamicRetrievalConfig: googleOptions?.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n // TODO convert into provider option\n (googleOptions?.structuredOutputs ?? true)\n ? 
convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(googleOptions?.audioTimestamp && {\n audioTimestamp: googleOptions.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n thinkingConfig: googleOptions?.thinkingConfig,\n },\n contents,\n systemInstruction,\n safetySettings: googleOptions?.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: googleOptions?.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? []);\n\n const usageMetadata = response.usageMetadata;\n\n // Build content array from all parts\n for (const part of parts) {\n if ('text' in part && part.text != null && part.text.length > 0) {\n if (part.thought === true) {\n content.push({ type: 'reasoning', text: part.text });\n } else {\n content.push({ type: 'text', text: part.text });\n }\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n totalTokens: usageMetadata?.totalTokenCount ?? undefined,\n reasoningTokens: usageMetadata?.thoughtsTokenCount ?? undefined,\n cachedInputTokens: usageMetadata?.cachedContentTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n usage.totalTokens = usageMetadata.totalTokenCount ?? undefined;\n usage.reasoningTokens =\n usageMetadata.thoughtsTokenCount ?? undefined;\n usage.cachedInputTokens =\n usageMetadata.cachedContentTokenCount ?? undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n // Process text parts individually to handle reasoning parts\n const parts = content.parts ?? 
[];\n for (const part of parts) {\n if (\n 'text' in part &&\n part.text != null &&\n part.text.length > 0\n ) {\n if (part.thought === true) {\n controller.enqueue({ type: 'reasoning', text: part.text });\n } else {\n controller.enqueue({ type: 'text', text: part.text });\n }\n }\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? null,\n },\n };\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? 
undefined\n : textParts.map(part => part.text).join('');\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n parts: z\n .array(\n z.union([\n // note: order matters since text can be fully empty\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n z.object({\n text: z.string().nullish(),\n thought: z.boolean().nullish(),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n .nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string().nullish(),\n probability: z.string().nullish(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst usageSchema = z.object({\n cachedContentTokenCount: z.number().nullish(),\n thoughtsTokenCount: z.number().nullish(),\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: usageSchema.nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits 
breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: usageSchema.nullish(),\n});\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport {\n convertToBase64,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import { z } from 'zod';\n\nexport type GoogleGenerativeAIModelId =\n // Stable models\n // https://ai.google.dev/gemini-api/docs/models/gemini\n | 'gemini-1.5-flash'\n | 'gemini-1.5-flash-latest'\n | 'gemini-1.5-flash-001'\n | 'gemini-1.5-flash-002'\n | 'gemini-1.5-flash-8b'\n | 'gemini-1.5-flash-8b-latest'\n | 'gemini-1.5-flash-8b-001'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-pro-latest'\n | 'gemini-1.5-pro-001'\n | 'gemini-1.5-pro-002'\n | 'gemini-2.0-flash'\n | 'gemini-2.0-flash-001'\n | 'gemini-2.0-flash-live-001'\n | 'gemini-2.0-flash-lite'\n | 'gemini-2.0-pro-exp-02-05'\n | 'gemini-2.0-flash-thinking-exp-01-21'\n | 'gemini-2.0-flash-exp'\n // Experimental models\n // https://ai.google.dev/gemini-api/docs/models/experimental-models\n | 'gemini-2.5-pro-exp-03-25'\n | 'gemini-2.5-flash-preview-04-17'\n | 'gemini-exp-1206'\n | 'gemma-3-27b-it'\n | 'learnlm-1.5-pro-experimental'\n | (string & {});\n\nconst dynamicRetrievalConfig = z.object({\n /**\n * The mode of the predictor to be used in dynamic retrieval.\n */\n mode: z.enum(['MODE_UNSPECIFIED', 'MODE_DYNAMIC']).optional(),\n\n /**\n * The threshold to be used in dynamic retrieval. If not set, a system default\n * value is used.\n */\n dynamicThreshold: z.number().optional(),\n});\n\nexport type DynamicRetrievalConfig = z.infer<typeof dynamicRetrievalConfig>;\n\nexport const googleGenerativeAIProviderOptions = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).optional(),\n\n thinkingConfig: z\n .object({\n thinkingBudget: z.number().optional(),\n includeThoughts: z.boolean().optional(),\n })\n .optional(),\n\n /**\nOptional.\nThe name of the cached content used as context to serve the prediction.\nFormat: cachedContents/{cachedContent}\n */\n cachedContent: z.string().optional(),\n\n /**\n * Optional. Enable structured output. Default is true.\n *\n * This is useful when the JSON Schema contains elements that are\n * not supported by the OpenAPI schema version that\n * Google Generative AI uses. You can use this to disable\n * structured outputs if you need to.\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\nOptional. 
A list of unique safety settings for blocking unsafe content.\n */\n safetySettings: z\n .array(\n z.object({\n category: z.enum([\n 'HARM_CATEGORY_UNSPECIFIED',\n 'HARM_CATEGORY_HATE_SPEECH',\n 'HARM_CATEGORY_DANGEROUS_CONTENT',\n 'HARM_CATEGORY_HARASSMENT',\n 'HARM_CATEGORY_SEXUALLY_EXPLICIT',\n 'HARM_CATEGORY_CIVIC_INTEGRITY',\n ]),\n threshold: z.enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ]),\n }),\n )\n .optional(),\n\n threshold: z\n .enum([\n 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',\n 'BLOCK_LOW_AND_ABOVE',\n 'BLOCK_MEDIUM_AND_ABOVE',\n 'BLOCK_ONLY_HIGH',\n 'BLOCK_NONE',\n 'OFF',\n ])\n .optional(),\n\n /**\n * Optional. Enables timestamp understanding for audio-only files.\n *\n * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding\n */\n audioTimestamp: z.boolean().optional(),\n\n /**\nOptional. When enabled, the model will use Google search to ground the response.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/overview\n */\n useSearchGrounding: z.boolean().optional(),\n\n /**\nOptional. Specifies the dynamic retrieval configuration.\n\n@note Dynamic retrieval is only compatible with Gemini 1.5 Flash.\n\n@see https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-with-google-search#dynamic-retrieval\n */\n dynamicRetrievalConfig: dynamicRetrievalConfig.optional(),\n});\n\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptions\n>;\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-options';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? { googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? 
{}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? '',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 
'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACXP;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACZlB,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,wBAAwB,EAAE,OAAO;AAAA,EACrC,OAAO,EAAE,OAAO;AAAA,IACd,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,EAAE,OAAO;AAAA,IAClB,QAAQ,EAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,8BAA8B,+BAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;AChBD,SAAS,KAAAC,UAAS;AAMX,IAAM,6CAA6CA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA,EAKjE,sBAAsBA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAc1C,UAAUA,GACP,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AACd,CAAC;;;AFXM,IAAM,mCAAN,MAEP;AAAA,EAWE,YACE,SACA,QACA;AAbF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAW/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EATA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EASA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AAEA,UAAM,gBAAgB,MAAM,qBAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,gBAAgB;AAAA,MACpB,MAAM,QAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,+CAAe;AAAA,UACrC,UAAU,+CAAe;AAAA,QAC3B,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgDC,GAAE,OAAO;AAAA,EAC7D,YAAYA,GAAE,MAAMA,GAAE,OAAO,EAAE,QAAQA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AGrGD;AAAA,EAIE,kBAAAC;AAAA,EACA;AAAA,EACA,6BAAAC;AAAA,EACA,wBAAAC;AAAA,EACA,iBAAAC;AAAA,EACA,WAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;A
AAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAAc,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChIA;AAAA,EAEE;AAAA,OACK;AAMP;AAAA,EACE;AAAA,OAEK;AAEA,SAAS,oCACd,QAC0B;AAC1B,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,8BAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,MAAM,gBAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,MAAM,gBAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC5JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,SAAS,KAAAC,UAAS;AAgClB,IAAM,yBAAyBA,GAAE,OAAO;AAAA;AAAA;AAAA;AAAA,EAItC,MAAMA,GAAE,KAAK,CAAC,oBAAoB,cAAc,CAAC,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5D,kBAAkBA,GAAE,OAAO,EAAE,SAAS;AACxC,CAAC;AAIM,IAAM,oCAAoCA,GAAE,OAAO;AAAA,EACxD,oBAAoBA,GAAE,MAAMA,GAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,SAAS;AAAA,EAEhE,gBAAgBA,GACb,OAAO;AAAA,IACN,gBAAgBA,GAAE,OAAO,EAAE,SAAS;AAAA,IACpC,iBAAiBA,GAAE,QAAQ,EAAE,SAAS;AAAA,EACxC,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,eAAeA,GAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUnC,mBAAmBA,GAAE,QAAQ,EAAE,SAA
S;AAAA;AAAA;AAAA;AAAA,EAKxC,gBAAgBA,GACb;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,UAAUA,GAAE,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,WAAWA,GAAE,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,SAAS;AAAA,EAEZ,WAAWA,GACR,KAAK;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,EACA,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOZ,gBAAgBA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrC,oBAAoBA,GAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASzC,wBAAwB,uBAAuB,SAAS;AAC1D,CAAC;;;ACrID;AAAA,EAGE,iCAAAC;AAAA,OACK;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA,wBAAAC;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAACA,0BAC1B,CAAC,IACD,EAAE,wBAAAA,wBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAsB,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIC,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACvIO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ANkBO,IAAM,kCAAN,MAAiE;AAAA,EAOtE,YACE,SACA,QACA;AATF,SAAS,uBAAuB;AAU9B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,gBAAgB;AAlEtB;AAmEI,YAAO,sBAAK,QAAO,kBAAZ,4CAAiC,CAAC;AAAA,EAC3C;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AApFnD;AAqFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,gBAAgB,MAAMC,sBAAqB;AAAA,MAC/C,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAGD,UACE,oDAAe,mBAAf,mBAA+B,qBAAoB,QACnD,CAAC,KAAK,OAAO,SAAS,WAAW,gBAAgB,GACjD;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SACE,4KAEI,KAAK,OAAO,QAAQ;AAAA,MAC5B,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,oDAAe,uBAAf,YAAqC;AAAA,MACzD,wBAAwB,+CAAe;AAAA,MACvC,SAAS,KAAK;AAAA,IAChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AA
AA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA;AAAA,YAIxB,oDAAe,sBAAf,YAAoC,QACjC,iCAAiC,eAAe,MAAM,IACtD;AAAA,UACN,IAAI,+CAAe,mBAAkB;AAAA,YACnC,gBAAgB,cAAc;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,UACnC,gBAAgB,+CAAe;AAAA,QACjC;AAAA,QACA;AAAA,QACA;AAAA,QACA,gBAAgB,+CAAe;AAAA,QAC/B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,+CAAe;AAAA,MAChC;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAxKjE;AAyKI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,gBAAgBC;AAAA,MACpB,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2BC,2BAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,UAAM,gBAAgB,SAAS;AAG/B,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,QAAQ,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC/D,YAAI,KAAK,YAAY,MAAM;AACzB,kBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QACrD,OAAO;AACL,kBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,QAChD;AAAA,MACF,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC7C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,QACrD,cAAa,oDAAe,oBAAf,YAAkC;AAAA,QAC/C,kBAAiB,oDAAe,uBAAf,YAAqC;AAAA,QACtD,oBAAmB,oDAAe,4BAAf,YAA0C;AAAA,MAC/D;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AAAA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,UAAUH;AAAA,MACd,MAAMC,SAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B,iCAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AACA,QAAI,mBAAyD;AAE7D,UAAME,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAzTvC;AA0TY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AACxC,oBAAM,eAAc,mBAAc,oBAAd,YAAiC;AACrD,oBAAM,mBACJ,mBAAc,uBAAd,YAAoC;AACtC,oBAAM,qBACJ,mBAAc,4BAAd,YAAyC;AAA
A,YAC7C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;AAEnB,oBAAM,SAAQ,aAAQ,UAAR,YAAiB,CAAC;AAChC,yBAAW,QAAQ,OAAO;AACxB,oBACE,UAAU,QACV,KAAK,QAAQ,QACb,KAAK,KAAK,SAAS,GACnB;AACA,sBAAI,KAAK,YAAY,MAAM;AACzB,+BAAW,QAAQ,EAAE,MAAM,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,kBAC3D,OAAO;AACL,+BAAW,QAAQ,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,kBACtD;AAAA,gBACF;AAAA,cACF;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,eAAe,SAAS;AAAA,kBAC1B,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,MAAM,SAAS;AAAA,kBACjB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAYA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAC;AACF,GAGwC;AA7exC;AA8eE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgBC,GAAE,OAAO;AAAA,EAC7B,OAAOA,GACJ;AAAA,IACCA,GAAE,MAAM;AAAA;AAAA,MAENA,GAAE,OAAO;AAAA,QACP,cAAcA,GAAE,OAAO;AAAA,UACrB,MAAMA,GAAE,OAAO;AAAA,UACf,MAAMA,GAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,YAAYA,GAAE,OAAO;AAAA,UACnB,UAAUA,GAAE,OAAO;AAAA,UACnB,MAAMA,GAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,QACzB,SAASA,GAAE,QAAQ,EAAE,QAAQ;AAAA,MAC/B,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuBA,GAAE,OAAO;AAAA,EACpC,KAAKA,GAAE,OAAO,EAAE,KAAKA,GAAE,OAAO,GAAG,OAAOA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkBA,GAAE,OAAO,EAAE,KAAKA,GAAE,OAAO,GAAG,OAAOA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0BA,GAAE,OAAO;AAAA,EAC9C,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkBA,GAAE,OAAO,EAAE,iBAAiBA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiBA,GAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmBA,GAChB;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,YAAYA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAMA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuBA,GAAE,MAAMA,GAAE,OAAO,CAAC,E
AAE,QAAQ;AAAA,MACnD,qBAAqBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiBA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmBA,GAChB,MAAM;AAAA,IACLA,GAAE,OAAO;AAAA,MACP,0BAA0BA,GAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACDA,GAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EACzC,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,aAAaA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAChC,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAUA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAASA,GAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,cAAcA,GAAE,OAAO;AAAA,EAC3B,yBAAyBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5C,oBAAoBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvC,kBAAkBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,sBAAsBA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACzC,iBAAiBA,GAAE,OAAO,EAAE,QAAQ;AACtC,CAAC;AAED,IAAM,iBAAiBA,GAAE,OAAO;AAAA,EAC9B,YAAYA,GAAE;AAAA,IACZA,GAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAGA,GAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAeA,GAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,YAAY,QAAQ;AACrC,CAAC;AAID,IAAM,cAAcA,GAAE,OAAO;AAAA,EAC3B,YAAYA,GACT;AAAA,IACCA,GAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAcA,GAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAeA,GAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,YAAY,QAAQ;AACrC,CAAC;;;AJ1hBM,SAAS,yBACd,UAA8C,CAAC,GACnB;AAnF9B;AAoFE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,kBAAkB,WAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YAAoC;AAjG/D,QAAAC;AAkGI,eAAI,gCAAgC,SAAS;AAAA,MAC3C,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,eAAe,OAAO;AAAA,QACpB,KAAK;AAAA;AAAA;AAAA,UAGH,IAAI,OAAO,IAAI,OAAO,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,MACA,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,iCAAiC,SAAS;AAAA,IAC5C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAAoC;AAC7D,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["z","z","z","combineHeaders","createJsonResponseHandler","parseProviderOptions","postJsonToApi","resolve","z","z","UnsupportedFunctionalityError","dynamicRetrievalConfig","UnsupportedFunctionalityError","parseProviderOptions","combineHeaders","resolve","postJsonToApi","createJsonResponseHandler","generateId","generateId","z","_a"]}