@ai-sdk/google 2.0.0-canary.8 → 2.0.0-canary.9

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,16 @@
  # @ai-sdk/google
 
+ ## 2.0.0-canary.9
+
+ ### Patch Changes
+
+ - Updated dependencies [5d142ab]
+ - Updated dependencies [b6b43c7]
+ - Updated dependencies [8aa9e20]
+ - Updated dependencies [3795467]
+ - @ai-sdk/provider-utils@3.0.0-canary.8
+ - @ai-sdk/provider@2.0.0-canary.7
+
  ## 2.0.0-canary.8
 
  ### Patch Changes
package/dist/index.js CHANGED
@@ -534,7 +534,7 @@ var GoogleGenerativeAILanguageModel = class {
  return this.config.isSupportedUrl(url);
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  const { args, warnings } = await this.getArgs(options);
  const body = JSON.stringify(args);
  const mergedHeaders = (0, import_provider_utils4.combineHeaders)(
@@ -557,39 +557,52 @@ var GoogleGenerativeAILanguageModel = class {
  fetch: this.config.fetch
  });
  const candidate = response.candidates[0];
- const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : candidate.content.parts;
- const toolCalls = getToolCallsFromParts({
- parts,
+ const content = [];
+ const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : (_a = candidate.content.parts) != null ? _a : [];
+ for (const part of parts) {
+ if ("text" in part && part.text.length > 0) {
+ content.push({ type: "text", text: part.text });
+ } else if ("functionCall" in part) {
+ content.push({
+ type: "tool-call",
+ toolCallType: "function",
+ toolCallId: this.config.generateId(),
+ toolName: part.functionCall.name,
+ args: JSON.stringify(part.functionCall.args)
+ });
+ } else if ("inlineData" in part) {
+ content.push({
+ type: "file",
+ data: part.inlineData.data,
+ mediaType: part.inlineData.mimeType
+ });
+ }
+ }
+ const sources = (_b = extractSources({
+ groundingMetadata: candidate.groundingMetadata,
  generateId: this.config.generateId
- });
+ })) != null ? _b : [];
+ for (const source of sources) {
+ content.push(source);
+ }
  const usageMetadata = response.usageMetadata;
  return {
- text: getTextFromParts(parts),
- files: (_a = getInlineDataParts(parts)) == null ? void 0 : _a.map((part) => ({
- type: "file",
- data: part.inlineData.data,
- mediaType: part.inlineData.mimeType
- })),
- toolCalls,
+ content,
  finishReason: mapGoogleGenerativeAIFinishReason({
  finishReason: candidate.finishReason,
- hasToolCalls: toolCalls != null && toolCalls.length > 0
+ hasToolCalls: content.some((part) => part.type === "tool-call")
  }),
  usage: {
- inputTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _b : void 0,
- outputTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _c : void 0
+ inputTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _c : void 0,
+ outputTokens: (_d = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _d : void 0
  },
  warnings,
  providerMetadata: {
  google: {
- groundingMetadata: (_d = candidate.groundingMetadata) != null ? _d : null,
- safetyRatings: (_e = candidate.safetyRatings) != null ? _e : null
+ groundingMetadata: (_e = candidate.groundingMetadata) != null ? _e : null,
+ safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
  }
  },
- sources: extractSources({
- groundingMetadata: candidate.groundingMetadata,
- generateId: this.config.generateId
- }),
  request: { body },
  response: {
  // TODO timestamp, model id, id
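Net effect of this hunk: doGenerate no longer returns separate text, files, toolCalls, and sources fields; text, tool-call, and file parts are pushed in order into a single content array, and sources extracted from groundingMetadata are appended to it. A minimal consumer-side sketch of that shape follows; the ContentPart type and logContent helper are illustrative assumptions, not exports of the package.

    // Illustrative only: an approximation of the part shapes pushed above,
    // not the SDK's actual exported types.
    type ContentPart =
      | { type: 'text'; text: string }
      | { type: 'tool-call'; toolCallType: 'function'; toolCallId: string; toolName: string; args: string }
      | { type: 'file'; data: string; mediaType: string }
      | { type: 'source'; sourceType: 'url'; id: string; url: string; title?: string };

    // Walks the unified `content` array that doGenerate now returns.
    function logContent(content: ContentPart[]): void {
      for (const part of content) {
        switch (part.type) {
          case 'text':
            console.log('text:', part.text);
            break;
          case 'tool-call':
            console.log('tool call:', part.toolName, part.args); // args is a JSON string
            break;
          case 'file':
            console.log('inline file:', part.mediaType); // data is base64-encoded
            break;
          case 'source':
            console.log('grounding source:', part.url, part.title ?? '');
            break;
        }
      }
    }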
@@ -627,6 +640,9 @@ var GoogleGenerativeAILanguageModel = class {
  return {
  stream: response.pipeThrough(
  new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
  transform(chunk, controller) {
  var _a, _b, _c, _d, _e, _f;
  if (!chunk.success) {
@@ -714,7 +730,6 @@ var GoogleGenerativeAILanguageModel = class {
  })
  ),
  response: { headers: responseHeaders },
- warnings,
  request: { body }
  };
  }
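Together, the last two hunks move per-call warnings from the doStream return object into the stream itself: a stream-start part carrying the warnings is enqueued before any model output, and the top-level warnings field is dropped. A rough sketch of how a consumer might read them under that assumption; the readWarnings helper is illustrative, not part of the package.

    // Illustrative only: reads the first part of the stream returned by doStream
    // and logs the warnings now delivered via the `stream-start` part.
    async function readWarnings(
      stream: ReadableStream<{ type: string; warnings?: unknown[] }>,
    ): Promise<void> {
      const reader = stream.getReader();
      const first = await reader.read();
      if (!first.done && first.value.type === 'stream-start') {
        console.log('call warnings:', first.value.warnings ?? []);
      }
      reader.releaseLock();
    }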
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- (previous build's source map: one minified JSON line with "version":3, the ../src/*.ts source list, their embedded sourcesContent, and the mappings string; omitted here)
+ (regenerated source map matching the updated dist/index.js: same single-line structure; omitted here)
async doEmbed({\n values,\n headers,\n abortSignal,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,\n headers: mergedHeaders,\n body: {\n requests: values.map(value => ({\n model: `models/${this.modelId}`,\n content: { role: 'user', parts: [{ text: value }] },\n outputDimensionality: this.settings.outputDimensionality,\n })),\n },\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n googleGenerativeAITextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.embeddings.map(item => item.values),\n usage: undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst googleGenerativeAITextEmbeddingResponseSchema = z.object({\n embeddings: z.array(z.object({ values: z.array(z.number()) })),\n});\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst googleErrorDataSchema = z.object({\n error: z.object({\n code: z.number().nullable(),\n message: z.string(),\n status: z.string(),\n }),\n});\n\nexport type GoogleErrorData = z.infer<typeof googleErrorDataSchema>;\n\nexport const googleFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: googleErrorDataSchema,\n errorToMessage: data => data.error.message,\n});\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2Source,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n SharedV2ProviderMetadata,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n Resolvable,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n parseProviderOptions,\n postJsonToApi,\n resolve,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport { convertToGoogleGenerativeAIMessages } from './convert-to-google-generative-ai-messages';\nimport { getModelPath } from './get-model-path';\nimport { googleFailedResponseHandler } from './google-error';\nimport { GoogleGenerativeAIContentPart } from './google-generative-ai-prompt';\nimport {\n GoogleGenerativeAIModelId,\n InternalGoogleGenerativeAISettings,\n} from './google-generative-ai-settings';\nimport { prepareTools } from './google-prepare-tools';\nimport { mapGoogleGenerativeAIFinishReason } from './map-google-generative-ai-finish-reason';\n\ntype GoogleGenerativeAIConfig = {\n provider: string;\n baseURL: string;\n headers: Resolvable<Record<string, string | undefined>>;\n fetch?: FetchFunction;\n generateId: () => string;\n isSupportedUrl: (url: URL) => boolean;\n};\n\nexport class GoogleGenerativeAILanguageModel implements LanguageModelV2 {\n 
readonly specificationVersion = 'v2';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n get supportsStructuredOutputs() {\n return this.settings.structuredOutputs ?? true;\n }\n\n readonly modelId: GoogleGenerativeAIModelId;\n readonly settings: InternalGoogleGenerativeAISettings;\n\n private readonly config: GoogleGenerativeAIConfig;\n\n constructor(\n modelId: GoogleGenerativeAIModelId,\n settings: InternalGoogleGenerativeAISettings,\n config: GoogleGenerativeAIConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n tools,\n toolChoice,\n providerOptions,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const googleOptions = parseProviderOptions({\n provider: 'google',\n providerOptions,\n schema: googleGenerativeAIProviderOptionsSchema,\n });\n\n const { contents, systemInstruction } =\n convertToGoogleGenerativeAIMessages(prompt);\n\n const {\n tools: googleTools,\n toolConfig: googleToolConfig,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n useSearchGrounding: this.settings.useSearchGrounding ?? false,\n dynamicRetrievalConfig: this.settings.dynamicRetrievalConfig,\n modelId: this.modelId,\n });\n\n return {\n args: {\n generationConfig: {\n // standardized settings:\n maxOutputTokens,\n temperature,\n topK,\n topP,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n seed,\n\n // response format:\n responseMimeType:\n responseFormat?.type === 'json' ? 'application/json' : undefined,\n responseSchema:\n responseFormat?.type === 'json' &&\n responseFormat.schema != null &&\n // Google GenAI does not support all OpenAPI Schema features,\n // so this is needed as an escape hatch:\n this.supportsStructuredOutputs\n ? 
convertJSONSchemaToOpenAPISchema(responseFormat.schema)\n : undefined,\n ...(this.settings.audioTimestamp && {\n audioTimestamp: this.settings.audioTimestamp,\n }),\n\n // provider options:\n responseModalities: googleOptions?.responseModalities,\n },\n contents,\n systemInstruction,\n safetySettings: this.settings.safetySettings,\n tools: googleTools,\n toolConfig: googleToolConfig,\n cachedContent: this.settings.cachedContent,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n supportsUrl(url: URL): boolean {\n return this.config.isSupportedUrl(url);\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = JSON.stringify(args);\n\n const mergedHeaders = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:generateContent`,\n headers: mergedHeaders,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(responseSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const candidate = response.candidates[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // map ordered parts to content:\n const parts =\n candidate.content == null ||\n typeof candidate.content !== 'object' ||\n !('parts' in candidate.content)\n ? []\n : (candidate.content.parts ?? []);\n\n for (const part of parts) {\n if ('text' in part && part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n } else if ('functionCall' in part) {\n content.push({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: this.config.generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n });\n } else if ('inlineData' in part) {\n content.push({\n type: 'file' as const,\n data: part.inlineData.data,\n mediaType: part.inlineData.mimeType,\n });\n }\n }\n\n // sources\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId: this.config.generateId,\n }) ?? [];\n for (const source of sources) {\n content.push(source);\n }\n\n const usageMetadata = response.usageMetadata;\n\n return {\n content,\n finishReason: mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls: content.some(part => part.type === 'tool-call'),\n }),\n usage: {\n inputTokens: usageMetadata?.promptTokenCount ?? undefined,\n outputTokens: usageMetadata?.candidatesTokenCount ?? undefined,\n },\n warnings,\n providerMetadata: {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n },\n request: { body },\n response: {\n // TODO timestamp, model id, id\n headers: responseHeaders,\n body: rawResponse,\n },\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n\n const body = JSON.stringify(args);\n const headers = combineHeaders(\n await resolve(this.config.headers),\n options.headers,\n );\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/${getModelPath(\n this.modelId,\n )}:streamGenerateContent?alt=sse`,\n headers,\n body: args,\n failedResponseHandler: googleFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n };\n let providerMetadata: SharedV2ProviderMetadata | undefined = undefined;\n\n const generateId = this.config.generateId;\n let hasToolCalls = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof chunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n const usageMetadata = value.usageMetadata;\n\n if (usageMetadata != null) {\n usage.inputTokens = usageMetadata.promptTokenCount ?? undefined;\n usage.outputTokens =\n usageMetadata.candidatesTokenCount ?? undefined;\n }\n\n const candidate = value.candidates?.[0];\n\n // sometimes the API returns an empty candidates array\n if (candidate == null) {\n return;\n }\n\n const content = candidate.content;\n\n // Process tool call's parts before determining finishReason to ensure hasToolCalls is properly set\n if (content != null) {\n const deltaText = getTextFromParts(content.parts);\n if (deltaText != null) {\n controller.enqueue(deltaText);\n }\n\n const inlineDataParts = getInlineDataParts(content.parts);\n if (inlineDataParts != null) {\n for (const part of inlineDataParts) {\n controller.enqueue({\n type: 'file',\n mediaType: part.inlineData.mimeType,\n data: part.inlineData.data,\n });\n }\n }\n\n const toolCallDeltas = getToolCallsFromParts({\n parts: content.parts,\n generateId,\n });\n\n if (toolCallDeltas != null) {\n for (const toolCall of toolCallDeltas) {\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n argsTextDelta: toolCall.args,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.toolCallId,\n toolName: toolCall.toolName,\n args: toolCall.args,\n });\n\n hasToolCalls = true;\n }\n }\n }\n\n if (candidate.finishReason != null) {\n finishReason = mapGoogleGenerativeAIFinishReason({\n finishReason: candidate.finishReason,\n hasToolCalls,\n });\n\n const sources =\n extractSources({\n groundingMetadata: candidate.groundingMetadata,\n generateId,\n }) ?? [];\n\n for (const source of sources) {\n controller.enqueue(source);\n }\n\n providerMetadata = {\n google: {\n groundingMetadata: candidate.groundingMetadata ?? null,\n safetyRatings: candidate.safetyRatings ?? 
null,\n },\n };\n }\n },\n\n flush(controller) {\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n providerMetadata,\n });\n },\n }),\n ),\n response: { headers: responseHeaders },\n request: { body },\n };\n }\n}\n\nfunction getToolCallsFromParts({\n parts,\n generateId,\n}: {\n parts: z.infer<typeof contentSchema>['parts'];\n generateId: () => string;\n}) {\n const functionCallParts = parts?.filter(\n part => 'functionCall' in part,\n ) as Array<\n GoogleGenerativeAIContentPart & {\n functionCall: { name: string; args: unknown };\n }\n >;\n\n return functionCallParts == null || functionCallParts.length === 0\n ? undefined\n : functionCallParts.map(part => ({\n type: 'tool-call' as const,\n toolCallType: 'function' as const,\n toolCallId: generateId(),\n toolName: part.functionCall.name,\n args: JSON.stringify(part.functionCall.args),\n }));\n}\n\nfunction getTextFromParts(parts: z.infer<typeof contentSchema>['parts']) {\n const textParts = parts?.filter(part => 'text' in part) as Array<\n GoogleGenerativeAIContentPart & { text: string }\n >;\n\n return textParts == null || textParts.length === 0\n ? undefined\n : {\n type: 'text' as const,\n text: textParts.map(part => part.text).join(''),\n };\n}\n\nfunction getInlineDataParts(parts: z.infer<typeof contentSchema>['parts']) {\n return parts?.filter(\n (\n part,\n ): part is {\n inlineData: { mimeType: string; data: string };\n } => 'inlineData' in part,\n );\n}\n\nfunction extractSources({\n groundingMetadata,\n generateId,\n}: {\n groundingMetadata: z.infer<typeof groundingMetadataSchema> | undefined | null;\n generateId: () => string;\n}): undefined | LanguageModelV2Source[] {\n return groundingMetadata?.groundingChunks\n ?.filter(\n (\n chunk,\n ): chunk is z.infer<typeof groundingChunkSchema> & {\n web: { uri: string; title?: string };\n } => chunk.web != null,\n )\n .map(chunk => ({\n type: 'source',\n sourceType: 'url',\n id: generateId(),\n url: chunk.web.uri,\n title: chunk.web.title,\n }));\n}\n\nconst contentSchema = z.object({\n role: z.string(),\n parts: z\n .array(\n z.union([\n z.object({\n text: z.string(),\n }),\n z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.unknown(),\n }),\n }),\n z.object({\n inlineData: z.object({\n mimeType: z.string(),\n data: z.string(),\n }),\n }),\n ]),\n )\n .nullish(),\n});\n\n// https://ai.google.dev/gemini-api/docs/grounding\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/ground-gemini#ground-to-search\nconst groundingChunkSchema = z.object({\n web: z.object({ uri: z.string(), title: z.string() }).nullish(),\n retrievedContext: z.object({ uri: z.string(), title: z.string() }).nullish(),\n});\n\nexport const groundingMetadataSchema = z.object({\n webSearchQueries: z.array(z.string()).nullish(),\n retrievalQueries: z.array(z.string()).nullish(),\n searchEntryPoint: z.object({ renderedContent: z.string() }).nullish(),\n groundingChunks: z.array(groundingChunkSchema).nullish(),\n groundingSupports: z\n .array(\n z.object({\n segment: z.object({\n startIndex: z.number().nullish(),\n endIndex: z.number().nullish(),\n text: z.string().nullish(),\n }),\n segment_text: z.string().nullish(),\n groundingChunkIndices: z.array(z.number()).nullish(),\n supportChunkIndices: z.array(z.number()).nullish(),\n confidenceScores: z.array(z.number()).nullish(),\n confidenceScore: z.array(z.number()).nullish(),\n }),\n )\n .nullish(),\n retrievalMetadata: z\n .union([\n z.object({\n webDynamicRetrievalScore: z.number(),\n }),\n z.object({}),\n ])\n 
.nullish(),\n});\n\n// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/configure-safety-filters\nexport const safetyRatingSchema = z.object({\n category: z.string(),\n probability: z.string(),\n probabilityScore: z.number().nullish(),\n severity: z.string().nullish(),\n severityScore: z.number().nullish(),\n blocked: z.boolean().nullish(),\n});\n\nconst responseSchema = z.object({\n candidates: z.array(\n z.object({\n content: contentSchema.nullish().or(z.object({}).strict()),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n ),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst chunkSchema = z.object({\n candidates: z\n .array(\n z.object({\n content: contentSchema.nullish(),\n finishReason: z.string().nullish(),\n safetyRatings: z.array(safetyRatingSchema).nullish(),\n groundingMetadata: groundingMetadataSchema.nullish(),\n }),\n )\n .nullish(),\n usageMetadata: z\n .object({\n promptTokenCount: z.number().nullish(),\n candidatesTokenCount: z.number().nullish(),\n totalTokenCount: z.number().nullish(),\n })\n .nullish(),\n});\n\nconst googleGenerativeAIProviderOptionsSchema = z.object({\n responseModalities: z.array(z.enum(['TEXT', 'IMAGE'])).nullish(),\n});\nexport type GoogleGenerativeAIProviderOptions = z.infer<\n typeof googleGenerativeAIProviderOptionsSchema\n>;\n","import { JSONSchema7Definition } from '@ai-sdk/provider';\n\n/**\n * Converts JSON Schema 7 to OpenAPI Schema 3.0\n */\nexport function convertJSONSchemaToOpenAPISchema(\n jsonSchema: JSONSchema7Definition | undefined,\n): unknown {\n // parameters need to be undefined if they are empty objects:\n if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {\n return undefined;\n }\n\n if (typeof jsonSchema === 'boolean') {\n return { type: 'boolean', properties: {} };\n }\n\n const {\n type,\n description,\n required,\n properties,\n items,\n allOf,\n anyOf,\n oneOf,\n format,\n const: constValue,\n minLength,\n enum: enumValues,\n } = jsonSchema;\n\n const result: Record<string, unknown> = {};\n\n if (description) result.description = description;\n if (required) result.required = required;\n if (format) result.format = format;\n\n if (constValue !== undefined) {\n result.enum = [constValue];\n }\n\n // Handle type\n if (type) {\n if (Array.isArray(type)) {\n if (type.includes('null')) {\n result.type = type.filter(t => t !== 'null')[0];\n result.nullable = true;\n } else {\n result.type = type;\n }\n } else if (type === 'null') {\n result.type = 'null';\n } else {\n result.type = type;\n }\n }\n\n // Handle enum\n if (enumValues !== undefined) {\n result.enum = enumValues;\n }\n\n if (properties != null) {\n result.properties = Object.entries(properties).reduce(\n (acc, [key, value]) => {\n acc[key] = convertJSONSchemaToOpenAPISchema(value);\n return acc;\n },\n {} as Record<string, unknown>,\n );\n }\n\n if (items) {\n result.items = Array.isArray(items)\n ? 
items.map(convertJSONSchemaToOpenAPISchema)\n : convertJSONSchemaToOpenAPISchema(items);\n }\n\n if (allOf) {\n result.allOf = allOf.map(convertJSONSchemaToOpenAPISchema);\n }\n if (anyOf) {\n // Handle cases where anyOf includes a null type\n if (\n anyOf.some(\n schema => typeof schema === 'object' && schema?.type === 'null',\n )\n ) {\n const nonNullSchemas = anyOf.filter(\n schema => !(typeof schema === 'object' && schema?.type === 'null'),\n );\n\n if (nonNullSchemas.length === 1) {\n // If there's only one non-null schema, convert it and make it nullable\n const converted = convertJSONSchemaToOpenAPISchema(nonNullSchemas[0]);\n if (typeof converted === 'object') {\n result.nullable = true;\n Object.assign(result, converted);\n }\n } else {\n // If there are multiple non-null schemas, keep them in anyOf\n result.anyOf = nonNullSchemas.map(convertJSONSchemaToOpenAPISchema);\n result.nullable = true;\n }\n } else {\n result.anyOf = anyOf.map(convertJSONSchemaToOpenAPISchema);\n }\n }\n if (oneOf) {\n result.oneOf = oneOf.map(convertJSONSchemaToOpenAPISchema);\n }\n\n if (minLength !== undefined) {\n result.minLength = minLength;\n }\n\n return result;\n}\n\nfunction isEmptyObjectSchema(jsonSchema: JSONSchema7Definition): boolean {\n return (\n jsonSchema != null &&\n typeof jsonSchema === 'object' &&\n jsonSchema.type === 'object' &&\n (jsonSchema.properties == null ||\n Object.keys(jsonSchema.properties).length === 0)\n );\n}\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n GoogleGenerativeAIContent,\n GoogleGenerativeAIContentPart,\n GoogleGenerativeAIPrompt,\n} from './google-generative-ai-prompt';\nimport {\n convertToBase64,\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\n\nexport function convertToGoogleGenerativeAIMessages(\n prompt: LanguageModelV2Prompt,\n): GoogleGenerativeAIPrompt {\n const systemInstructionParts: Array<{ text: string }> = [];\n const contents: Array<GoogleGenerativeAIContent> = [];\n let systemMessagesAllowed = true;\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n if (!systemMessagesAllowed) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'system messages are only supported at the beginning of the conversation',\n });\n }\n\n systemInstructionParts.push({ text: content });\n break;\n }\n\n case 'user': {\n systemMessagesAllowed = false;\n\n const parts: GoogleGenerativeAIContentPart[] = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n parts.push({ text: part.text });\n break;\n }\n\n case 'file': {\n // default to image/jpeg for unknown image/* types\n const mediaType =\n part.mediaType === 'image/*' ? 'image/jpeg' : part.mediaType;\n\n parts.push(\n part.data instanceof URL\n ? {\n fileData: {\n mimeType: mediaType,\n fileUri: part.data.toString(),\n },\n }\n : {\n inlineData: {\n mimeType: mediaType,\n data: convertToBase64(part.data),\n },\n },\n );\n\n break;\n }\n }\n }\n\n contents.push({ role: 'user', parts });\n break;\n }\n\n case 'assistant': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'model',\n parts: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text.length === 0\n ? 
undefined\n : { text: part.text };\n }\n\n case 'file': {\n if (part.mediaType !== 'image/png') {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only PNG images are supported in assistant messages',\n });\n }\n\n if (part.data instanceof URL) {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'File data URLs in assistant messages are not supported',\n });\n }\n\n return {\n inlineData: {\n mimeType: part.mediaType,\n data: convertToBase64(part.data),\n },\n };\n }\n\n case 'tool-call': {\n return {\n functionCall: {\n name: part.toolName,\n args: part.args,\n },\n };\n }\n }\n })\n .filter(part => part !== undefined),\n });\n break;\n }\n\n case 'tool': {\n systemMessagesAllowed = false;\n\n contents.push({\n role: 'user',\n parts: content.map(part => ({\n functionResponse: {\n name: part.toolName,\n response: {\n name: part.toolName,\n content: part.result,\n },\n },\n })),\n });\n break;\n }\n }\n }\n\n return {\n systemInstruction:\n systemInstructionParts.length > 0\n ? { parts: systemInstructionParts }\n : undefined,\n contents,\n };\n}\n","export function getModelPath(modelId: string): string {\n return modelId.includes('/') ? modelId : `models/${modelId}`;\n}\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertJSONSchemaToOpenAPISchema } from './convert-json-schema-to-openapi-schema';\nimport {\n DynamicRetrievalConfig,\n GoogleGenerativeAIModelId,\n} from './google-generative-ai-settings';\n\nexport function prepareTools({\n tools,\n toolChoice,\n useSearchGrounding,\n dynamicRetrievalConfig,\n modelId,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n useSearchGrounding: boolean;\n dynamicRetrievalConfig: DynamicRetrievalConfig | undefined;\n modelId: GoogleGenerativeAIModelId;\n}): {\n tools:\n | undefined\n | {\n functionDeclarations: Array<{\n name: string;\n description: string | undefined;\n parameters: unknown;\n }>;\n }\n | {\n googleSearchRetrieval:\n | Record<string, never>\n | { dynamicRetrievalConfig: DynamicRetrievalConfig };\n }\n | { googleSearch: Record<string, never> };\n toolConfig:\n | undefined\n | {\n functionCallingConfig: {\n mode: 'AUTO' | 'NONE' | 'ANY';\n allowedFunctionNames?: string[];\n };\n };\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n const isGemini2 = modelId.includes('gemini-2');\n const supportsDynamicRetrieval =\n modelId.includes('gemini-1.5-flash') && !modelId.includes('-8b');\n\n if (useSearchGrounding) {\n return {\n tools: isGemini2\n ? { googleSearch: {} }\n : {\n googleSearchRetrieval:\n !supportsDynamicRetrieval || !dynamicRetrievalConfig\n ? {}\n : { dynamicRetrievalConfig },\n },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n if (tools == null) {\n return { tools: undefined, toolConfig: undefined, toolWarnings };\n }\n\n const functionDeclarations = [];\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n functionDeclarations.push({\n name: tool.name,\n description: tool.description ?? 
'',\n parameters: convertJSONSchemaToOpenAPISchema(tool.parameters),\n });\n }\n }\n\n if (toolChoice == null) {\n return {\n tools: { functionDeclarations },\n toolConfig: undefined,\n toolWarnings,\n };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'AUTO' } },\n toolWarnings,\n };\n case 'none':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'NONE' } },\n toolWarnings,\n };\n case 'required':\n return {\n tools: { functionDeclarations },\n toolConfig: { functionCallingConfig: { mode: 'ANY' } },\n toolWarnings,\n };\n case 'tool':\n return {\n tools: { functionDeclarations },\n toolConfig: {\n functionCallingConfig: {\n mode: 'ANY',\n allowedFunctionNames: [toolChoice.toolName],\n },\n },\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapGoogleGenerativeAIFinishReason({\n finishReason,\n hasToolCalls,\n}: {\n finishReason: string | null | undefined;\n hasToolCalls: boolean;\n}): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'STOP':\n return hasToolCalls ? 'tool-calls' : 'stop';\n case 'MAX_TOKENS':\n return 'length';\n case 'IMAGE_SAFETY':\n case 'RECITATION':\n case 'SAFETY':\n case 'BLOCKLIST':\n case 'PROHIBITED_CONTENT':\n case 'SPII':\n return 'content-filter';\n case 'FINISH_REASON_UNSPECIFIED':\n case 'OTHER':\n return 'other';\n case 'MALFORMED_FUNCTION_CALL':\n return 'error';\n default:\n return 'unknown';\n }\n}\n","export function isSupportedFileUrl(url: URL): boolean {\n return url\n .toString()\n 
.startsWith('https://generativelanguage.googleapis.com/v1beta/files/');\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAKO;;;ACXP,sBAGO;AACP,IAAAC,yBAMO;AACP,IAAAC,cAAkB;;;ACXlB,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,wBAAwB,aAAE,OAAO;AAAA,EACrC,OAAO,aAAE,OAAO;AAAA,IACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,IAC1B,SAAS,aAAE,OAAO;AAAA,IAClB,QAAQ,aAAE,OAAO;AAAA,EACnB,CAAC;AACH,CAAC;AAIM,IAAM,kCAA8B,sDAA+B;AAAA,EACxE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK,MAAM;AACrC,CAAC;;;ADSM,IAAM,mCAAN,MAEP;AAAA,EAmBE,YACE,SACA,UACA,QACA;AAtBF,SAAS,uBAAuB;AAuB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EApBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,IAAI,wBAAiC;AACnC,WAAO;AAAA,EACT;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC;AAAA,IACF;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,WAAW,KAAK,OAAO;AAAA,MAClD,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,UAAU,OAAO,IAAI,YAAU;AAAA,UAC7B,OAAO,UAAU,KAAK,OAAO;AAAA,UAC7B,SAAS,EAAE,MAAM,QAAQ,OAAO,CAAC,EAAE,MAAM,MAAM,CAAC,EAAE;AAAA,UAClD,sBAAsB,KAAK,SAAS;AAAA,QACtC,EAAE;AAAA,MACJ;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,WAAW,IAAI,UAAQ,KAAK,MAAM;AAAA,MACvD,OAAO;AAAA,MACP,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,gDAAgD,cAAE,OAAO;AAAA,EAC7D,YAAY,cAAE,MAAM,cAAE,OAAO,EAAE,QAAQ,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAC/D,CAAC;;;AErGD,IAAAC,yBAUO;AACP,IAAAC,cAAkB;;;AChBX,SAAS,iCACd,YACS;AAET,MAAI,cAAc,QAAQ,oBAAoB,UAAU,GAAG;AACzD,WAAO;AAAA,EACT;AAEA,MAAI,OAAO,eAAe,WAAW;AACnC,WAAO,EAAE,MAAM,WAAW,YAAY,CAAC,EAAE;AAAA,EAC3C;AAEA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,EACR,IAAI;AAEJ,QAAM,SAAkC,CAAC;AAEzC,MAAI;AAAa,WAAO,cAAc;AACtC,MAAI;AAAU,WAAO,WAAW;AAChC,MAAI;AAAQ,WAAO,SAAS;AAE5B,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO,CAAC,UAAU;AAAA,EAC3B;AAGA,MAAI,MAAM;AACR,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,UAAI,KAAK,SAAS,MAAM,GAAG;AACzB,eAAO,OAAO,KAAK,OAAO,OAAK,MAAM,MAAM,EAAE,CAAC;AAC9C,eAAO,WAAW;AAAA,MACpB,OAAO;AACL,eAAO,OAAO;AAAA,MAChB;AAAA,IACF,WAAW,SAAS,QAAQ;AAC1B,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,aAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAGA,MAAI,eAAe,QAAW;AAC5B,WAAO,OAAO;AAAA,EAChB;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,aAAa,OAAO,QAAQ,UAAU,EAAE;AAAA,MAC7C,CAAC,KAAK,CAAC,KAAK,KAAK,MAAM;AACrB,YAAI,GAAG,IAAI,iCAAiC,KAAK;AACjD,eAAO;AAAA,MACT;AAAA,MACA,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,QAAQ,KAAK,IAC9B,MAAM,IAAI,gCAAgC,IAC1C,iCAAiC,KAAK;AAAA,EAC5C;AAEA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AACA,MAAI,OAAO;AAET,QACE,MAAM;AAAA,MACJ,YAAU,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,IAC3D,GACA;AACA,YAAM,iBAAiB,MAAM;AAAA,QAC3B,YAAU,EAAE,OAAO,WAAW,aAAY,iCAAQ,UAAS;AAAA,MAC7D;AAEA,UAAI,eAAe,WAAW,GAAG;AAE/B,cAAM,YAAY,iCAAiC,eAAe,CAAC,CAAC;AACpE,YAAI,OAAO,cAAc,UAAU;AACjC,iBAAO,WAAW;AAClB,iBAAO,OAAO,QAAQ,SAAS;AAAA,QACjC;AAAA,MACF,OAAO;AAEL,eAAO,QAAQ,eAAe,IAAI,gCAAgC;AAClE,eAAO,WAAW;AAAA,MACpB;AAAA,IACF,OAAO;AACL,aAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,IAC3D;AAAA,EACF;AACA,MAAI,OAAO;AACT,WAAO,QAAQ,MAAM,IAAI,gCAAgC;AAAA,EAC3D;AAEA,MAAI,cAAc,QAAW;AAC3B,WAAO,YAAY;AAAA,EACrB;AAEA,SAAO;AACT;AAEA,SAAS,oBAAoB,YAA4C;AACvE,SACE,cAA
c,QACd,OAAO,eAAe,YACtB,WAAW,SAAS,aACnB,WAAW,cAAc,QACxB,OAAO,KAAK,WAAW,UAAU,EAAE,WAAW;AAEpD;;;AChIA,IAAAC,mBAGO;AAMP,IAAAC,yBAGO;AAEA,SAAS,oCACd,QAC0B;AAC1B,QAAM,yBAAkD,CAAC;AACzD,QAAM,WAA6C,CAAC;AACpD,MAAI,wBAAwB;AAE5B,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,YAAI,CAAC,uBAAuB;AAC1B,gBAAM,IAAI,+CAA8B;AAAA,YACtC,eACE;AAAA,UACJ,CAAC;AAAA,QACH;AAEA,+BAAuB,KAAK,EAAE,MAAM,QAAQ,CAAC;AAC7C;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,cAAM,QAAyC,CAAC;AAEhD,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,oBAAM,KAAK,EAAE,MAAM,KAAK,KAAK,CAAC;AAC9B;AAAA,YACF;AAAA,YAEA,KAAK,QAAQ;AAEX,oBAAM,YACJ,KAAK,cAAc,YAAY,eAAe,KAAK;AAErD,oBAAM;AAAA,gBACJ,KAAK,gBAAgB,MACjB;AAAA,kBACE,UAAU;AAAA,oBACR,UAAU;AAAA,oBACV,SAAS,KAAK,KAAK,SAAS;AAAA,kBAC9B;AAAA,gBACF,IACA;AAAA,kBACE,YAAY;AAAA,oBACV,UAAU;AAAA,oBACV,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACN;AAEA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK,EAAE,MAAM,QAAQ,MAAM,CAAC;AACrC;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QACJ,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK,KAAK,WAAW,IACxB,SACA,EAAE,MAAM,KAAK,KAAK;AAAA,cACxB;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,cAAc,aAAa;AAClC,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,oBAAI,KAAK,gBAAgB,KAAK;AAC5B,wBAAM,IAAI,+CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAEA,uBAAO;AAAA,kBACL,YAAY;AAAA,oBACV,UAAU,KAAK;AAAA,oBACf,UAAM,wCAAgB,KAAK,IAAI;AAAA,kBACjC;AAAA,gBACF;AAAA,cACF;AAAA,cAEA,KAAK,aAAa;AAChB,uBAAO;AAAA,kBACL,cAAc;AAAA,oBACZ,MAAM,KAAK;AAAA,oBACX,MAAM,KAAK;AAAA,kBACb;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC,EACA,OAAO,UAAQ,SAAS,MAAS;AAAA,QACtC,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,gCAAwB;AAExB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,OAAO,QAAQ,IAAI,WAAS;AAAA,YAC1B,kBAAkB;AAAA,cAChB,MAAM,KAAK;AAAA,cACX,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,SAAS,KAAK;AAAA,cAChB;AAAA,YACF;AAAA,UACF,EAAE;AAAA,QACJ,CAAC;AACD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,mBACE,uBAAuB,SAAS,IAC5B,EAAE,OAAO,uBAAuB,IAChC;AAAA,IACN;AAAA,EACF;AACF;;;AC5JO,SAAS,aAAa,SAAyB;AACpD,SAAO,QAAQ,SAAS,GAAG,IAAI,UAAU,UAAU,OAAO;AAC5D;;;ACFA,IAAAC,mBAIO;AAOA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GA+BE;AAhDF;AAkDE,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,QAAM,YAAY,QAAQ,SAAS,UAAU;AAC7C,QAAM,2BACJ,QAAQ,SAAS,kBAAkB,KAAK,CAAC,QAAQ,SAAS,KAAK;AAEjE,MAAI,oBAAoB;AACtB,WAAO;AAAA,MACL,OAAO,YACH,EAAE,cAAc,CAAC,EAAE,IACnB;AAAA,QACE,uBACE,CAAC,4BAA4B,CAAC,yBAC1B,CAAC,IACD,EAAE,uBAAuB;AAAA,MACjC;AAAA,MACJ,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,uBAAuB,CAAC;AAC9B,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,2BAAqB,KAAK;AAAA,QACxB,MAAM,KAAK;AAAA,QACX,cAAa,UAAK,gBAAL,YAAoB;AAAA,QACjC,YAAY,iCAAiC,KAAK,UAAU;AAAA,MAC9D,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO;AAAA,MACL,OAAO,EAAE,qBAAqB;AAAA,MAC9B,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,OAAO,EAAE;AAAA,QACtD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY,EAAE,uBAAuB,EAAE,MAAM,MAAM,EAAE;AAAA,QACrD;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO;AAAA,QACL,OAAO,EAAE,qBAAqB;AAAA,QAC9B,YAAY;AAAA,UACV,uBAAuB;AAAA,YACrB,MAAM;AAAA,YACN,sBAAs
B,CAAC,WAAW,QAAQ;AAAA,UAC5C;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACvIO,SAAS,kCAAkC;AAAA,EAChD;AAAA,EACA;AACF,GAGgC;AAC9B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO,eAAe,eAAe;AAAA,IACvC,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;ALcO,IAAM,kCAAN,MAAiE;AAAA,EActE,YACE,SACA,UACA,QACA;AAjBF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAgB3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAjBA,IAAI,4BAA4B;AAhDlC;AAiDI,YAAO,UAAK,SAAS,sBAAd,YAAmC;AAAA,EAC5C;AAAA,EAiBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArFnD;AAsFI,UAAM,WAAyC,CAAC;AAEhD,UAAM,oBAAgB,6CAAqB;AAAA,MACzC,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,EAAE,UAAU,kBAAkB,IAClC,oCAAoC,MAAM;AAE5C,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,MACA,qBAAoB,UAAK,SAAS,uBAAd,YAAoC;AAAA,MACxD,wBAAwB,KAAK,SAAS;AAAA,MACtC,SAAS,KAAK;AAAA,IAChB,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,kBAAkB;AAAA;AAAA,UAEhB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA;AAAA,UAGA,mBACE,iDAAgB,UAAS,SAAS,qBAAqB;AAAA,UACzD,iBACE,iDAAgB,UAAS,UACzB,eAAe,UAAU;AAAA;AAAA,UAGzB,KAAK,4BACD,iCAAiC,eAAe,MAAM,IACtD;AAAA,UACN,GAAI,KAAK,SAAS,kBAAkB;AAAA,YAClC,gBAAgB,KAAK,SAAS;AAAA,UAChC;AAAA;AAAA,UAGA,oBAAoB,+CAAe;AAAA,QACrC;AAAA,QACA;AAAA,QACA;AAAA,QACA,gBAAgB,KAAK,SAAS;AAAA,QAC9B,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,eAAe,KAAK,SAAS;AAAA,MAC/B;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,YAAY,KAAmB;AAC7B,WAAO,KAAK,OAAO,eAAe,GAAG;AAAA,EACvC;AAAA,EAEA,MAAM,WACJ,SAC6D;AA7JjE;AA8JI,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,KAAK,UAAU,IAAI;AAEhC,UAAM,oBAAgB;AAAA,MACpB,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD,SAAS;AAAA,MACT,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,kDAA0B,cAAc;AAAA,MACnE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,YAAY,SAAS,WAAW,CAAC;AACvC,UAAM,UAAyC,CAAC;AAGhD,UAAM,QACJ,UAAU,WAAW,QACrB,OAAO,UAAU,YAAY,YAC7B,EAAE,WAAW,UAAU,WACnB,CAAC,KACA,eAAU,QAAQ,UAAlB,YAA2B,CAAC;AAEnC,eAAW,QAAQ,OAAO;AACxB,UAAI,UAAU,QAAQ,KAAK,KAAK,SAAS,GAAG;AAC1C,gBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,MAChD,WAAW,kBAAkB,MAAM;AACjC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,cAAc;AAAA,UACd,YAAY,KAAK,OAAO,WAAW;AAAA,UACnC,UAAU,KAAK,aAAa;AAAA,UAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,QAC7C,CAAC;AAAA,MACH,WAAW,gBAAgB,MAAM;AAC/B,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,MAAM,KAAK,WAAW;AAAA,UACtB,WAAW,KAAK,WAAW;AAAA,QAC7B,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,WACJ,oBAAe;AAAA,MACb,mBAAmB,UAAU;AAAA,MAC7B,YAAY,KAAK,OAAO;AAAA,IAC1B,CAAC,MAHD,YAGM,CAAC;AACT,eAAW,UAAU,SAAS;AAC5B,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,UAAM,gBAAgB,SAAS;AAE/B,WAAO;AAAA,MACL;AAAA,MACA,cAAc,kCAAkC;AAAA,QAC9C,cAAc,UAAU;AAAA,QACxB,cAAc,QAAQ,KAAK,UAAQ,KAAK,SAAS,WAAW;AAAA,MAC9D,CAAC;AAAA,MACD,OAAO;AAAA,QACL,cAAa,oDAAe,qBAAf,YAAmC;AAAA,QAChD,eAAc,oDAAe,yBAAf,YAAuC;AAAA,MACvD;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,QAChB,QAAQ;AAAA,UACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,UAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,QAC5C;AA
AA,MACF;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA;AAAA,QAER,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAErD,UAAM,OAAO,KAAK,UAAU,IAAI;AAChC,UAAM,cAAU;AAAA,MACd,UAAM,gCAAQ,KAAK,OAAO,OAAO;AAAA,MACjC,QAAQ;AAAA,IACV;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO,IAAI;AAAA,QAC7B,KAAK;AAAA,MACP,CAAC;AAAA,MACD;AAAA,MACA,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B,yDAAiC,WAAW;AAAA,MACvE,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,IAChB;AACA,QAAI,mBAAyD;AAE7D,UAAMC,cAAa,KAAK,OAAO;AAC/B,QAAI,eAAe;AAEnB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAtSvC;AAuSY,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,kBAAM,gBAAgB,MAAM;AAE5B,gBAAI,iBAAiB,MAAM;AACzB,oBAAM,eAAc,mBAAc,qBAAd,YAAkC;AACtD,oBAAM,gBACJ,mBAAc,yBAAd,YAAsC;AAAA,YAC1C;AAEA,kBAAM,aAAY,WAAM,eAAN,mBAAmB;AAGrC,gBAAI,aAAa,MAAM;AACrB;AAAA,YACF;AAEA,kBAAM,UAAU,UAAU;AAG1B,gBAAI,WAAW,MAAM;AACnB,oBAAM,YAAY,iBAAiB,QAAQ,KAAK;AAChD,kBAAI,aAAa,MAAM;AACrB,2BAAW,QAAQ,SAAS;AAAA,cAC9B;AAEA,oBAAM,kBAAkB,mBAAmB,QAAQ,KAAK;AACxD,kBAAI,mBAAmB,MAAM;AAC3B,2BAAW,QAAQ,iBAAiB;AAClC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,WAAW,KAAK,WAAW;AAAA,oBAC3B,MAAM,KAAK,WAAW;AAAA,kBACxB,CAAC;AAAA,gBACH;AAAA,cACF;AAEA,oBAAM,iBAAiB,sBAAsB;AAAA,gBAC3C,OAAO,QAAQ;AAAA,gBACf,YAAAA;AAAA,cACF,CAAC;AAED,kBAAI,kBAAkB,MAAM;AAC1B,2BAAW,YAAY,gBAAgB;AACrC,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,eAAe,SAAS;AAAA,kBAC1B,CAAC;AAED,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,cAAc;AAAA,oBACd,YAAY,SAAS;AAAA,oBACrB,UAAU,SAAS;AAAA,oBACnB,MAAM,SAAS;AAAA,kBACjB,CAAC;AAED,iCAAe;AAAA,gBACjB;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,UAAU,gBAAgB,MAAM;AAClC,6BAAe,kCAAkC;AAAA,gBAC/C,cAAc,UAAU;AAAA,gBACxB;AAAA,cACF,CAAC;AAED,oBAAM,WACJ,oBAAe;AAAA,gBACb,mBAAmB,UAAU;AAAA,gBAC7B,YAAAA;AAAA,cACF,CAAC,MAHD,YAGM,CAAC;AAET,yBAAW,UAAU,SAAS;AAC5B,2BAAW,QAAQ,MAAM;AAAA,cAC3B;AAEA,iCAAmB;AAAA,gBACjB,QAAQ;AAAA,kBACN,oBAAmB,eAAU,sBAAV,YAA+B;AAAA,kBAClD,gBAAe,eAAU,kBAAV,YAA2B;AAAA,gBAC5C;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,UAAU,EAAE,SAAS,gBAAgB;AAAA,MACrC,SAAS,EAAE,KAAK;AAAA,IAClB;AAAA,EACF;AACF;AAEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA,YAAAA;AACF,GAGG;AACD,QAAM,oBAAoB,+BAAO;AAAA,IAC/B,UAAQ,kBAAkB;AAAA;AAO5B,SAAO,qBAAqB,QAAQ,kBAAkB,WAAW,IAC7D,SACA,kBAAkB,IAAI,WAAS;AAAA,IAC7B,MAAM;AAAA,IACN,cAAc;AAAA,IACd,YAAYA,YAAW;AAAA,IACvB,UAAU,KAAK,aAAa;AAAA,IAC5B,MAAM,KAAK,UAAU,KAAK,aAAa,IAAI;AAAA,EAC7C,EAAE;AACR;AAEA,SAAS,iBAAiB,OAA+C;AACvE,QAAM,YAAY,+BAAO,OAAO,UAAQ,UAAU;AAIlD,SAAO,aAAa,QAAQ,UAAU,WAAW,IAC7C,SACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM,UAAU,IAAI,UAAQ,KAAK,IAAI,EAAE,KAAK,EAAE;AAAA,EAChD;AACN;AAEA,SAAS,mBAAmB,OAA+C;AACzE,SAAO,+BAAO;AAAA,IACZ,CACE,SAGG,gBAAgB;AAAA;AAEzB;AAEA,SAAS,eAAe;AAAA,EACtB;AAAA,EACA,YAAAA;AACF,GAGwC;AA7cxC;AA8cE,UAAO,4DAAmB,oBAAnB,mBACH;AAAA,IACA,CACE,UAGG,MAAM,OAAO;AAAA,IAEnB,IAAI,YAAU;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,IAAIA,YAAW;AAAA,IACf,KAAK,MAAM,IAAI;AAAA,IACf,OAAO,MAAM,IAAI;AAAA,EACnB;AACJ;AAEA,IAAM,gBAAgB,cAAE,OAAO;AAAA,EAC7B,MAAM,cAAE,OAAO;AAAA,EACf,OAAO,cACJ;AAAA,IACC,cAAE,MAAM;AAAA,MACN,cAAE,OAAO;AAAA,QACP,MAAM,cAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP
,cAAc,cAAE,OAAO;AAAA,UACrB,MAAM,cAAE,OAAO;AAAA,UACf,MAAM,cAAE,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH,CAAC;AAAA,MACD,cAAE,OAAO;AAAA,QACP,YAAY,cAAE,OAAO;AAAA,UACnB,UAAU,cAAE,OAAO;AAAA,UACnB,MAAM,cAAE,OAAO;AAAA,QACjB,CAAC;AAAA,MACH,CAAC;AAAA,IACH,CAAC;AAAA,EACH,EACC,QAAQ;AACb,CAAC;AAID,IAAM,uBAAuB,cAAE,OAAO;AAAA,EACpC,KAAK,cAAE,OAAO,EAAE,KAAK,cAAE,OAAO,GAAG,OAAO,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EAC9D,kBAAkB,cAAE,OAAO,EAAE,KAAK,cAAE,OAAO,GAAG,OAAO,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAC7E,CAAC;AAEM,IAAM,0BAA0B,cAAE,OAAO;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,EAC9C,kBAAkB,cAAE,OAAO,EAAE,iBAAiB,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AAAA,EACpE,iBAAiB,cAAE,MAAM,oBAAoB,EAAE,QAAQ;AAAA,EACvD,mBAAmB,cAChB;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,YAAY,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC/B,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC7B,MAAM,cAAE,OAAO,EAAE,QAAQ;AAAA,MAC3B,CAAC;AAAA,MACD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,uBAAuB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACnD,qBAAqB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MACjD,kBAAkB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,MAC9C,iBAAiB,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,QAAQ;AAAA,IAC/C,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,mBAAmB,cAChB,MAAM;AAAA,IACL,cAAE,OAAO;AAAA,MACP,0BAA0B,cAAE,OAAO;AAAA,IACrC,CAAC;AAAA,IACD,cAAE,OAAO,CAAC,CAAC;AAAA,EACb,CAAC,EACA,QAAQ;AACb,CAAC;AAGM,IAAM,qBAAqB,cAAE,OAAO;AAAA,EACzC,UAAU,cAAE,OAAO;AAAA,EACnB,aAAa,cAAE,OAAO;AAAA,EACtB,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACrC,UAAU,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC7B,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,EAClC,SAAS,cAAE,QAAQ,EAAE,QAAQ;AAC/B,CAAC;AAED,IAAM,iBAAiB,cAAE,OAAO;AAAA,EAC9B,YAAY,cAAE;AAAA,IACZ,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ,EAAE,GAAG,cAAE,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC;AAAA,MACzD,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AAAA,EACA,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;AAID,IAAM,cAAc,cAAE,OAAO;AAAA,EAC3B,YAAY,cACT;AAAA,IACC,cAAE,OAAO;AAAA,MACP,SAAS,cAAc,QAAQ;AAAA,MAC/B,cAAc,cAAE,OAAO,EAAE,QAAQ;AAAA,MACjC,eAAe,cAAE,MAAM,kBAAkB,EAAE,QAAQ;AAAA,MACnD,mBAAmB,wBAAwB,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH,EACC,QAAQ;AAAA,EACX,eAAe,cACZ,OAAO;AAAA,IACN,kBAAkB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACrC,sBAAsB,cAAE,OAAO,EAAE,QAAQ;AAAA,IACzC,iBAAiB,cAAE,OAAO,EAAE,QAAQ;AAAA,EACtC,CAAC,EACA,QAAQ;AACb,CAAC;AAED,IAAM,0CAA0C,cAAE,OAAO;AAAA,EACvD,oBAAoB,cAAE,MAAM,cAAE,KAAK,CAAC,QAAQ,OAAO,CAAC,CAAC,EAAE,QAAQ;AACjE,CAAC;;;AMllBM,SAAS,mBAAmB,KAAmB;AACpD,SAAO,IACJ,SAAS,EACT,WAAW,yDAAyD;AACzE;;;ATmGO,SAAS,yBACd,UAA8C,CAAC,GACnB;AAzG9B;AA0GE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YACA;AAEF,QAAM,aAAa,OAAO;AAAA,IACxB,sBAAkB,mCAAW;AAAA,MAC3B,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC;AAAA,IACD,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAuC,CAAC,MACxC;AA1HJ,QAAAC;AA2HI,eAAI,gCAAgC,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAYA,MAAA,QAAQ,eAAR,OAAAA,MAAsB;AAAA,MAClC,gBAAgB;AAAA,MAChB,OAAO,QAAQ;AAAA,IACjB,CAAC;AAAA;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAgD,CAAC,MAEjD,IAAI,iCAAiC,SAAS,UAAU;AAAA,IACtD,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,eAAe;AACxB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACj
E;AAEA,SAAO;AACT;AAKO,IAAM,SAAS,yBAAyB;","names":["import_provider","import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_zod","import_provider","import_provider_utils","import_provider","generateId","_a"]}
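For orientation, the `sourcesContent` embedded in the updated source map above includes the provider factory from `google-provider.ts`. A minimal usage sketch of that factory follows; the model id and environment handling are illustrative assumptions, not part of this diff:

```ts
// Illustrative sketch based on the google-provider.ts source embedded in the map above.
// The model id is an example; apiKey falls back to GOOGLE_GENERATIVE_AI_API_KEY when omitted.
import { createGoogleGenerativeAI } from '@ai-sdk/google';

const google = createGoogleGenerativeAI({
  apiKey: process.env.GOOGLE_GENERATIVE_AI_API_KEY, // assumption: key supplied via env
  // baseURL defaults to https://generativelanguage.googleapis.com/v1beta
});

// the provider is callable and returns a LanguageModelV2 for the given model id
const model = google('gemini-1.5-flash');
```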
package/dist/index.mjs CHANGED
@@ -533,7 +533,7 @@ var GoogleGenerativeAILanguageModel = class {
  return this.config.isSupportedUrl(url);
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  const { args, warnings } = await this.getArgs(options);
  const body = JSON.stringify(args);
  const mergedHeaders = combineHeaders2(
@@ -556,39 +556,52 @@ var GoogleGenerativeAILanguageModel = class {
  fetch: this.config.fetch
  });
  const candidate = response.candidates[0];
- const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : candidate.content.parts;
- const toolCalls = getToolCallsFromParts({
- parts,
+ const content = [];
+ const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : (_a = candidate.content.parts) != null ? _a : [];
+ for (const part of parts) {
+ if ("text" in part && part.text.length > 0) {
+ content.push({ type: "text", text: part.text });
+ } else if ("functionCall" in part) {
+ content.push({
+ type: "tool-call",
+ toolCallType: "function",
+ toolCallId: this.config.generateId(),
+ toolName: part.functionCall.name,
+ args: JSON.stringify(part.functionCall.args)
+ });
+ } else if ("inlineData" in part) {
+ content.push({
+ type: "file",
+ data: part.inlineData.data,
+ mediaType: part.inlineData.mimeType
+ });
+ }
+ }
+ const sources = (_b = extractSources({
+ groundingMetadata: candidate.groundingMetadata,
  generateId: this.config.generateId
- });
+ })) != null ? _b : [];
+ for (const source of sources) {
+ content.push(source);
+ }
  const usageMetadata = response.usageMetadata;
  return {
- text: getTextFromParts(parts),
- files: (_a = getInlineDataParts(parts)) == null ? void 0 : _a.map((part) => ({
- type: "file",
- data: part.inlineData.data,
- mediaType: part.inlineData.mimeType
- })),
- toolCalls,
+ content,
  finishReason: mapGoogleGenerativeAIFinishReason({
  finishReason: candidate.finishReason,
- hasToolCalls: toolCalls != null && toolCalls.length > 0
+ hasToolCalls: content.some((part) => part.type === "tool-call")
  }),
  usage: {
- inputTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _b : void 0,
- outputTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _c : void 0
+ inputTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _c : void 0,
+ outputTokens: (_d = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _d : void 0
  },
  warnings,
  providerMetadata: {
  google: {
- groundingMetadata: (_d = candidate.groundingMetadata) != null ? _d : null,
- safetyRatings: (_e = candidate.safetyRatings) != null ? _e : null
+ groundingMetadata: (_e = candidate.groundingMetadata) != null ? _e : null,
+ safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
  }
  },
- sources: extractSources({
- groundingMetadata: candidate.groundingMetadata,
- generateId: this.config.generateId
- }),
  request: { body },
  response: {
  // TODO timestamp, model id, id
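The hunk above replaces the separate `text`, `files`, `toolCalls`, and `sources` fields of the `doGenerate` result with a single ordered `content` array. A minimal consumer-side sketch follows; `model` and `options` are assumptions, not part of this diff, and the part shapes mirror the hunk above:

```ts
// Sketch only: consuming the unified `content` array introduced in canary.9.
const { content } = await model.doGenerate(options);

for (const part of content) {
  if (part.type === 'text') {
    process.stdout.write(part.text);
  } else if (part.type === 'tool-call') {
    // args is a JSON string of the function call arguments
    console.log('tool call', part.toolName, JSON.parse(part.args));
  } else if (part.type === 'file') {
    // data is the base64-encoded inline data
    console.log('file part', part.mediaType, part.data.length, 'base64 chars');
  } else if (part.type === 'source') {
    // grounding sources are appended after the ordered parts
    console.log('source', part.url, part.title);
  }
}
```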
@@ -626,6 +639,9 @@ var GoogleGenerativeAILanguageModel = class {
  return {
  stream: response.pipeThrough(
  new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
  transform(chunk, controller) {
  var _a, _b, _c, _d, _e, _f;
  if (!chunk.success) {
@@ -713,7 +729,6 @@ var GoogleGenerativeAILanguageModel = class {
  })
  ),
  response: { headers: responseHeaders },
- warnings,
  request: { body }
  };
  }
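Together, the last two hunks move call warnings off the `doStream` return value and onto the stream itself: a `stream-start` part now carries them before any deltas. A consumer-side sketch, with `model` and `options` again assumed:

```ts
// Sketch only: reading warnings from the new `stream-start` part in canary.9.
// Part shapes follow the hunks above; `model` and `options` are assumptions.
const { stream } = await model.doStream(options);
const reader = stream.getReader();

while (true) {
  const { done, value } = await reader.read();
  if (done) break;

  if (value.type === 'stream-start') {
    console.warn('call warnings:', value.warnings); // previously returned next to `stream`
  } else if (value.type === 'finish') {
    console.log('finish:', value.finishReason, value.usage);
  }
}
```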