@ai-sdk/google 2.0.0-canary.0 → 2.0.0-canary.10

This diff shows the published contents of two publicly available package versions from a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
@@ -11,7 +11,7 @@ import { z as z2 } from "zod";
 
  // src/convert-json-schema-to-openapi-schema.ts
  function convertJSONSchemaToOpenAPISchema(jsonSchema) {
- if (isEmptyObjectSchema(jsonSchema)) {
+ if (jsonSchema == null || isEmptyObjectSchema(jsonSchema)) {
  return void 0;
  }
  if (typeof jsonSchema === "boolean") {
@@ -110,9 +110,10 @@ function isEmptyObjectSchema(jsonSchema) {
  import {
  UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
- import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
+ import {
+ convertToBase64
+ } from "@ai-sdk/provider-utils";
  function convertToGoogleGenerativeAIMessages(prompt) {
- var _a, _b;
  const systemInstructionParts = [];
  const contents = [];
  let systemMessagesAllowed = true;
@@ -136,33 +137,18 @@ function convertToGoogleGenerativeAIMessages(prompt) {
  parts.push({ text: part.text });
  break;
  }
- case "image": {
- parts.push(
- part.image instanceof URL ? {
- fileData: {
- mimeType: (_a = part.mimeType) != null ? _a : "image/jpeg",
- fileUri: part.image.toString()
- }
- } : {
- inlineData: {
- mimeType: (_b = part.mimeType) != null ? _b : "image/jpeg",
- data: convertUint8ArrayToBase64(part.image)
- }
- }
- );
- break;
- }
  case "file": {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
  parts.push(
  part.data instanceof URL ? {
  fileData: {
- mimeType: part.mimeType,
+ mimeType: mediaType,
  fileUri: part.data.toString()
  }
  } : {
  inlineData: {
- mimeType: part.mimeType,
- data: part.data
+ mimeType: mediaType,
+ data: convertToBase64(part.data)
  }
  }
  );
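
For context on the hunk above: the separate "image" part case is gone, and user file parts now carry a mediaType rather than a mimeType. Below is a minimal sketch of the resulting mapping, written against the shapes visible in this diff; the FilePart type is an assumption used only for illustration, and it assumes convertToBase64 accepts both base64 strings and Uint8Array data, as its use here suggests.

import { convertToBase64 } from "@ai-sdk/provider-utils";

// Assumed shape for illustration; the real type lives in @ai-sdk/provider.
type FilePart = { type: "file"; data: URL | string | Uint8Array; mediaType: string };

function toGooglePart(part: FilePart) {
  // "image/*" is a wildcard media type; the provider substitutes a concrete default.
  const mimeType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;

  return part.data instanceof URL
    ? { fileData: { mimeType, fileUri: part.data.toString() } } // remote file: referenced by URI
    : { inlineData: { mimeType, data: convertToBase64(part.data) } }; // local bytes: inlined as base64
}
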
@@ -183,7 +169,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
  return part.text.length === 0 ? void 0 : { text: part.text };
  }
  case "file": {
- if (part.mimeType !== "image/png") {
+ if (part.mediaType !== "image/png") {
  throw new UnsupportedFunctionalityError({
  functionality: "Only PNG images are supported in assistant messages"
  });
@@ -195,8 +181,8 @@ function convertToGoogleGenerativeAIMessages(prompt) {
  }
  return {
  inlineData: {
- mimeType: part.mimeType,
- data: part.data
+ mimeType: part.mediaType,
+ data: convertToBase64(part.data)
  }
  };
  }
@@ -261,9 +247,15 @@ var googleFailedResponseHandler = createJsonErrorResponseHandler({
  import {
  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
  } from "@ai-sdk/provider";
- function prepareTools(mode, useSearchGrounding, dynamicRetrievalConfig, modelId) {
- var _a, _b;
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ function prepareTools({
+ tools,
+ toolChoice,
+ useSearchGrounding,
+ dynamicRetrievalConfig,
+ modelId
+ }) {
+ var _a;
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  const toolWarnings = [];
  const isGemini2 = modelId.includes("gemini-2");
  const supportsDynamicRetrieval = modelId.includes("gemini-1.5-flash") && !modelId.includes("-8b");
@@ -286,12 +278,11 @@ function prepareTools(mode, useSearchGrounding, dynamicRetrievalConfig, modelId)
  } else {
  functionDeclarations.push({
  name: tool.name,
- description: (_b = tool.description) != null ? _b : "",
+ description: (_a = tool.description) != null ? _a : "",
  parameters: convertJSONSchemaToOpenAPISchema(tool.parameters)
  });
  }
  }
- const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
  return {
  tools: { functionDeclarations },
@@ -333,7 +324,7 @@ function prepareTools(mode, useSearchGrounding, dynamicRetrievalConfig, modelId)
  default: {
  const _exhaustiveCheck = type;
  throw new UnsupportedFunctionalityError2({
- functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
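
The prepareTools hunks above change the function from positional arguments (mode, useSearchGrounding, dynamicRetrievalConfig, modelId) to a single options object that receives tools and toolChoice directly instead of reading them from mode. A hedged sketch of a call site under the new signature follows; the tool fields match what the loop above reads (name, description, parameters), but the weather tool itself is an invented example, not taken from this package.

const { tools: googleTools, toolConfig, toolWarnings } = prepareTools({
  tools: [
    {
      type: "function",
      name: "getWeather",
      description: "Look up the current weather",
      parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    },
  ],
  toolChoice: { type: "auto" },
  useSearchGrounding: false,
  dynamicRetrievalConfig: undefined,
  modelId: "gemini-2.0-flash",
});
// googleTools.functionDeclarations carries the parameters converted by
// convertJSONSchemaToOpenAPISchema, and toolConfig.functionCallingConfig.mode is "AUTO".
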
@@ -369,24 +360,21 @@ function mapGoogleGenerativeAIFinishReason({
  // src/google-generative-ai-language-model.ts
  var GoogleGenerativeAILanguageModel = class {
  constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
- this.defaultObjectGenerationMode = "json";
- this.supportsImageUrls = false;
+ this.specificationVersion = "v2";
  this.modelId = modelId;
  this.settings = settings;
  this.config = config;
  }
- get supportsStructuredOutputs() {
- var _a;
- return (_a = this.settings.structuredOutputs) != null ? _a : true;
- }
  get provider() {
  return this.config.provider;
  }
+ async getSupportedUrls() {
+ var _a, _b, _c;
+ return (_c = (_b = (_a = this.config).getSupportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
+ }
  async getArgs({
- mode,
  prompt,
- maxTokens,
+ maxOutputTokens,
  temperature,
  topP,
  topK,
@@ -395,113 +383,66 @@ var GoogleGenerativeAILanguageModel = class {
  stopSequences,
  responseFormat,
  seed,
- providerMetadata
+ tools,
+ toolChoice,
+ providerOptions
  }) {
  var _a, _b;
- const type = mode.type;
  const warnings = [];
  const googleOptions = parseProviderOptions({
  provider: "google",
- providerOptions: providerMetadata,
- schema: z2.object({
- responseModalities: z2.array(z2.enum(["TEXT", "IMAGE"])).nullish()
- })
+ providerOptions,
+ schema: googleGenerativeAIProviderOptionsSchema
  });
- const generationConfig = {
- // standardized settings:
- maxOutputTokens: maxTokens,
- temperature,
- topK,
- topP,
- frequencyPenalty,
- presencePenalty,
- stopSequences,
- seed,
- // response format:
- responseMimeType: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? "application/json" : void 0,
- responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
- // so this is needed as an escape hatch:
- this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
- ...this.settings.audioTimestamp && {
- audioTimestamp: this.settings.audioTimestamp
- },
- // provider options:
- responseModalities: googleOptions == null ? void 0 : googleOptions.responseModalities
- };
  const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
- switch (type) {
- case "regular": {
- const { tools, toolConfig, toolWarnings } = prepareTools(
- mode,
- (_a = this.settings.useSearchGrounding) != null ? _a : false,
- this.settings.dynamicRetrievalConfig,
- this.modelId
- );
- return {
- args: {
- generationConfig,
- contents,
- systemInstruction,
- safetySettings: this.settings.safetySettings,
- tools,
- toolConfig,
- cachedContent: this.settings.cachedContent
- },
- warnings: [...warnings, ...toolWarnings]
- };
- }
- case "object-json": {
- return {
- args: {
- generationConfig: {
- ...generationConfig,
- responseMimeType: "application/json",
- responseSchema: mode.schema != null && // Google GenAI does not support all OpenAPI Schema features,
- // so this is needed as an escape hatch:
- this.supportsStructuredOutputs ? convertJSONSchemaToOpenAPISchema(mode.schema) : void 0
- },
- contents,
- systemInstruction,
- safetySettings: this.settings.safetySettings,
- cachedContent: this.settings.cachedContent
- },
- warnings
- };
- }
- case "object-tool": {
- return {
- args: {
- generationConfig,
- contents,
- tools: {
- functionDeclarations: [
- {
- name: mode.tool.name,
- description: (_b = mode.tool.description) != null ? _b : "",
- parameters: convertJSONSchemaToOpenAPISchema(
- mode.tool.parameters
- )
- }
- ]
- },
- toolConfig: { functionCallingConfig: { mode: "ANY" } },
- safetySettings: this.settings.safetySettings,
- cachedContent: this.settings.cachedContent
+ const {
+ tools: googleTools,
+ toolConfig: googleToolConfig,
+ toolWarnings
+ } = prepareTools({
+ tools,
+ toolChoice,
+ useSearchGrounding: (_a = this.settings.useSearchGrounding) != null ? _a : false,
+ dynamicRetrievalConfig: this.settings.dynamicRetrievalConfig,
+ modelId: this.modelId
+ });
+ return {
+ args: {
+ generationConfig: {
+ // standardized settings:
+ maxOutputTokens,
+ temperature,
+ topK,
+ topP,
+ frequencyPenalty,
+ presencePenalty,
+ stopSequences,
+ seed,
+ // response format:
+ responseMimeType: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? "application/json" : void 0,
+ responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
+ // so this is needed as an escape hatch:
+ // TODO convert into provider option
+ ((_b = this.settings.structuredOutputs) != null ? _b : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
+ ...this.settings.audioTimestamp && {
+ audioTimestamp: this.settings.audioTimestamp
  },
- warnings
- };
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
- }
- supportsUrl(url) {
- return this.config.isSupportedUrl(url);
+ // provider options:
+ responseModalities: googleOptions == null ? void 0 : googleOptions.responseModalities,
+ thinkingConfig: googleOptions == null ? void 0 : googleOptions.thinkingConfig
+ },
+ contents,
+ systemInstruction,
+ safetySettings: this.settings.safetySettings,
+ tools: googleTools,
+ toolConfig: googleToolConfig,
+ cachedContent: this.settings.cachedContent
+ },
+ warnings: [...warnings, ...toolWarnings]
+ };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e;
+ var _a, _b, _c, _d, _e, _f;
  const { args, warnings } = await this.getArgs(options);
  const body = JSON.stringify(args);
  const mergedHeaders = combineHeaders(
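
With the mode switch removed from getArgs in the hunk above, JSON output, tools, and provider options all arrive through the flat v2 call options. The sketch below shows the shape of the options object the reworked getArgs and doGenerate now consume; the field values are illustrative only and are not taken from this package.

const callOptions = {
  prompt: [
    { role: "user", content: [{ type: "text", text: "List three colors as JSON." }] },
  ],
  maxOutputTokens: 256, // renamed from maxTokens
  responseFormat: {
    type: "json",
    schema: {
      type: "object",
      properties: { colors: { type: "array", items: { type: "string" } } },
    },
  },
  providerOptions: {
    google: { responseModalities: ["TEXT"] }, // validated against googleGenerativeAIProviderOptionsSchema
  },
};
// responseMimeType becomes "application/json" and, unless settings.structuredOutputs === false,
// responseSchema is the schema converted by convertJSONSchemaToOpenAPISchema.
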
@@ -523,43 +464,59 @@ var GoogleGenerativeAILanguageModel = class {
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
- const { contents: rawPrompt, ...rawSettings } = args;
  const candidate = response.candidates[0];
- const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : candidate.content.parts;
- const toolCalls = getToolCallsFromParts({
- parts,
+ const content = [];
+ const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : (_a = candidate.content.parts) != null ? _a : [];
+ for (const part of parts) {
+ if ("text" in part && part.text.length > 0) {
+ content.push({ type: "text", text: part.text });
+ } else if ("functionCall" in part) {
+ content.push({
+ type: "tool-call",
+ toolCallType: "function",
+ toolCallId: this.config.generateId(),
+ toolName: part.functionCall.name,
+ args: JSON.stringify(part.functionCall.args)
+ });
+ } else if ("inlineData" in part) {
+ content.push({
+ type: "file",
+ data: part.inlineData.data,
+ mediaType: part.inlineData.mimeType
+ });
+ }
+ }
+ const sources = (_b = extractSources({
+ groundingMetadata: candidate.groundingMetadata,
  generateId: this.config.generateId
- });
+ })) != null ? _b : [];
+ for (const source of sources) {
+ content.push(source);
+ }
  const usageMetadata = response.usageMetadata;
  return {
- text: getTextFromParts(parts),
- files: (_a = getInlineDataParts(parts)) == null ? void 0 : _a.map((part) => ({
- data: part.inlineData.data,
- mimeType: part.inlineData.mimeType
- })),
- toolCalls,
+ content,
  finishReason: mapGoogleGenerativeAIFinishReason({
  finishReason: candidate.finishReason,
- hasToolCalls: toolCalls != null && toolCalls.length > 0
+ hasToolCalls: content.some((part) => part.type === "tool-call")
  }),
  usage: {
- promptTokens: (_b = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _b : NaN,
- completionTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _c : NaN
+ inputTokens: (_c = usageMetadata == null ? void 0 : usageMetadata.promptTokenCount) != null ? _c : void 0,
+ outputTokens: (_d = usageMetadata == null ? void 0 : usageMetadata.candidatesTokenCount) != null ? _d : void 0
  },
- rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders, body: rawResponse },
  warnings,
  providerMetadata: {
  google: {
- groundingMetadata: (_d = candidate.groundingMetadata) != null ? _d : null,
- safetyRatings: (_e = candidate.safetyRatings) != null ? _e : null
+ groundingMetadata: (_e = candidate.groundingMetadata) != null ? _e : null,
+ safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
  }
  },
- sources: extractSources({
- groundingMetadata: candidate.groundingMetadata,
- generateId: this.config.generateId
- }),
- request: { body }
+ request: { body },
+ response: {
+ // TODO timestamp, model id, id
+ headers: responseHeaders,
+ body: rawResponse
+ }
  };
  }
  async doStream(options) {
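
In the doGenerate hunk above, the separate text, files, and toolCalls fields are replaced by a single ordered content array, and usage is reported as inputTokens/outputTokens (undefined when the API omits usage metadata). A sketch of consuming that result; model is an instance of the class above and callOptions is the object sketched after the getArgs hunk, so the surrounding call is illustrative rather than definitive.

const result = await model.doGenerate(callOptions);

for (const part of result.content) {
  switch (part.type) {
    case "text":
      process.stdout.write(part.text);
      break;
    case "tool-call":
      console.log(part.toolName, JSON.parse(part.args)); // args is a JSON string
      break;
    case "file":
      console.log(part.mediaType, part.data.length); // inline data, base64-encoded
      break;
    case "source":
      console.log(part.url, part.title); // derived from groundingMetadata.groundingChunks
      break;
  }
}

console.log(result.usage.inputTokens, result.usage.outputTokens);
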
@@ -580,11 +537,10 @@ var GoogleGenerativeAILanguageModel = class {
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
- const { contents: rawPrompt, ...rawSettings } = args;
  let finishReason = "unknown";
- let usage = {
- promptTokens: Number.NaN,
- completionTokens: Number.NaN
+ const usage = {
+ inputTokens: void 0,
+ outputTokens: void 0
  };
  let providerMetadata = void 0;
  const generateId = this.config.generateId;
@@ -592,6 +548,9 @@ var GoogleGenerativeAILanguageModel = class {
  return {
  stream: response.pipeThrough(
  new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
  transform(chunk, controller) {
  var _a, _b, _c, _d, _e, _f;
  if (!chunk.success) {
@@ -601,10 +560,8 @@ var GoogleGenerativeAILanguageModel = class {
  const value = chunk.value;
  const usageMetadata = value.usageMetadata;
  if (usageMetadata != null) {
- usage = {
- promptTokens: (_a = usageMetadata.promptTokenCount) != null ? _a : NaN,
- completionTokens: (_b = usageMetadata.candidatesTokenCount) != null ? _b : NaN
- };
+ usage.inputTokens = (_a = usageMetadata.promptTokenCount) != null ? _a : void 0;
+ usage.outputTokens = (_b = usageMetadata.candidatesTokenCount) != null ? _b : void 0;
  }
  const candidate = (_c = value.candidates) == null ? void 0 : _c[0];
  if (candidate == null) {
@@ -614,17 +571,14 @@ var GoogleGenerativeAILanguageModel = class {
  if (content != null) {
  const deltaText = getTextFromParts(content.parts);
  if (deltaText != null) {
- controller.enqueue({
- type: "text-delta",
- textDelta: deltaText
- });
+ controller.enqueue(deltaText);
  }
  const inlineDataParts = getInlineDataParts(content.parts);
  if (inlineDataParts != null) {
  for (const part of inlineDataParts) {
  controller.enqueue({
  type: "file",
- mimeType: part.inlineData.mimeType,
+ mediaType: part.inlineData.mimeType,
  data: part.inlineData.data
  });
  }
@@ -663,7 +617,7 @@ var GoogleGenerativeAILanguageModel = class {
  generateId
  })) != null ? _d : [];
  for (const source of sources) {
- controller.enqueue({ type: "source", source });
+ controller.enqueue(source);
  }
  providerMetadata = {
  google: {
@@ -683,9 +637,7 @@ var GoogleGenerativeAILanguageModel = class {
  }
  })
  ),
- rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
- warnings,
+ response: { headers: responseHeaders },
  request: { body }
  };
  }
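
On the streaming side (hunks above), the stream now opens with a stream-start part carrying the warnings, text and source parts are enqueued as complete parts rather than wrapped delta objects, file parts use mediaType, and rawCall/rawResponse give way to a response field. A sketch of reading the new stream; the part names come from this diff, while model and callOptions are the illustrative values from the earlier sketches.

const { stream } = await model.doStream(callOptions);
const reader = stream.getReader();

for (;;) {
  const { done, value: part } = await reader.read();
  if (done) break;

  if (part.type === "stream-start") console.warn(part.warnings);
  if (part.type === "text") process.stdout.write(part.text);
  if (part.type === "source") console.log(part.url);
  if (part.type === "finish") console.log(part.finishReason, part.usage);
}
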
@@ -698,6 +650,7 @@ function getToolCallsFromParts({
  (part) => "functionCall" in part
  );
  return functionCallParts == null || functionCallParts.length === 0 ? void 0 : functionCallParts.map((part) => ({
+ type: "tool-call",
  toolCallType: "function",
  toolCallId: generateId(),
  toolName: part.functionCall.name,
@@ -706,7 +659,10 @@ function getToolCallsFromParts({
  }
  function getTextFromParts(parts) {
  const textParts = parts == null ? void 0 : parts.filter((part) => "text" in part);
- return textParts == null || textParts.length === 0 ? void 0 : textParts.map((part) => part.text).join("");
+ return textParts == null || textParts.length === 0 ? void 0 : {
+ type: "text",
+ text: textParts.map((part) => part.text).join("")
+ };
  }
  function getInlineDataParts(parts) {
  return parts == null ? void 0 : parts.filter(
@@ -721,6 +677,7 @@ function extractSources({
  return (_a = groundingMetadata == null ? void 0 : groundingMetadata.groundingChunks) == null ? void 0 : _a.filter(
  (chunk) => chunk.web != null
  ).map((chunk) => ({
+ type: "source",
  sourceType: "url",
  id: generateId(),
  url: chunk.web.uri,
@@ -817,6 +774,12 @@ var chunkSchema = z2.object({
  totalTokenCount: z2.number().nullish()
  }).nullish()
  });
+ var googleGenerativeAIProviderOptionsSchema = z2.object({
+ responseModalities: z2.array(z2.enum(["TEXT", "IMAGE"])).nullish(),
+ thinkingConfig: z2.object({
+ thinkingBudget: z2.number().nullish()
+ }).nullish()
+ });
  export {
  GoogleGenerativeAILanguageModel,
  groundingMetadataSchema,
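
The new googleGenerativeAIProviderOptionsSchema above is what parseProviderOptions validates the per-call google options against. A sketch of how such options would be passed from application code; the generateText wrapper, the google provider entry point, and the model id are assumptions drawn from the wider AI SDK rather than from this diff.

import { generateText } from "ai";
import { google } from "@ai-sdk/google";

const { text } = await generateText({
  model: google("gemini-2.0-flash"),
  prompt: "Summarize the Monty Hall problem in two sentences.",
  providerOptions: {
    google: {
      responseModalities: ["TEXT"],
      thinkingConfig: { thinkingBudget: 1024 }, // both fields are optional (nullish) per the schema
    },
  },
});
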
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/google-generative-ai-language-model.ts","../../src/convert-json-schema-to-openapi-schema.ts","../../src/convert-to-google-generative-ai-messages.ts","../../src/get-model-path.ts","../../src/google-error.ts","../../src/google-prepare-tools.ts","../../src/map-google-generative-ai-finish-reason.ts"],"sourcesContent":[…],"mappings":"…"}