@ai-sdk/google 4.0.0-beta.3 → 4.0.0-beta.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -7,7 +7,7 @@ import {
7
7
  } from "@ai-sdk/provider-utils";
8
8
 
9
9
  // src/version.ts
10
- var VERSION = true ? "4.0.0-beta.3" : "0.0.0-test";
10
+ var VERSION = true ? "4.0.0-beta.30" : "0.0.0-test";
11
11
 
12
12
  // src/google-generative-ai-embedding-model.ts
13
13
  import {
@@ -53,6 +53,15 @@ import {
53
53
  zodSchema as zodSchema2
54
54
  } from "@ai-sdk/provider-utils";
55
55
  import { z as z2 } from "zod/v4";
56
+ var googleEmbeddingContentPartSchema = z2.union([
57
+ z2.object({ text: z2.string() }),
58
+ z2.object({
59
+ inlineData: z2.object({
60
+ mimeType: z2.string(),
61
+ data: z2.string()
62
+ })
63
+ })
64
+ ]);
56
65
  var googleEmbeddingModelOptions = lazySchema2(
57
66
  () => zodSchema2(
58
67
  z2.object({
@@ -82,7 +91,17 @@ var googleEmbeddingModelOptions = lazySchema2(
82
91
  "QUESTION_ANSWERING",
83
92
  "FACT_VERIFICATION",
84
93
  "CODE_RETRIEVAL_QUERY"
85
- ]).optional()
94
+ ]).optional(),
95
+ /**
96
+ * Optional. Per-value multimodal content parts for embedding non-text
97
+ * content (images, video, PDF, audio). Each entry corresponds to the
98
+ * embedding value at the same index and its parts are merged with the
99
+ * text value in the request. Use `null` for entries that are text-only.
100
+ *
101
+ * The array length must match the number of values being embedded. In
102
+ * the case of a single embedding, the array length must be 1.
103
+ */
104
+ content: z2.array(z2.array(googleEmbeddingContentPartSchema).min(1).nullable()).optional()
86
105
  })
87
106
  )
88
107
  );
@@ -90,7 +109,7 @@ var googleEmbeddingModelOptions = lazySchema2(
90
109
  // src/google-generative-ai-embedding-model.ts
91
110
  var GoogleGenerativeAIEmbeddingModel = class {
92
111
  constructor(modelId, config) {
93
- this.specificationVersion = "v3";
112
+ this.specificationVersion = "v4";
94
113
  this.maxEmbeddingsPerCall = 2048;
95
114
  this.supportsParallelCalls = true;
96
115
  this.modelId = modelId;
@@ -122,7 +141,16 @@ var GoogleGenerativeAIEmbeddingModel = class {
122
141
  await resolve(this.config.headers),
123
142
  headers
124
143
  );
144
+ const multimodalContent = googleOptions == null ? void 0 : googleOptions.content;
145
+ if (multimodalContent != null && multimodalContent.length !== values.length) {
146
+ throw new Error(
147
+ `The number of multimodal content entries (${multimodalContent.length}) must match the number of values (${values.length}).`
148
+ );
149
+ }
125
150
  if (values.length === 1) {
151
+ const valueParts = multimodalContent == null ? void 0 : multimodalContent[0];
152
+ const textPart = values[0] ? [{ text: values[0] }] : [];
153
+ const parts = valueParts != null ? [...textPart, ...valueParts] : [{ text: values[0] }];
126
154
  const {
127
155
  responseHeaders: responseHeaders2,
128
156
  value: response2,
@@ -133,7 +161,7 @@ var GoogleGenerativeAIEmbeddingModel = class {
133
161
  body: {
134
162
  model: `models/${this.modelId}`,
135
163
  content: {
136
- parts: [{ text: values[0] }]
164
+ parts
137
165
  },
138
166
  outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
139
167
  taskType: googleOptions == null ? void 0 : googleOptions.taskType
@@ -160,12 +188,19 @@ var GoogleGenerativeAIEmbeddingModel = class {
160
188
  url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
161
189
  headers: mergedHeaders,
162
190
  body: {
163
- requests: values.map((value) => ({
164
- model: `models/${this.modelId}`,
165
- content: { role: "user", parts: [{ text: value }] },
166
- outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
167
- taskType: googleOptions == null ? void 0 : googleOptions.taskType
168
- }))
191
+ requests: values.map((value, index) => {
192
+ const valueParts = multimodalContent == null ? void 0 : multimodalContent[index];
193
+ const textPart = value ? [{ text: value }] : [];
194
+ return {
195
+ model: `models/${this.modelId}`,
196
+ content: {
197
+ role: "user",
198
+ parts: valueParts != null ? [...textPart, ...valueParts] : [{ text: value }]
199
+ },
200
+ outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
201
+ taskType: googleOptions == null ? void 0 : googleOptions.taskType
202
+ };
203
+ })
169
204
  },
170
205
  failedResponseHandler: googleFailedResponseHandler,
171
206
  successfulResponseHandler: createJsonResponseHandler(
@@ -203,7 +238,10 @@ import {
203
238
  createEventSourceResponseHandler,
204
239
  createJsonResponseHandler as createJsonResponseHandler2,
205
240
  generateId,
241
+ isCustomReasoning,
206
242
  lazySchema as lazySchema5,
243
+ mapReasoningToProviderBudget,
244
+ mapReasoningToProviderEffort,
207
245
  parseProviderOptions as parseProviderOptions2,
208
246
  postJsonToApi as postJsonToApi2,
209
247
  resolve as resolve2,
@@ -370,14 +408,123 @@ function isEmptyObjectSchema(jsonSchema) {
370
408
  import {
371
409
  UnsupportedFunctionalityError
372
410
  } from "@ai-sdk/provider";
373
- import { convertToBase64 } from "@ai-sdk/provider-utils";
411
+ import {
412
+ convertToBase64,
413
+ isProviderReference,
414
+ resolveProviderReference
415
+ } from "@ai-sdk/provider-utils";
416
+ var dataUrlRegex = /^data:([^;,]+);base64,(.+)$/s;
417
+ function parseBase64DataUrl(value) {
418
+ const match = dataUrlRegex.exec(value);
419
+ if (match == null) {
420
+ return void 0;
421
+ }
422
+ return {
423
+ mediaType: match[1],
424
+ data: match[2]
425
+ };
426
+ }
427
+ function convertUrlToolResultPart(url) {
428
+ const parsedDataUrl = parseBase64DataUrl(url);
429
+ if (parsedDataUrl == null) {
430
+ return void 0;
431
+ }
432
+ return {
433
+ inlineData: {
434
+ mimeType: parsedDataUrl.mediaType,
435
+ data: parsedDataUrl.data
436
+ }
437
+ };
438
+ }
439
+ function appendToolResultParts(parts, toolName, outputValue) {
440
+ const functionResponseParts = [];
441
+ const responseTextParts = [];
442
+ for (const contentPart of outputValue) {
443
+ switch (contentPart.type) {
444
+ case "text": {
445
+ responseTextParts.push(contentPart.text);
446
+ break;
447
+ }
448
+ case "image-data":
449
+ case "file-data": {
450
+ functionResponseParts.push({
451
+ inlineData: {
452
+ mimeType: contentPart.mediaType,
453
+ data: contentPart.data
454
+ }
455
+ });
456
+ break;
457
+ }
458
+ case "image-url":
459
+ case "file-url": {
460
+ const functionResponsePart = convertUrlToolResultPart(
461
+ contentPart.url
462
+ );
463
+ if (functionResponsePart != null) {
464
+ functionResponseParts.push(functionResponsePart);
465
+ } else {
466
+ responseTextParts.push(JSON.stringify(contentPart));
467
+ }
468
+ break;
469
+ }
470
+ default: {
471
+ responseTextParts.push(JSON.stringify(contentPart));
472
+ break;
473
+ }
474
+ }
475
+ }
476
+ parts.push({
477
+ functionResponse: {
478
+ name: toolName,
479
+ response: {
480
+ name: toolName,
481
+ content: responseTextParts.length > 0 ? responseTextParts.join("\n") : "Tool executed successfully."
482
+ },
483
+ ...functionResponseParts.length > 0 ? { parts: functionResponseParts } : {}
484
+ }
485
+ });
486
+ }
487
+ function appendLegacyToolResultParts(parts, toolName, outputValue) {
488
+ for (const contentPart of outputValue) {
489
+ switch (contentPart.type) {
490
+ case "text":
491
+ parts.push({
492
+ functionResponse: {
493
+ name: toolName,
494
+ response: {
495
+ name: toolName,
496
+ content: contentPart.text
497
+ }
498
+ }
499
+ });
500
+ break;
501
+ case "image-data":
502
+ parts.push(
503
+ {
504
+ inlineData: {
505
+ mimeType: String(contentPart.mediaType),
506
+ data: String(contentPart.data)
507
+ }
508
+ },
509
+ {
510
+ text: "Tool executed successfully and returned this image as a response"
511
+ }
512
+ );
513
+ break;
514
+ default:
515
+ parts.push({ text: JSON.stringify(contentPart) });
516
+ break;
517
+ }
518
+ }
519
+ }
374
520
  function convertToGoogleGenerativeAIMessages(prompt, options) {
375
- var _a, _b, _c;
521
+ var _a, _b, _c, _d, _e, _f, _g, _h;
376
522
  const systemInstructionParts = [];
377
523
  const contents = [];
378
524
  let systemMessagesAllowed = true;
379
525
  const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
380
526
  const providerOptionsName = (_b = options == null ? void 0 : options.providerOptionsName) != null ? _b : "google";
527
+ const supportsFunctionResponseParts = (_c = options == null ? void 0 : options.supportsFunctionResponseParts) != null ? _c : true;
381
528
  for (const { role, content } of prompt) {
382
529
  switch (role) {
383
530
  case "system": {
@@ -400,19 +547,36 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
400
547
  }
401
548
  case "file": {
402
549
  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
403
- parts.push(
404
- part.data instanceof URL ? {
550
+ if (part.data instanceof URL) {
551
+ parts.push({
405
552
  fileData: {
406
553
  mimeType: mediaType,
407
554
  fileUri: part.data.toString()
408
555
  }
409
- } : {
556
+ });
557
+ } else if (isProviderReference(part.data)) {
558
+ if (providerOptionsName === "vertex") {
559
+ throw new UnsupportedFunctionalityError({
560
+ functionality: "file parts with provider references"
561
+ });
562
+ }
563
+ parts.push({
564
+ fileData: {
565
+ mimeType: mediaType,
566
+ fileUri: resolveProviderReference({
567
+ reference: part.data,
568
+ provider: "google"
569
+ })
570
+ }
571
+ });
572
+ } else {
573
+ parts.push({
410
574
  inlineData: {
411
575
  mimeType: mediaType,
412
576
  data: convertToBase64(part.data)
413
577
  }
414
- }
415
- );
578
+ });
579
+ }
416
580
  break;
417
581
  }
418
582
  }
@@ -425,8 +589,8 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
425
589
  contents.push({
426
590
  role: "model",
427
591
  parts: content.map((part) => {
428
- var _a2, _b2, _c2, _d;
429
- const providerOpts = (_d = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) != null ? _d : providerOptionsName !== "google" ? (_b2 = part.providerOptions) == null ? void 0 : _b2.google : (_c2 = part.providerOptions) == null ? void 0 : _c2.vertex;
592
+ var _a2, _b2, _c2, _d2;
593
+ const providerOpts = (_d2 = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) != null ? _d2 : providerOptionsName !== "google" ? (_b2 = part.providerOptions) == null ? void 0 : _b2.google : (_c2 = part.providerOptions) == null ? void 0 : _c2.vertex;
430
594
  const thoughtSignature = (providerOpts == null ? void 0 : providerOpts.thoughtSignature) != null ? String(providerOpts.thoughtSignature) : void 0;
431
595
  switch (part.type) {
432
596
  case "text": {
@@ -442,21 +606,67 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
442
606
  thoughtSignature
443
607
  };
444
608
  }
609
+ case "reasoning-file": {
610
+ if (part.data instanceof URL) {
611
+ throw new UnsupportedFunctionalityError({
612
+ functionality: "File data URLs in assistant messages are not supported"
613
+ });
614
+ }
615
+ return {
616
+ inlineData: {
617
+ mimeType: part.mediaType,
618
+ data: convertToBase64(part.data)
619
+ },
620
+ thought: true,
621
+ thoughtSignature
622
+ };
623
+ }
445
624
  case "file": {
446
625
  if (part.data instanceof URL) {
447
626
  throw new UnsupportedFunctionalityError({
448
627
  functionality: "File data URLs in assistant messages are not supported"
449
628
  });
450
629
  }
630
+ if (isProviderReference(part.data)) {
631
+ if (providerOptionsName === "vertex") {
632
+ throw new UnsupportedFunctionalityError({
633
+ functionality: "file parts with provider references"
634
+ });
635
+ }
636
+ return {
637
+ fileData: {
638
+ mimeType: part.mediaType,
639
+ fileUri: resolveProviderReference({
640
+ reference: part.data,
641
+ provider: "google"
642
+ })
643
+ },
644
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
645
+ thoughtSignature
646
+ };
647
+ }
451
648
  return {
452
649
  inlineData: {
453
650
  mimeType: part.mediaType,
454
651
  data: convertToBase64(part.data)
455
652
  },
653
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
456
654
  thoughtSignature
457
655
  };
458
656
  }
459
657
  case "tool-call": {
658
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
659
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
660
+ if (serverToolCallId && serverToolType) {
661
+ return {
662
+ toolCall: {
663
+ toolType: serverToolType,
664
+ args: typeof part.input === "string" ? JSON.parse(part.input) : part.input,
665
+ id: serverToolCallId
666
+ },
667
+ thoughtSignature
668
+ };
669
+ }
460
670
  return {
461
671
  functionCall: {
462
672
  name: part.toolName,
@@ -465,6 +675,21 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
465
675
  thoughtSignature
466
676
  };
467
677
  }
678
+ case "tool-result": {
679
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
680
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
681
+ if (serverToolCallId && serverToolType) {
682
+ return {
683
+ toolResponse: {
684
+ toolType: serverToolType,
685
+ response: part.output.type === "json" ? part.output.value : {},
686
+ id: serverToolCallId
687
+ },
688
+ thoughtSignature
689
+ };
690
+ }
691
+ return void 0;
692
+ }
468
693
  }
469
694
  }).filter((part) => part !== void 0)
470
695
  });
@@ -477,38 +702,32 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
477
702
  if (part.type === "tool-approval-response") {
478
703
  continue;
479
704
  }
705
+ const partProviderOpts = (_g = (_d = part.providerOptions) == null ? void 0 : _d[providerOptionsName]) != null ? _g : providerOptionsName !== "google" ? (_e = part.providerOptions) == null ? void 0 : _e.google : (_f = part.providerOptions) == null ? void 0 : _f.vertex;
706
+ const serverToolCallId = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolCallId) != null ? String(partProviderOpts.serverToolCallId) : void 0;
707
+ const serverToolType = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolType) != null ? String(partProviderOpts.serverToolType) : void 0;
708
+ if (serverToolCallId && serverToolType) {
709
+ const serverThoughtSignature = (partProviderOpts == null ? void 0 : partProviderOpts.thoughtSignature) != null ? String(partProviderOpts.thoughtSignature) : void 0;
710
+ if (contents.length > 0) {
711
+ const lastContent = contents[contents.length - 1];
712
+ if (lastContent.role === "model") {
713
+ lastContent.parts.push({
714
+ toolResponse: {
715
+ toolType: serverToolType,
716
+ response: part.output.type === "json" ? part.output.value : {},
717
+ id: serverToolCallId
718
+ },
719
+ thoughtSignature: serverThoughtSignature
720
+ });
721
+ continue;
722
+ }
723
+ }
724
+ }
480
725
  const output = part.output;
481
726
  if (output.type === "content") {
482
- for (const contentPart of output.value) {
483
- switch (contentPart.type) {
484
- case "text":
485
- parts.push({
486
- functionResponse: {
487
- name: part.toolName,
488
- response: {
489
- name: part.toolName,
490
- content: contentPart.text
491
- }
492
- }
493
- });
494
- break;
495
- case "image-data":
496
- parts.push(
497
- {
498
- inlineData: {
499
- mimeType: contentPart.mediaType,
500
- data: contentPart.data
501
- }
502
- },
503
- {
504
- text: "Tool executed successfully and returned this image as a response"
505
- }
506
- );
507
- break;
508
- default:
509
- parts.push({ text: JSON.stringify(contentPart) });
510
- break;
511
- }
727
+ if (supportsFunctionResponseParts) {
728
+ appendToolResultParts(parts, part.toolName, output.value);
729
+ } else {
730
+ appendLegacyToolResultParts(parts, part.toolName, output.value);
512
731
  }
513
732
  } else {
514
733
  parts.push({
@@ -516,7 +735,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
516
735
  name: part.toolName,
517
736
  response: {
518
737
  name: part.toolName,
519
- content: output.type === "execution-denied" ? (_c = output.reason) != null ? _c : "Tool execution denied." : output.value
738
+ content: output.type === "execution-denied" ? (_h = output.reason) != null ? _h : "Tool execution denied." : output.value
520
739
  }
521
740
  }
522
741
  });
@@ -662,7 +881,11 @@ var googleLanguageModelOptions = lazySchema4(
662
881
  latitude: z4.number(),
663
882
  longitude: z4.number()
664
883
  }).optional()
665
- }).optional()
884
+ }).optional(),
885
+ /**
886
+ * Optional. The service tier to use for the request.
887
+ */
888
+ serviceTier: z4.enum(["standard", "flex", "priority"]).optional()
666
889
  })
667
890
  )
668
891
  );
@@ -676,7 +899,7 @@ function prepareTools({
676
899
  toolChoice,
677
900
  modelId
678
901
  }) {
679
- var _a;
902
+ var _a, _b;
680
903
  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
681
904
  const toolWarnings = [];
682
905
  const isLatest = [
@@ -685,13 +908,14 @@ function prepareTools({
685
908
  "gemini-pro-latest"
686
909
  ].some((id) => id === modelId);
687
910
  const isGemini2orNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || modelId.includes("nano-banana") || isLatest;
911
+ const isGemini3orNewer = modelId.includes("gemini-3");
688
912
  const supportsFileSearch = modelId.includes("gemini-2.5") || modelId.includes("gemini-3");
689
913
  if (tools == null) {
690
914
  return { tools: void 0, toolConfig: void 0, toolWarnings };
691
915
  }
692
916
  const hasFunctionTools = tools.some((tool) => tool.type === "function");
693
917
  const hasProviderTools = tools.some((tool) => tool.type === "provider");
694
- if (hasFunctionTools && hasProviderTools) {
918
+ if (hasFunctionTools && hasProviderTools && !isGemini3orNewer) {
695
919
  toolWarnings.push({
696
920
  type: "unsupported",
697
921
  feature: `combination of function and provider-defined tools`
@@ -742,7 +966,7 @@ function prepareTools({
742
966
  toolWarnings.push({
743
967
  type: "unsupported",
744
968
  feature: `provider-defined tool ${tool.id}`,
745
- details: "The code execution tools is not supported with other Gemini models than Gemini 2."
969
+ details: "The code execution tool is not supported with other Gemini models than Gemini 2."
746
970
  });
747
971
  }
748
972
  break;
@@ -796,6 +1020,45 @@ function prepareTools({
796
1020
  break;
797
1021
  }
798
1022
  });
1023
+ if (hasFunctionTools && isGemini3orNewer && googleTools2.length > 0) {
1024
+ const functionDeclarations2 = [];
1025
+ for (const tool of tools) {
1026
+ if (tool.type === "function") {
1027
+ functionDeclarations2.push({
1028
+ name: tool.name,
1029
+ description: (_a = tool.description) != null ? _a : "",
1030
+ parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
1031
+ });
1032
+ }
1033
+ }
1034
+ const combinedToolConfig = {
1035
+ functionCallingConfig: { mode: "VALIDATED" },
1036
+ includeServerSideToolInvocations: true
1037
+ };
1038
+ if (toolChoice != null) {
1039
+ switch (toolChoice.type) {
1040
+ case "auto":
1041
+ break;
1042
+ case "none":
1043
+ combinedToolConfig.functionCallingConfig = { mode: "NONE" };
1044
+ break;
1045
+ case "required":
1046
+ combinedToolConfig.functionCallingConfig = { mode: "ANY" };
1047
+ break;
1048
+ case "tool":
1049
+ combinedToolConfig.functionCallingConfig = {
1050
+ mode: "ANY",
1051
+ allowedFunctionNames: [toolChoice.toolName]
1052
+ };
1053
+ break;
1054
+ }
1055
+ }
1056
+ return {
1057
+ tools: [...googleTools2, { functionDeclarations: functionDeclarations2 }],
1058
+ toolConfig: combinedToolConfig,
1059
+ toolWarnings
1060
+ };
1061
+ }
799
1062
  return {
800
1063
  tools: googleTools2.length > 0 ? googleTools2 : void 0,
801
1064
  toolConfig: void 0,
@@ -809,7 +1072,7 @@ function prepareTools({
809
1072
  case "function":
810
1073
  functionDeclarations.push({
811
1074
  name: tool.name,
812
- description: (_a = tool.description) != null ? _a : "",
1075
+ description: (_b = tool.description) != null ? _b : "",
813
1076
  parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
814
1077
  });
815
1078
  if (tool.strict === true) {
@@ -908,7 +1171,7 @@ function mapGoogleGenerativeAIFinishReason({
908
1171
  // src/google-generative-ai-language-model.ts
909
1172
  var GoogleGenerativeAILanguageModel = class {
910
1173
  constructor(modelId, config) {
911
- this.specificationVersion = "v3";
1174
+ this.specificationVersion = "v4";
912
1175
  var _a;
913
1176
  this.modelId = modelId;
914
1177
  this.config = config;
@@ -934,6 +1197,7 @@ var GoogleGenerativeAILanguageModel = class {
934
1197
  seed,
935
1198
  tools,
936
1199
  toolChoice,
1200
+ reasoning,
937
1201
  providerOptions
938
1202
  }) {
939
1203
  var _a;
@@ -960,9 +1224,14 @@ var GoogleGenerativeAILanguageModel = class {
960
1224
  });
961
1225
  }
962
1226
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
1227
+ const supportsFunctionResponseParts = this.modelId.startsWith("gemini-3");
963
1228
  const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
964
1229
  prompt,
965
- { isGemmaModel, providerOptionsName }
1230
+ {
1231
+ isGemmaModel,
1232
+ providerOptionsName,
1233
+ supportsFunctionResponseParts
1234
+ }
966
1235
  );
967
1236
  const {
968
1237
  tools: googleTools2,
@@ -973,6 +1242,12 @@ var GoogleGenerativeAILanguageModel = class {
973
1242
  toolChoice,
974
1243
  modelId: this.modelId
975
1244
  });
1245
+ const resolvedThinking = resolveThinkingConfig({
1246
+ reasoning,
1247
+ modelId: this.modelId,
1248
+ warnings
1249
+ });
1250
+ const thinkingConfig = (googleOptions == null ? void 0 : googleOptions.thinkingConfig) || resolvedThinking ? { ...resolvedThinking, ...googleOptions == null ? void 0 : googleOptions.thinkingConfig } : void 0;
976
1251
  return {
977
1252
  args: {
978
1253
  generationConfig: {
@@ -996,7 +1271,7 @@ var GoogleGenerativeAILanguageModel = class {
996
1271
  },
997
1272
  // provider options:
998
1273
  responseModalities: googleOptions == null ? void 0 : googleOptions.responseModalities,
999
- thinkingConfig: googleOptions == null ? void 0 : googleOptions.thinkingConfig,
1274
+ thinkingConfig,
1000
1275
  ...(googleOptions == null ? void 0 : googleOptions.mediaResolution) && {
1001
1276
  mediaResolution: googleOptions.mediaResolution
1002
1277
  },
@@ -1013,14 +1288,15 @@ var GoogleGenerativeAILanguageModel = class {
1013
1288
  retrievalConfig: googleOptions.retrievalConfig
1014
1289
  } : googleToolConfig,
1015
1290
  cachedContent: googleOptions == null ? void 0 : googleOptions.cachedContent,
1016
- labels: googleOptions == null ? void 0 : googleOptions.labels
1291
+ labels: googleOptions == null ? void 0 : googleOptions.labels,
1292
+ serviceTier: googleOptions == null ? void 0 : googleOptions.serviceTier
1017
1293
  },
1018
1294
  warnings: [...warnings, ...toolWarnings],
1019
1295
  providerOptionsName
1020
1296
  };
1021
1297
  }
1022
1298
  async doGenerate(options) {
1023
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
1299
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
1024
1300
  const { args, warnings, providerOptionsName } = await this.getArgs(options);
1025
1301
  const mergedHeaders = combineHeaders2(
1026
1302
  await resolve2(this.config.headers),
@@ -1046,6 +1322,7 @@ var GoogleGenerativeAILanguageModel = class {
1046
1322
  const parts = (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [];
1047
1323
  const usageMetadata = response.usageMetadata;
1048
1324
  let lastCodeExecutionToolCallId;
1325
+ let lastServerToolCallId;
1049
1326
  for (const part of parts) {
1050
1327
  if ("executableCode" in part && ((_c = part.executableCode) == null ? void 0 : _c.code)) {
1051
1328
  const toolCallId = this.config.generateId();
@@ -1100,22 +1377,68 @@ var GoogleGenerativeAILanguageModel = class {
1100
1377
  } : void 0
1101
1378
  });
1102
1379
  } else if ("inlineData" in part) {
1380
+ const hasThought = part.thought === true;
1381
+ const hasThoughtSignature = !!part.thoughtSignature;
1103
1382
  content.push({
1104
- type: "file",
1383
+ type: hasThought ? "reasoning-file" : "file",
1105
1384
  data: part.inlineData.data,
1106
1385
  mediaType: part.inlineData.mimeType,
1107
- providerMetadata: part.thoughtSignature ? {
1386
+ providerMetadata: hasThoughtSignature ? {
1108
1387
  [providerOptionsName]: {
1109
1388
  thoughtSignature: part.thoughtSignature
1110
1389
  }
1111
1390
  } : void 0
1112
1391
  });
1392
+ } else if ("toolCall" in part && part.toolCall) {
1393
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : this.config.generateId();
1394
+ lastServerToolCallId = toolCallId;
1395
+ content.push({
1396
+ type: "tool-call",
1397
+ toolCallId,
1398
+ toolName: `server:${part.toolCall.toolType}`,
1399
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
1400
+ providerExecuted: true,
1401
+ dynamic: true,
1402
+ providerMetadata: part.thoughtSignature ? {
1403
+ [providerOptionsName]: {
1404
+ thoughtSignature: part.thoughtSignature,
1405
+ serverToolCallId: toolCallId,
1406
+ serverToolType: part.toolCall.toolType
1407
+ }
1408
+ } : {
1409
+ [providerOptionsName]: {
1410
+ serverToolCallId: toolCallId,
1411
+ serverToolType: part.toolCall.toolType
1412
+ }
1413
+ }
1414
+ });
1415
+ } else if ("toolResponse" in part && part.toolResponse) {
1416
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : this.config.generateId();
1417
+ content.push({
1418
+ type: "tool-result",
1419
+ toolCallId: responseToolCallId,
1420
+ toolName: `server:${part.toolResponse.toolType}`,
1421
+ result: (_h = part.toolResponse.response) != null ? _h : {},
1422
+ providerMetadata: part.thoughtSignature ? {
1423
+ [providerOptionsName]: {
1424
+ thoughtSignature: part.thoughtSignature,
1425
+ serverToolCallId: responseToolCallId,
1426
+ serverToolType: part.toolResponse.toolType
1427
+ }
1428
+ } : {
1429
+ [providerOptionsName]: {
1430
+ serverToolCallId: responseToolCallId,
1431
+ serverToolType: part.toolResponse.toolType
1432
+ }
1433
+ }
1434
+ });
1435
+ lastServerToolCallId = void 0;
1113
1436
  }
1114
1437
  }
1115
- const sources = (_e = extractSources({
1438
+ const sources = (_i = extractSources({
1116
1439
  groundingMetadata: candidate.groundingMetadata,
1117
1440
  generateId: this.config.generateId
1118
- })) != null ? _e : [];
1441
+ })) != null ? _i : [];
1119
1442
  for (const source of sources) {
1120
1443
  content.push(source);
1121
1444
  }
@@ -1129,17 +1452,19 @@ var GoogleGenerativeAILanguageModel = class {
1129
1452
  (part) => part.type === "tool-call" && !part.providerExecuted
1130
1453
  )
1131
1454
  }),
1132
- raw: (_f = candidate.finishReason) != null ? _f : void 0
1455
+ raw: (_j = candidate.finishReason) != null ? _j : void 0
1133
1456
  },
1134
1457
  usage: convertGoogleGenerativeAIUsage(usageMetadata),
1135
1458
  warnings,
1136
1459
  providerMetadata: {
1137
1460
  [providerOptionsName]: {
1138
- promptFeedback: (_g = response.promptFeedback) != null ? _g : null,
1139
- groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
1140
- urlContextMetadata: (_i = candidate.urlContextMetadata) != null ? _i : null,
1141
- safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
1142
- usageMetadata: usageMetadata != null ? usageMetadata : null
1461
+ promptFeedback: (_k = response.promptFeedback) != null ? _k : null,
1462
+ groundingMetadata: (_l = candidate.groundingMetadata) != null ? _l : null,
1463
+ urlContextMetadata: (_m = candidate.urlContextMetadata) != null ? _m : null,
1464
+ safetyRatings: (_n = candidate.safetyRatings) != null ? _n : null,
1465
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
1466
+ finishMessage: (_o = candidate.finishMessage) != null ? _o : null,
1467
+ serviceTier: (_p = response.serviceTier) != null ? _p : null
1143
1468
  }
1144
1469
  },
1145
1470
  request: { body: args },
@@ -1175,6 +1500,7 @@ var GoogleGenerativeAILanguageModel = class {
1175
1500
  let providerMetadata = void 0;
1176
1501
  let lastGroundingMetadata = null;
1177
1502
  let lastUrlContextMetadata = null;
1503
+ let serviceTier = null;
1178
1504
  const generateId3 = this.config.generateId;
1179
1505
  let hasToolCalls = false;
1180
1506
  let currentTextBlockId = null;
@@ -1182,6 +1508,7 @@ var GoogleGenerativeAILanguageModel = class {
1182
1508
  let blockCounter = 0;
1183
1509
  const emittedSourceUrls = /* @__PURE__ */ new Set();
1184
1510
  let lastCodeExecutionToolCallId;
1511
+ let lastServerToolCallId;
1185
1512
  return {
1186
1513
  stream: response.pipeThrough(
1187
1514
  new TransformStream({
@@ -1189,7 +1516,7 @@ var GoogleGenerativeAILanguageModel = class {
1189
1516
  controller.enqueue({ type: "stream-start", warnings });
1190
1517
  },
1191
1518
  transform(chunk, controller) {
1192
- var _a, _b, _c, _d, _e, _f;
1519
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
1193
1520
  if (options.includeRawChunks) {
1194
1521
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1195
1522
  }
@@ -1202,6 +1529,9 @@ var GoogleGenerativeAILanguageModel = class {
1202
1529
  if (usageMetadata != null) {
1203
1530
  usage = usageMetadata;
1204
1531
  }
1532
+ if (value.serviceTier != null) {
1533
+ serviceTier = value.serviceTier;
1534
+ }
1205
1535
  const candidate = (_a = value.candidates) == null ? void 0 : _a[0];
1206
1536
  if (candidate == null) {
1207
1537
  return;
@@ -1327,17 +1657,55 @@ var GoogleGenerativeAILanguageModel = class {
1327
1657
  });
1328
1658
  currentReasoningBlockId = null;
1329
1659
  }
1330
- const thoughtSignatureMetadata = part.thoughtSignature ? {
1660
+ const hasThought = part.thought === true;
1661
+ const hasThoughtSignature = !!part.thoughtSignature;
1662
+ const fileMeta = hasThoughtSignature ? {
1331
1663
  [providerOptionsName]: {
1332
1664
  thoughtSignature: part.thoughtSignature
1333
1665
  }
1334
1666
  } : void 0;
1335
1667
  controller.enqueue({
1336
- type: "file",
1668
+ type: hasThought ? "reasoning-file" : "file",
1337
1669
  mediaType: part.inlineData.mimeType,
1338
1670
  data: part.inlineData.data,
1339
- providerMetadata: thoughtSignatureMetadata
1671
+ providerMetadata: fileMeta
1672
+ });
1673
+ } else if ("toolCall" in part && part.toolCall) {
1674
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : generateId3();
1675
+ lastServerToolCallId = toolCallId;
1676
+ const serverMeta = {
1677
+ [providerOptionsName]: {
1678
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
1679
+ serverToolCallId: toolCallId,
1680
+ serverToolType: part.toolCall.toolType
1681
+ }
1682
+ };
1683
+ controller.enqueue({
1684
+ type: "tool-call",
1685
+ toolCallId,
1686
+ toolName: `server:${part.toolCall.toolType}`,
1687
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
1688
+ providerExecuted: true,
1689
+ dynamic: true,
1690
+ providerMetadata: serverMeta
1340
1691
  });
1692
+ } else if ("toolResponse" in part && part.toolResponse) {
1693
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : generateId3();
1694
+ const serverMeta = {
1695
+ [providerOptionsName]: {
1696
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
1697
+ serverToolCallId: responseToolCallId,
1698
+ serverToolType: part.toolResponse.toolType
1699
+ }
1700
+ };
1701
+ controller.enqueue({
1702
+ type: "tool-result",
1703
+ toolCallId: responseToolCallId,
1704
+ toolName: `server:${part.toolResponse.toolType}`,
1705
+ result: (_h = part.toolResponse.response) != null ? _h : {},
1706
+ providerMetadata: serverMeta
1707
+ });
1708
+ lastServerToolCallId = void 0;
1341
1709
  }
1342
1710
  }
1343
1711
  const toolCallDeltas = getToolCallsFromParts({
@@ -1385,15 +1753,15 @@ var GoogleGenerativeAILanguageModel = class {
1385
1753
  };
1386
1754
  providerMetadata = {
1387
1755
  [providerOptionsName]: {
1388
- promptFeedback: (_e = value.promptFeedback) != null ? _e : null,
1756
+ promptFeedback: (_i = value.promptFeedback) != null ? _i : null,
1389
1757
  groundingMetadata: lastGroundingMetadata,
1390
1758
  urlContextMetadata: lastUrlContextMetadata,
1391
- safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
1759
+ safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
1760
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
1761
+ finishMessage: (_k = candidate.finishMessage) != null ? _k : null,
1762
+ serviceTier
1392
1763
  }
1393
1764
  };
1394
- if (usageMetadata != null) {
1395
- providerMetadata[providerOptionsName].usageMetadata = usageMetadata;
1396
- }
1397
1765
  }
1398
1766
  },
1399
1767
  flush(controller) {
@@ -1423,6 +1791,75 @@ var GoogleGenerativeAILanguageModel = class {
1423
1791
  };
1424
1792
  }
1425
1793
  };
1794
// Returns true when the model id belongs to the Gemini 3 family:
// "gemini-3" either at the very end of the id or followed by a "." / "-"
// separator (case-insensitive), so ids like "gemini-30" do not match.
function isGemini3Model(modelId) {
  return /gemini-3(?:[.-]|$)/i.test(modelId);
}
1797
// Output-token ceiling used when clamping thinking budgets for
// Gemini 2.5-style models (64K tokens for all of them).
function getMaxOutputTokensForGemini25Model() {
  const maxOutputTokens = 64 * 1024;
  return maxOutputTokens;
}
1800
// Maximum thinking-token budget for a Gemini 2.5-style model id.
// Pro-tier models ("2.5-pro" and "gemini-3-pro-image") allow 32768 tokens;
// every other id falls back to 24576. Matching is case-insensitive.
function getMaxThinkingTokensForGemini25Model(modelId) {
  const normalized = modelId.toLowerCase();
  const isProTier = normalized.includes("2.5-pro") || normalized.includes("gemini-3-pro-image");
  return isProTier ? 32768 : 24576;
}
1807
// Maps the abstract `reasoning` setting to a provider thinkingConfig.
// Returns undefined when no custom reasoning was requested. Gemini 3
// models (except the pro-image variant) use discrete thinking levels;
// all other models use the Gemini 2.5 token-budget style.
function resolveThinkingConfig({
  reasoning,
  modelId,
  warnings
}) {
  if (!isCustomReasoning(reasoning)) {
    return void 0;
  }
  const usesThinkingLevels = isGemini3Model(modelId) && !modelId.includes("gemini-3-pro-image");
  return usesThinkingLevels ? resolveGemini3ThinkingConfig({ reasoning, warnings }) : resolveGemini25ThinkingConfig({ reasoning, modelId, warnings });
}
1820
// Resolves a Gemini 3 thinkingConfig from the abstract reasoning setting.
// "none" maps to the lowest available level ("minimal"); other efforts are
// translated via mapReasoningToProviderEffort, with "xhigh" capped at "high".
// Returns undefined when no level could be derived (warnings collect issues).
function resolveGemini3ThinkingConfig({
  reasoning,
  warnings
}) {
  if (reasoning === "none") {
    return { thinkingLevel: "minimal" };
  }
  const effortToLevel = {
    minimal: "minimal",
    low: "low",
    medium: "medium",
    high: "high",
    xhigh: "high"
  };
  const level = mapReasoningToProviderEffort({
    reasoning,
    effortMap: effortToLevel,
    warnings
  });
  return level == null ? void 0 : { thinkingLevel: level };
}
1843
// Resolves a Gemini 2.5-style thinkingConfig (token budget) from the
// abstract reasoning setting. "none" disables thinking with a budget of 0;
// otherwise the budget is derived from the model's output/thinking limits.
// Returns undefined when no budget could be derived.
function resolveGemini25ThinkingConfig({
  reasoning,
  modelId,
  warnings
}) {
  if (reasoning === "none") {
    return { thinkingBudget: 0 };
  }
  const budget = mapReasoningToProviderBudget({
    reasoning,
    maxOutputTokens: getMaxOutputTokensForGemini25Model(),
    maxReasoningBudget: getMaxThinkingTokensForGemini25Model(modelId),
    minReasoningBudget: 0,
    warnings
  });
  return budget == null ? void 0 : { thinkingBudget: budget };
}
1426
1863
  function getToolCallsFromParts({
1427
1864
  parts,
1428
1865
  generateId: generateId3,
@@ -1602,6 +2039,23 @@ var getContentSchema = () => z5.object({
1602
2039
  mimeType: z5.string(),
1603
2040
  data: z5.string()
1604
2041
  }),
2042
+ thought: z5.boolean().nullish(),
2043
+ thoughtSignature: z5.string().nullish()
2044
+ }),
2045
+ z5.object({
2046
+ toolCall: z5.object({
2047
+ toolType: z5.string(),
2048
+ args: z5.unknown().nullish(),
2049
+ id: z5.string()
2050
+ }),
2051
+ thoughtSignature: z5.string().nullish()
2052
+ }),
2053
+ z5.object({
2054
+ toolResponse: z5.object({
2055
+ toolType: z5.string(),
2056
+ response: z5.unknown().nullish(),
2057
+ id: z5.string()
2058
+ }),
1605
2059
  thoughtSignature: z5.string().nullish()
1606
2060
  }),
1607
2061
  z5.object({
@@ -1628,6 +2082,12 @@ var getSafetyRatingSchema = () => z5.object({
1628
2082
  severityScore: z5.number().nullish(),
1629
2083
  blocked: z5.boolean().nullish()
1630
2084
  });
2085
// Per-modality token breakdown entries from the API's usage metadata
// (each entry pairs a modality string with its token count); nullish
// because responses may omit the field entirely.
var tokenDetailsSchema = z5.array(
  z5.object({
    modality: z5.string(),
    tokenCount: z5.number()
  })
).nullish();
1631
2091
  var usageSchema = z5.object({
1632
2092
  cachedContentTokenCount: z5.number().nullish(),
1633
2093
  thoughtsTokenCount: z5.number().nullish(),
@@ -1635,7 +2095,10 @@ var usageSchema = z5.object({
1635
2095
  candidatesTokenCount: z5.number().nullish(),
1636
2096
  totalTokenCount: z5.number().nullish(),
1637
2097
  // https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/GenerateContentResponse#TrafficType
1638
- trafficType: z5.string().nullish()
2098
+ trafficType: z5.string().nullish(),
2099
+ // https://ai.google.dev/api/generate-content#Modality
2100
+ promptTokensDetails: tokenDetailsSchema,
2101
+ candidatesTokensDetails: tokenDetailsSchema
1639
2102
  });
1640
2103
  var getUrlContextMetadataSchema = () => z5.object({
1641
2104
  urlMetadata: z5.array(
@@ -1652,6 +2115,7 @@ var responseSchema = lazySchema5(
1652
2115
  z5.object({
1653
2116
  content: getContentSchema().nullish().or(z5.object({}).strict()),
1654
2117
  finishReason: z5.string().nullish(),
2118
+ finishMessage: z5.string().nullish(),
1655
2119
  safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1656
2120
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1657
2121
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
@@ -1661,7 +2125,8 @@ var responseSchema = lazySchema5(
1661
2125
  promptFeedback: z5.object({
1662
2126
  blockReason: z5.string().nullish(),
1663
2127
  safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
1664
- }).nullish()
2128
+ }).nullish(),
2129
+ serviceTier: z5.string().nullish()
1665
2130
  })
1666
2131
  )
1667
2132
  );
@@ -1672,6 +2137,7 @@ var chunkSchema = lazySchema5(
1672
2137
  z5.object({
1673
2138
  content: getContentSchema().nullish(),
1674
2139
  finishReason: z5.string().nullish(),
2140
+ finishMessage: z5.string().nullish(),
1675
2141
  safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1676
2142
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1677
2143
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
@@ -1681,7 +2147,8 @@ var chunkSchema = lazySchema5(
1681
2147
  promptFeedback: z5.object({
1682
2148
  blockReason: z5.string().nullish(),
1683
2149
  safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
1684
- }).nullish()
2150
+ }).nullish(),
2151
+ serviceTier: z5.string().nullish()
1685
2152
  })
1686
2153
  )
1687
2154
  );
@@ -1884,7 +2351,7 @@ var GoogleGenerativeAIImageModel = class {
1884
2351
  this.modelId = modelId;
1885
2352
  this.settings = settings;
1886
2353
  this.config = config;
1887
- this.specificationVersion = "v3";
2354
+ this.specificationVersion = "v4";
1888
2355
  }
1889
2356
  get maxImagesPerCall() {
1890
2357
  if (this.settings.maxImagesPerCall != null) {
@@ -2117,28 +2584,191 @@ var googleImageModelOptionsSchema = lazySchema11(
2117
2584
  )
2118
2585
  );
2119
2586
 
2120
- // src/google-generative-ai-video-model.ts
2587
+ // src/google-generative-ai-files.ts
2121
2588
  import {
2122
2589
  AISDKError
2123
2590
  } from "@ai-sdk/provider";
2124
2591
  import {
2125
2592
  combineHeaders as combineHeaders4,
2126
- convertUint8ArrayToBase64,
2127
2593
  createJsonResponseHandler as createJsonResponseHandler4,
2128
2594
  delay,
2129
- getFromApi,
2130
2595
  lazySchema as lazySchema12,
2131
2596
  parseProviderOptions as parseProviderOptions4,
2132
- postJsonToApi as postJsonToApi4,
2133
- resolve as resolve4,
2134
- zodSchema as zodSchema12
2597
+ zodSchema as zodSchema12,
2598
+ getFromApi
2135
2599
  } from "@ai-sdk/provider-utils";
2136
2600
  import { z as z14 } from "zod/v4";
2601
// Client for the Google Files API: uploads file bytes via the resumable
// upload protocol, then polls until the file leaves the PROCESSING state.
var GoogleGenerativeAIFiles = class {
  constructor(config) {
    this.config = config;
    this.specificationVersion = "v4";
  }
  get provider() {
    return this.config.provider;
  }
  // Uploads `options.data` (Uint8Array or base64 string) with media type
  // `options.mediaType`. Returns warnings, a provider file reference (URI),
  // the resolved media type, and file metadata under providerMetadata.google.
  // Throws AISDKError on init/upload failure, processing failure, or timeout.
  async uploadFile(options) {
    var _a, _b, _c, _d;
    // Optional google-namespaced options: displayName plus polling knobs.
    const googleOptions = await parseProviderOptions4({
      provider: "google",
      providerOptions: options.providerOptions,
      schema: googleFilesUploadOptionsSchema
    });
    const resolvedHeaders = this.config.headers();
    const fetchFn = (_a = this.config.fetch) != null ? _a : globalThis.fetch;
    const warnings = [];
    // The Files API has no filename field; surface this as a warning
    // instead of silently dropping the caller's intent.
    if (options.filename != null) {
      warnings.push({ type: "unsupported", feature: "filename" });
    }
    const data = options.data;
    // Accept raw bytes directly; decode base64 strings byte-by-byte.
    const fileBytes = data instanceof Uint8Array ? data : Uint8Array.from(atob(data), (c) => c.charCodeAt(0));
    const mediaType = options.mediaType;
    const displayName = googleOptions == null ? void 0 : googleOptions.displayName;
    // Uploads go to /upload/v1beta/files on the origin, so strip the
    // trailing /v1beta from the configured base URL first.
    const baseOrigin = this.config.baseURL.replace(/\/v1beta$/, "");
    // Step 1: start a resumable upload session; the response header carries
    // the session-specific upload URL.
    const initResponse = await fetchFn(`${baseOrigin}/upload/v1beta/files`, {
      method: "POST",
      headers: {
        ...resolvedHeaders,
        "X-Goog-Upload-Protocol": "resumable",
        "X-Goog-Upload-Command": "start",
        "X-Goog-Upload-Header-Content-Length": String(fileBytes.length),
        "X-Goog-Upload-Header-Content-Type": mediaType,
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        file: {
          ...displayName != null ? { display_name: displayName } : {}
        }
      })
    });
    if (!initResponse.ok) {
      const errorBody = await initResponse.text();
      throw new AISDKError({
        name: "GOOGLE_FILES_UPLOAD_ERROR",
        message: `Failed to initiate resumable upload: ${initResponse.status} ${errorBody}`
      });
    }
    const uploadUrl = initResponse.headers.get("x-goog-upload-url");
    if (!uploadUrl) {
      throw new AISDKError({
        name: "GOOGLE_FILES_UPLOAD_ERROR",
        message: "No upload URL returned from initiation request"
      });
    }
    // Step 2: send all bytes in one shot and finalize the session.
    const uploadResponse = await fetchFn(uploadUrl, {
      method: "POST",
      headers: {
        "Content-Length": String(fileBytes.length),
        "X-Goog-Upload-Offset": "0",
        "X-Goog-Upload-Command": "upload, finalize"
      },
      body: fileBytes
    });
    if (!uploadResponse.ok) {
      const errorBody = await uploadResponse.text();
      throw new AISDKError({
        name: "GOOGLE_FILES_UPLOAD_ERROR",
        message: `Failed to upload file data: ${uploadResponse.status} ${errorBody}`
      });
    }
    const uploadResult = await uploadResponse.json();
    let file = uploadResult.file;
    // Step 3: poll the file resource until it leaves PROCESSING.
    // Defaults: poll every 2s, give up after 5 minutes.
    const pollIntervalMs = (_b = googleOptions == null ? void 0 : googleOptions.pollIntervalMs) != null ? _b : 2e3;
    const pollTimeoutMs = (_c = googleOptions == null ? void 0 : googleOptions.pollTimeoutMs) != null ? _c : 3e5;
    const startTime = Date.now();
    while (file.state === "PROCESSING") {
      if (Date.now() - startTime > pollTimeoutMs) {
        throw new AISDKError({
          name: "GOOGLE_FILES_UPLOAD_TIMEOUT",
          message: `File processing timed out after ${pollTimeoutMs}ms`
        });
      }
      await delay(pollIntervalMs);
      const { value: fileStatus } = await getFromApi({
        url: `${this.config.baseURL}/${file.name}`,
        headers: combineHeaders4(resolvedHeaders),
        successfulResponseHandler: createJsonResponseHandler4(
          googleFileResponseSchema
        ),
        failedResponseHandler: googleFailedResponseHandler,
        fetch: this.config.fetch
      });
      file = fileStatus;
    }
    if (file.state === "FAILED") {
      throw new AISDKError({
        name: "GOOGLE_FILES_UPLOAD_FAILED",
        message: `File processing failed for ${file.name}`
      });
    }
    // Prefer the server-reported MIME type; fall back to the caller's.
    // Optional timestamps/hash are only included when present.
    return {
      warnings,
      providerReference: { google: file.uri },
      mediaType: (_d = file.mimeType) != null ? _d : options.mediaType,
      providerMetadata: {
        google: {
          name: file.name,
          displayName: file.displayName,
          mimeType: file.mimeType,
          sizeBytes: file.sizeBytes,
          state: file.state,
          uri: file.uri,
          ...file.createTime != null ? { createTime: file.createTime } : {},
          ...file.updateTime != null ? { updateTime: file.updateTime } : {},
          ...file.expirationTime != null ? { expirationTime: file.expirationTime } : {},
          ...file.sha256Hash != null ? { sha256Hash: file.sha256Hash } : {}
        }
      }
    };
  }
};
2724
// Shape of a File resource returned by the Google Files API (used when
// polling file status in GoogleGenerativeAIFiles.uploadFile). Wrapped in
// lazySchema so the zod schema is only constructed on first use.
var googleFileResponseSchema = lazySchema12(
  () => zodSchema12(
    z14.object({
      name: z14.string(),
      displayName: z14.string().nullish(),
      mimeType: z14.string(),
      sizeBytes: z14.string().nullish(),
      createTime: z14.string().nullish(),
      updateTime: z14.string().nullish(),
      expirationTime: z14.string().nullish(),
      sha256Hash: z14.string().nullish(),
      uri: z14.string(),
      state: z14.string()
    })
  )
);
2740
// Provider options accepted by uploadFile (under providerOptions.google):
// an optional display name plus polling interval/timeout overrides for the
// processing wait loop. passthrough() tolerates unknown extra keys.
var googleFilesUploadOptionsSchema = lazySchema12(
  () => zodSchema12(
    z14.object({
      displayName: z14.string().nullish(),
      pollIntervalMs: z14.number().positive().nullish(),
      pollTimeoutMs: z14.number().positive().nullish()
    }).passthrough()
  )
);
2749
+
2750
+ // src/google-generative-ai-video-model.ts
2751
+ import {
2752
+ AISDKError as AISDKError2
2753
+ } from "@ai-sdk/provider";
2754
+ import {
2755
+ combineHeaders as combineHeaders5,
2756
+ convertUint8ArrayToBase64 as convertUint8ArrayToBase642,
2757
+ createJsonResponseHandler as createJsonResponseHandler5,
2758
+ delay as delay2,
2759
+ getFromApi as getFromApi2,
2760
+ lazySchema as lazySchema13,
2761
+ parseProviderOptions as parseProviderOptions5,
2762
+ postJsonToApi as postJsonToApi5,
2763
+ resolve as resolve4,
2764
+ zodSchema as zodSchema13
2765
+ } from "@ai-sdk/provider-utils";
2766
+ import { z as z15 } from "zod/v4";
2137
2767
  var GoogleGenerativeAIVideoModel = class {
2138
2768
  constructor(modelId, config) {
2139
2769
  this.modelId = modelId;
2140
2770
  this.config = config;
2141
- this.specificationVersion = "v3";
2771
+ this.specificationVersion = "v4";
2142
2772
  }
2143
2773
  get provider() {
2144
2774
  return this.config.provider;
@@ -2150,7 +2780,7 @@ var GoogleGenerativeAIVideoModel = class {
2150
2780
  var _a, _b, _c, _d, _e, _f, _g, _h;
2151
2781
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
2152
2782
  const warnings = [];
2153
- const googleOptions = await parseProviderOptions4({
2783
+ const googleOptions = await parseProviderOptions5({
2154
2784
  provider: "google",
2155
2785
  providerOptions: options.providerOptions,
2156
2786
  schema: googleVideoModelOptionsSchema
@@ -2168,7 +2798,7 @@ var GoogleGenerativeAIVideoModel = class {
2168
2798
  details: "Google Generative AI video models require base64-encoded images. URL will be ignored."
2169
2799
  });
2170
2800
  } else {
2171
- const base64Data = typeof options.image.data === "string" ? options.image.data : convertUint8ArrayToBase64(options.image.data);
2801
+ const base64Data = typeof options.image.data === "string" ? options.image.data : convertUint8ArrayToBase642(options.image.data);
2172
2802
  instance.image = {
2173
2803
  inlineData: {
2174
2804
  mimeType: options.image.mediaType || "image/png",
@@ -2234,9 +2864,9 @@ var GoogleGenerativeAIVideoModel = class {
2234
2864
  }
2235
2865
  }
2236
2866
  }
2237
- const { value: operation } = await postJsonToApi4({
2867
+ const { value: operation } = await postJsonToApi5({
2238
2868
  url: `${this.config.baseURL}/models/${this.modelId}:predictLongRunning`,
2239
- headers: combineHeaders4(
2869
+ headers: combineHeaders5(
2240
2870
  await resolve4(this.config.headers),
2241
2871
  options.headers
2242
2872
  ),
@@ -2244,7 +2874,7 @@ var GoogleGenerativeAIVideoModel = class {
2244
2874
  instances,
2245
2875
  parameters
2246
2876
  },
2247
- successfulResponseHandler: createJsonResponseHandler4(
2877
+ successfulResponseHandler: createJsonResponseHandler5(
2248
2878
  googleOperationSchema
2249
2879
  ),
2250
2880
  failedResponseHandler: googleFailedResponseHandler,
@@ -2253,7 +2883,7 @@ var GoogleGenerativeAIVideoModel = class {
2253
2883
  });
2254
2884
  const operationName = operation.name;
2255
2885
  if (!operationName) {
2256
- throw new AISDKError({
2886
+ throw new AISDKError2({
2257
2887
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2258
2888
  message: "No operation name returned from API"
2259
2889
  });
@@ -2265,25 +2895,25 @@ var GoogleGenerativeAIVideoModel = class {
2265
2895
  let responseHeaders;
2266
2896
  while (!finalOperation.done) {
2267
2897
  if (Date.now() - startTime > pollTimeoutMs) {
2268
- throw new AISDKError({
2898
+ throw new AISDKError2({
2269
2899
  name: "GOOGLE_VIDEO_GENERATION_TIMEOUT",
2270
2900
  message: `Video generation timed out after ${pollTimeoutMs}ms`
2271
2901
  });
2272
2902
  }
2273
- await delay(pollIntervalMs);
2903
+ await delay2(pollIntervalMs);
2274
2904
  if ((_f = options.abortSignal) == null ? void 0 : _f.aborted) {
2275
- throw new AISDKError({
2905
+ throw new AISDKError2({
2276
2906
  name: "GOOGLE_VIDEO_GENERATION_ABORTED",
2277
2907
  message: "Video generation request was aborted"
2278
2908
  });
2279
2909
  }
2280
- const { value: statusOperation, responseHeaders: pollHeaders } = await getFromApi({
2910
+ const { value: statusOperation, responseHeaders: pollHeaders } = await getFromApi2({
2281
2911
  url: `${this.config.baseURL}/${operationName}`,
2282
- headers: combineHeaders4(
2912
+ headers: combineHeaders5(
2283
2913
  await resolve4(this.config.headers),
2284
2914
  options.headers
2285
2915
  ),
2286
- successfulResponseHandler: createJsonResponseHandler4(
2916
+ successfulResponseHandler: createJsonResponseHandler5(
2287
2917
  googleOperationSchema
2288
2918
  ),
2289
2919
  failedResponseHandler: googleFailedResponseHandler,
@@ -2294,14 +2924,14 @@ var GoogleGenerativeAIVideoModel = class {
2294
2924
  responseHeaders = pollHeaders;
2295
2925
  }
2296
2926
  if (finalOperation.error) {
2297
- throw new AISDKError({
2927
+ throw new AISDKError2({
2298
2928
  name: "GOOGLE_VIDEO_GENERATION_FAILED",
2299
2929
  message: `Video generation failed: ${finalOperation.error.message}`
2300
2930
  });
2301
2931
  }
2302
2932
  const response = finalOperation.response;
2303
2933
  if (!((_g = response == null ? void 0 : response.generateVideoResponse) == null ? void 0 : _g.generatedSamples) || response.generateVideoResponse.generatedSamples.length === 0) {
2304
- throw new AISDKError({
2934
+ throw new AISDKError2({
2305
2935
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2306
2936
  message: `No videos in response. Response: ${JSON.stringify(finalOperation)}`
2307
2937
  });
@@ -2324,7 +2954,7 @@ var GoogleGenerativeAIVideoModel = class {
2324
2954
  }
2325
2955
  }
2326
2956
  if (videos.length === 0) {
2327
- throw new AISDKError({
2957
+ throw new AISDKError2({
2328
2958
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2329
2959
  message: "No valid videos in response"
2330
2960
  });
@@ -2345,37 +2975,37 @@ var GoogleGenerativeAIVideoModel = class {
2345
2975
  };
2346
2976
  }
2347
2977
  };
2348
// Long-running operation envelope returned by the video generation endpoint
// and its polling URL. Once `done`, either `error` is set or `response`
// carries the generated video sample URIs. Nearly every field is nullish
// because intermediate polls return partial operations.
var googleOperationSchema = z15.object({
  name: z15.string().nullish(),
  done: z15.boolean().nullish(),
  error: z15.object({
    code: z15.number().nullish(),
    message: z15.string(),
    status: z15.string().nullish()
  }).nullish(),
  response: z15.object({
    generateVideoResponse: z15.object({
      generatedSamples: z15.array(
        z15.object({
          video: z15.object({
            uri: z15.string().nullish()
          }).nullish()
        })
      ).nullish()
    }).nullish()
  }).nullish()
});
2368
- var googleVideoModelOptionsSchema = lazySchema12(
2369
- () => zodSchema12(
2370
- z14.object({
2371
- pollIntervalMs: z14.number().positive().nullish(),
2372
- pollTimeoutMs: z14.number().positive().nullish(),
2373
- personGeneration: z14.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2374
- negativePrompt: z14.string().nullish(),
2375
- referenceImages: z14.array(
2376
- z14.object({
2377
- bytesBase64Encoded: z14.string().nullish(),
2378
- gcsUri: z14.string().nullish()
2998
+ var googleVideoModelOptionsSchema = lazySchema13(
2999
+ () => zodSchema13(
3000
+ z15.object({
3001
+ pollIntervalMs: z15.number().positive().nullish(),
3002
+ pollTimeoutMs: z15.number().positive().nullish(),
3003
+ personGeneration: z15.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
3004
+ negativePrompt: z15.string().nullish(),
3005
+ referenceImages: z15.array(
3006
+ z15.object({
3007
+ bytesBase64Encoded: z15.string().nullish(),
3008
+ gcsUri: z15.string().nullish()
2379
3009
  })
2380
3010
  ).nullish()
2381
3011
  }).passthrough()
@@ -2432,6 +3062,12 @@ function createGoogleGenerativeAI(options = {}) {
2432
3062
  headers: getHeaders,
2433
3063
  fetch: options.fetch
2434
3064
  });
3065
+ const createFiles = () => new GoogleGenerativeAIFiles({
3066
+ provider: providerName,
3067
+ baseURL,
3068
+ headers: getHeaders,
3069
+ fetch: options.fetch
3070
+ });
2435
3071
  const createVideoModel = (modelId) => {
2436
3072
  var _a2;
2437
3073
  return new GoogleGenerativeAIVideoModel(modelId, {
@@ -2450,7 +3086,7 @@ function createGoogleGenerativeAI(options = {}) {
2450
3086
  }
2451
3087
  return createChatModel(modelId);
2452
3088
  };
2453
- provider.specificationVersion = "v3";
3089
+ provider.specificationVersion = "v4";
2454
3090
  provider.languageModel = createChatModel;
2455
3091
  provider.chat = createChatModel;
2456
3092
  provider.generativeAI = createChatModel;
@@ -2462,6 +3098,7 @@ function createGoogleGenerativeAI(options = {}) {
2462
3098
  provider.imageModel = createImageModel;
2463
3099
  provider.video = createVideoModel;
2464
3100
  provider.videoModel = createVideoModel;
3101
+ provider.files = createFiles;
2465
3102
  provider.tools = googleTools;
2466
3103
  return provider;
2467
3104
  }