@ai-sdk/google 4.0.0-beta.4 → 4.0.0-beta.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,72 +1,78 @@
1
- "use strict";
2
- var __defProp = Object.defineProperty;
3
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
- var __getOwnPropNames = Object.getOwnPropertyNames;
5
- var __hasOwnProp = Object.prototype.hasOwnProperty;
6
- var __export = (target, all) => {
7
- for (var name in all)
8
- __defProp(target, name, { get: all[name], enumerable: true });
9
- };
10
- var __copyProps = (to, from, except, desc) => {
11
- if (from && typeof from === "object" || typeof from === "function") {
12
- for (let key of __getOwnPropNames(from))
13
- if (!__hasOwnProp.call(to, key) && key !== except)
14
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
- }
16
- return to;
17
- };
18
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
-
20
- // src/index.ts
21
- var src_exports = {};
22
- __export(src_exports, {
23
- VERSION: () => VERSION,
24
- createGoogleGenerativeAI: () => createGoogleGenerativeAI,
25
- google: () => google
26
- });
27
- module.exports = __toCommonJS(src_exports);
28
-
29
1
  // src/google-provider.ts
30
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
2
+ import {
3
+ generateId as generateId2,
4
+ loadApiKey,
5
+ withoutTrailingSlash,
6
+ withUserAgentSuffix
7
+ } from "@ai-sdk/provider-utils";
31
8
 
32
9
  // src/version.ts
33
- var VERSION = true ? "4.0.0-beta.4" : "0.0.0-test";
10
+ var VERSION = true ? "4.0.0-beta.41" : "0.0.0-test";
34
11
 
35
12
  // src/google-generative-ai-embedding-model.ts
36
- var import_provider = require("@ai-sdk/provider");
37
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
38
- var import_v43 = require("zod/v4");
13
+ import {
14
+ TooManyEmbeddingValuesForCallError
15
+ } from "@ai-sdk/provider";
16
+ import {
17
+ combineHeaders,
18
+ createJsonResponseHandler,
19
+ lazySchema as lazySchema3,
20
+ parseProviderOptions,
21
+ postJsonToApi,
22
+ resolve,
23
+ serializeModelOptions,
24
+ WORKFLOW_SERIALIZE,
25
+ WORKFLOW_DESERIALIZE,
26
+ zodSchema as zodSchema3
27
+ } from "@ai-sdk/provider-utils";
28
+ import { z as z3 } from "zod/v4";
39
29
 
40
30
  // src/google-error.ts
41
- var import_provider_utils = require("@ai-sdk/provider-utils");
42
- var import_v4 = require("zod/v4");
43
- var googleErrorDataSchema = (0, import_provider_utils.lazySchema)(
44
- () => (0, import_provider_utils.zodSchema)(
45
- import_v4.z.object({
46
- error: import_v4.z.object({
47
- code: import_v4.z.number().nullable(),
48
- message: import_v4.z.string(),
49
- status: import_v4.z.string()
31
+ import {
32
+ createJsonErrorResponseHandler,
33
+ lazySchema,
34
+ zodSchema
35
+ } from "@ai-sdk/provider-utils";
36
+ import { z } from "zod/v4";
37
+ var googleErrorDataSchema = lazySchema(
38
+ () => zodSchema(
39
+ z.object({
40
+ error: z.object({
41
+ code: z.number().nullable(),
42
+ message: z.string(),
43
+ status: z.string()
50
44
  })
51
45
  })
52
46
  )
53
47
  );
54
- var googleFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
48
+ var googleFailedResponseHandler = createJsonErrorResponseHandler({
55
49
  errorSchema: googleErrorDataSchema,
56
50
  errorToMessage: (data) => data.error.message
57
51
  });
58
52
 
59
53
  // src/google-generative-ai-embedding-options.ts
60
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
61
- var import_v42 = require("zod/v4");
62
- var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
63
- () => (0, import_provider_utils2.zodSchema)(
64
- import_v42.z.object({
54
+ import {
55
+ lazySchema as lazySchema2,
56
+ zodSchema as zodSchema2
57
+ } from "@ai-sdk/provider-utils";
58
+ import { z as z2 } from "zod/v4";
59
+ var googleEmbeddingContentPartSchema = z2.union([
60
+ z2.object({ text: z2.string() }),
61
+ z2.object({
62
+ inlineData: z2.object({
63
+ mimeType: z2.string(),
64
+ data: z2.string()
65
+ })
66
+ })
67
+ ]);
68
+ var googleEmbeddingModelOptions = lazySchema2(
69
+ () => zodSchema2(
70
+ z2.object({
65
71
  /**
66
72
  * Optional. Optional reduced dimension for the output embedding.
67
73
  * If set, excessive values in the output embedding are truncated from the end.
68
74
  */
69
- outputDimensionality: import_v42.z.number().optional(),
75
+ outputDimensionality: z2.number().optional(),
70
76
  /**
71
77
  * Optional. Specifies the task type for generating embeddings.
72
78
  * Supported task types:
@@ -79,7 +85,7 @@ var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
79
85
  * - FACT_VERIFICATION: Optimized for verifying factual information.
80
86
  * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.
81
87
  */
82
- taskType: import_v42.z.enum([
88
+ taskType: z2.enum([
83
89
  "SEMANTIC_SIMILARITY",
84
90
  "CLASSIFICATION",
85
91
  "CLUSTERING",
@@ -88,20 +94,42 @@ var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
88
94
  "QUESTION_ANSWERING",
89
95
  "FACT_VERIFICATION",
90
96
  "CODE_RETRIEVAL_QUERY"
91
- ]).optional()
97
+ ]).optional(),
98
+ /**
99
+ * Optional. Per-value multimodal content parts for embedding non-text
100
+ * content (images, video, PDF, audio). Each entry corresponds to the
101
+ * embedding value at the same index and its parts are merged with the
102
+ * text value in the request. Use `null` for entries that are text-only.
103
+ *
104
+ * The array length must match the number of values being embedded. In
105
+ * the case of a single embedding, the array length must be 1.
106
+ */
107
+ content: z2.array(z2.array(googleEmbeddingContentPartSchema).min(1).nullable()).optional()
92
108
  })
93
109
  )
94
110
  );
95
111
 
96
112
  // src/google-generative-ai-embedding-model.ts
97
- var GoogleGenerativeAIEmbeddingModel = class {
113
+ var GoogleGenerativeAIEmbeddingModel = class _GoogleGenerativeAIEmbeddingModel {
98
114
  constructor(modelId, config) {
99
- this.specificationVersion = "v3";
115
+ this.specificationVersion = "v4";
100
116
  this.maxEmbeddingsPerCall = 2048;
101
117
  this.supportsParallelCalls = true;
102
118
  this.modelId = modelId;
103
119
  this.config = config;
104
120
  }
121
+ static [WORKFLOW_SERIALIZE](model) {
122
+ return serializeModelOptions({
123
+ modelId: model.modelId,
124
+ config: model.config
125
+ });
126
+ }
127
+ static [WORKFLOW_DESERIALIZE](options) {
128
+ return new _GoogleGenerativeAIEmbeddingModel(
129
+ options.modelId,
130
+ options.config
131
+ );
132
+ }
105
133
  get provider() {
106
134
  return this.config.provider;
107
135
  }
@@ -111,41 +139,50 @@ var GoogleGenerativeAIEmbeddingModel = class {
111
139
  abortSignal,
112
140
  providerOptions
113
141
  }) {
114
- const googleOptions = await (0, import_provider_utils3.parseProviderOptions)({
142
+ const googleOptions = await parseProviderOptions({
115
143
  provider: "google",
116
144
  providerOptions,
117
145
  schema: googleEmbeddingModelOptions
118
146
  });
119
147
  if (values.length > this.maxEmbeddingsPerCall) {
120
- throw new import_provider.TooManyEmbeddingValuesForCallError({
148
+ throw new TooManyEmbeddingValuesForCallError({
121
149
  provider: this.provider,
122
150
  modelId: this.modelId,
123
151
  maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
124
152
  values
125
153
  });
126
154
  }
127
- const mergedHeaders = (0, import_provider_utils3.combineHeaders)(
128
- await (0, import_provider_utils3.resolve)(this.config.headers),
155
+ const mergedHeaders = combineHeaders(
156
+ this.config.headers ? await resolve(this.config.headers) : void 0,
129
157
  headers
130
158
  );
159
+ const multimodalContent = googleOptions == null ? void 0 : googleOptions.content;
160
+ if (multimodalContent != null && multimodalContent.length !== values.length) {
161
+ throw new Error(
162
+ `The number of multimodal content entries (${multimodalContent.length}) must match the number of values (${values.length}).`
163
+ );
164
+ }
131
165
  if (values.length === 1) {
166
+ const valueParts = multimodalContent == null ? void 0 : multimodalContent[0];
167
+ const textPart = values[0] ? [{ text: values[0] }] : [];
168
+ const parts = valueParts != null ? [...textPart, ...valueParts] : [{ text: values[0] }];
132
169
  const {
133
170
  responseHeaders: responseHeaders2,
134
171
  value: response2,
135
172
  rawValue: rawValue2
136
- } = await (0, import_provider_utils3.postJsonToApi)({
173
+ } = await postJsonToApi({
137
174
  url: `${this.config.baseURL}/models/${this.modelId}:embedContent`,
138
175
  headers: mergedHeaders,
139
176
  body: {
140
177
  model: `models/${this.modelId}`,
141
178
  content: {
142
- parts: [{ text: values[0] }]
179
+ parts
143
180
  },
144
181
  outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
145
182
  taskType: googleOptions == null ? void 0 : googleOptions.taskType
146
183
  },
147
184
  failedResponseHandler: googleFailedResponseHandler,
148
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
185
+ successfulResponseHandler: createJsonResponseHandler(
149
186
  googleGenerativeAISingleEmbeddingResponseSchema
150
187
  ),
151
188
  abortSignal,
@@ -162,19 +199,26 @@ var GoogleGenerativeAIEmbeddingModel = class {
162
199
  responseHeaders,
163
200
  value: response,
164
201
  rawValue
165
- } = await (0, import_provider_utils3.postJsonToApi)({
202
+ } = await postJsonToApi({
166
203
  url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
167
204
  headers: mergedHeaders,
168
205
  body: {
169
- requests: values.map((value) => ({
170
- model: `models/${this.modelId}`,
171
- content: { role: "user", parts: [{ text: value }] },
172
- outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
173
- taskType: googleOptions == null ? void 0 : googleOptions.taskType
174
- }))
206
+ requests: values.map((value, index) => {
207
+ const valueParts = multimodalContent == null ? void 0 : multimodalContent[index];
208
+ const textPart = value ? [{ text: value }] : [];
209
+ return {
210
+ model: `models/${this.modelId}`,
211
+ content: {
212
+ role: "user",
213
+ parts: valueParts != null ? [...textPart, ...valueParts] : [{ text: value }]
214
+ },
215
+ outputDimensionality: googleOptions == null ? void 0 : googleOptions.outputDimensionality,
216
+ taskType: googleOptions == null ? void 0 : googleOptions.taskType
217
+ };
218
+ })
175
219
  },
176
220
  failedResponseHandler: googleFailedResponseHandler,
177
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
221
+ successfulResponseHandler: createJsonResponseHandler(
178
222
  googleGenerativeAITextEmbeddingResponseSchema
179
223
  ),
180
224
  abortSignal,
@@ -188,24 +232,40 @@ var GoogleGenerativeAIEmbeddingModel = class {
188
232
  };
189
233
  }
190
234
  };
191
- var googleGenerativeAITextEmbeddingResponseSchema = (0, import_provider_utils3.lazySchema)(
192
- () => (0, import_provider_utils3.zodSchema)(
193
- import_v43.z.object({
194
- embeddings: import_v43.z.array(import_v43.z.object({ values: import_v43.z.array(import_v43.z.number()) }))
235
+ var googleGenerativeAITextEmbeddingResponseSchema = lazySchema3(
236
+ () => zodSchema3(
237
+ z3.object({
238
+ embeddings: z3.array(z3.object({ values: z3.array(z3.number()) }))
195
239
  })
196
240
  )
197
241
  );
198
- var googleGenerativeAISingleEmbeddingResponseSchema = (0, import_provider_utils3.lazySchema)(
199
- () => (0, import_provider_utils3.zodSchema)(
200
- import_v43.z.object({
201
- embedding: import_v43.z.object({ values: import_v43.z.array(import_v43.z.number()) })
242
+ var googleGenerativeAISingleEmbeddingResponseSchema = lazySchema3(
243
+ () => zodSchema3(
244
+ z3.object({
245
+ embedding: z3.object({ values: z3.array(z3.number()) })
202
246
  })
203
247
  )
204
248
  );
205
249
 
206
250
  // src/google-generative-ai-language-model.ts
207
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
208
- var import_v45 = require("zod/v4");
251
+ import {
252
+ combineHeaders as combineHeaders2,
253
+ createEventSourceResponseHandler,
254
+ createJsonResponseHandler as createJsonResponseHandler2,
255
+ generateId,
256
+ isCustomReasoning,
257
+ lazySchema as lazySchema5,
258
+ mapReasoningToProviderBudget,
259
+ mapReasoningToProviderEffort,
260
+ parseProviderOptions as parseProviderOptions2,
261
+ postJsonToApi as postJsonToApi2,
262
+ resolve as resolve2,
263
+ serializeModelOptions as serializeModelOptions2,
264
+ WORKFLOW_SERIALIZE as WORKFLOW_SERIALIZE2,
265
+ WORKFLOW_DESERIALIZE as WORKFLOW_DESERIALIZE2,
266
+ zodSchema as zodSchema5
267
+ } from "@ai-sdk/provider-utils";
268
+ import { z as z5 } from "zod/v4";
209
269
 
210
270
  // src/convert-google-generative-ai-usage.ts
211
271
  function convertGoogleGenerativeAIUsage(usage) {
@@ -363,20 +423,133 @@ function isEmptyObjectSchema(jsonSchema) {
363
423
  }
364
424
 
365
425
  // src/convert-to-google-generative-ai-messages.ts
366
- var import_provider2 = require("@ai-sdk/provider");
367
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
426
+ import {
427
+ UnsupportedFunctionalityError
428
+ } from "@ai-sdk/provider";
429
+ import {
430
+ convertToBase64,
431
+ isProviderReference,
432
+ resolveProviderReference
433
+ } from "@ai-sdk/provider-utils";
434
+ var dataUrlRegex = /^data:([^;,]+);base64,(.+)$/s;
435
+ function parseBase64DataUrl(value) {
436
+ const match = dataUrlRegex.exec(value);
437
+ if (match == null) {
438
+ return void 0;
439
+ }
440
+ return {
441
+ mediaType: match[1],
442
+ data: match[2]
443
+ };
444
+ }
445
+ function convertUrlToolResultPart(url) {
446
+ const parsedDataUrl = parseBase64DataUrl(url);
447
+ if (parsedDataUrl == null) {
448
+ return void 0;
449
+ }
450
+ return {
451
+ inlineData: {
452
+ mimeType: parsedDataUrl.mediaType,
453
+ data: parsedDataUrl.data
454
+ }
455
+ };
456
+ }
457
+ function appendToolResultParts(parts, toolName, outputValue) {
458
+ const functionResponseParts = [];
459
+ const responseTextParts = [];
460
+ for (const contentPart of outputValue) {
461
+ switch (contentPart.type) {
462
+ case "text": {
463
+ responseTextParts.push(contentPart.text);
464
+ break;
465
+ }
466
+ case "file-data": {
467
+ functionResponseParts.push({
468
+ inlineData: {
469
+ mimeType: contentPart.mediaType,
470
+ data: contentPart.data
471
+ }
472
+ });
473
+ break;
474
+ }
475
+ case "file-url": {
476
+ const functionResponsePart = convertUrlToolResultPart(
477
+ contentPart.url
478
+ );
479
+ if (functionResponsePart != null) {
480
+ functionResponseParts.push(functionResponsePart);
481
+ } else {
482
+ responseTextParts.push(JSON.stringify(contentPart));
483
+ }
484
+ break;
485
+ }
486
+ default: {
487
+ responseTextParts.push(JSON.stringify(contentPart));
488
+ break;
489
+ }
490
+ }
491
+ }
492
+ parts.push({
493
+ functionResponse: {
494
+ name: toolName,
495
+ response: {
496
+ name: toolName,
497
+ content: responseTextParts.length > 0 ? responseTextParts.join("\n") : "Tool executed successfully."
498
+ },
499
+ ...functionResponseParts.length > 0 ? { parts: functionResponseParts } : {}
500
+ }
501
+ });
502
+ }
503
+ function appendLegacyToolResultParts(parts, toolName, outputValue) {
504
+ for (const contentPart of outputValue) {
505
+ switch (contentPart.type) {
506
+ case "text":
507
+ parts.push({
508
+ functionResponse: {
509
+ name: toolName,
510
+ response: {
511
+ name: toolName,
512
+ content: contentPart.text
513
+ }
514
+ }
515
+ });
516
+ break;
517
+ case "file-data":
518
+ if (contentPart.mediaType.startsWith("image/")) {
519
+ parts.push(
520
+ {
521
+ inlineData: {
522
+ mimeType: contentPart.mediaType,
523
+ data: contentPart.data
524
+ }
525
+ },
526
+ {
527
+ text: "Tool executed successfully and returned this image as a response"
528
+ }
529
+ );
530
+ } else {
531
+ parts.push({ text: JSON.stringify(contentPart) });
532
+ }
533
+ break;
534
+ default:
535
+ parts.push({ text: JSON.stringify(contentPart) });
536
+ break;
537
+ }
538
+ }
539
+ }
368
540
  function convertToGoogleGenerativeAIMessages(prompt, options) {
369
- var _a, _b, _c;
541
+ var _a, _b, _c, _d, _e, _f, _g, _h;
370
542
  const systemInstructionParts = [];
371
543
  const contents = [];
372
544
  let systemMessagesAllowed = true;
373
545
  const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
374
546
  const providerOptionsName = (_b = options == null ? void 0 : options.providerOptionsName) != null ? _b : "google";
547
+ const supportsFunctionResponseParts = (_c = options == null ? void 0 : options.supportsFunctionResponseParts) != null ? _c : true;
375
548
  for (const { role, content } of prompt) {
376
549
  switch (role) {
377
550
  case "system": {
378
551
  if (!systemMessagesAllowed) {
379
- throw new import_provider2.UnsupportedFunctionalityError({
552
+ throw new UnsupportedFunctionalityError({
380
553
  functionality: "system messages are only supported at the beginning of the conversation"
381
554
  });
382
555
  }
@@ -394,19 +567,36 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
394
567
  }
395
568
  case "file": {
396
569
  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
397
- parts.push(
398
- part.data instanceof URL ? {
570
+ if (part.data instanceof URL) {
571
+ parts.push({
399
572
  fileData: {
400
573
  mimeType: mediaType,
401
574
  fileUri: part.data.toString()
402
575
  }
403
- } : {
576
+ });
577
+ } else if (isProviderReference(part.data)) {
578
+ if (providerOptionsName === "vertex") {
579
+ throw new UnsupportedFunctionalityError({
580
+ functionality: "file parts with provider references"
581
+ });
582
+ }
583
+ parts.push({
584
+ fileData: {
585
+ mimeType: mediaType,
586
+ fileUri: resolveProviderReference({
587
+ reference: part.data,
588
+ provider: "google"
589
+ })
590
+ }
591
+ });
592
+ } else {
593
+ parts.push({
404
594
  inlineData: {
405
595
  mimeType: mediaType,
406
- data: (0, import_provider_utils4.convertToBase64)(part.data)
596
+ data: convertToBase64(part.data)
407
597
  }
408
- }
409
- );
598
+ });
599
+ }
410
600
  break;
411
601
  }
412
602
  }
@@ -419,8 +609,8 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
419
609
  contents.push({
420
610
  role: "model",
421
611
  parts: content.map((part) => {
422
- var _a2, _b2, _c2, _d;
423
- const providerOpts = (_d = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) != null ? _d : providerOptionsName !== "google" ? (_b2 = part.providerOptions) == null ? void 0 : _b2.google : (_c2 = part.providerOptions) == null ? void 0 : _c2.vertex;
612
+ var _a2, _b2, _c2, _d2;
613
+ const providerOpts = (_d2 = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) != null ? _d2 : providerOptionsName !== "google" ? (_b2 = part.providerOptions) == null ? void 0 : _b2.google : (_c2 = part.providerOptions) == null ? void 0 : _c2.vertex;
424
614
  const thoughtSignature = (providerOpts == null ? void 0 : providerOpts.thoughtSignature) != null ? String(providerOpts.thoughtSignature) : void 0;
425
615
  switch (part.type) {
426
616
  case "text": {
@@ -436,22 +626,67 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
436
626
  thoughtSignature
437
627
  };
438
628
  }
629
+ case "reasoning-file": {
630
+ if (part.data instanceof URL) {
631
+ throw new UnsupportedFunctionalityError({
632
+ functionality: "File data URLs in assistant messages are not supported"
633
+ });
634
+ }
635
+ return {
636
+ inlineData: {
637
+ mimeType: part.mediaType,
638
+ data: convertToBase64(part.data)
639
+ },
640
+ thought: true,
641
+ thoughtSignature
642
+ };
643
+ }
439
644
  case "file": {
440
645
  if (part.data instanceof URL) {
441
- throw new import_provider2.UnsupportedFunctionalityError({
646
+ throw new UnsupportedFunctionalityError({
442
647
  functionality: "File data URLs in assistant messages are not supported"
443
648
  });
444
649
  }
650
+ if (isProviderReference(part.data)) {
651
+ if (providerOptionsName === "vertex") {
652
+ throw new UnsupportedFunctionalityError({
653
+ functionality: "file parts with provider references"
654
+ });
655
+ }
656
+ return {
657
+ fileData: {
658
+ mimeType: part.mediaType,
659
+ fileUri: resolveProviderReference({
660
+ reference: part.data,
661
+ provider: "google"
662
+ })
663
+ },
664
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
665
+ thoughtSignature
666
+ };
667
+ }
445
668
  return {
446
669
  inlineData: {
447
670
  mimeType: part.mediaType,
448
- data: (0, import_provider_utils4.convertToBase64)(part.data)
671
+ data: convertToBase64(part.data)
449
672
  },
450
673
  ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
451
674
  thoughtSignature
452
675
  };
453
676
  }
454
677
  case "tool-call": {
678
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
679
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
680
+ if (serverToolCallId && serverToolType) {
681
+ return {
682
+ toolCall: {
683
+ toolType: serverToolType,
684
+ args: typeof part.input === "string" ? JSON.parse(part.input) : part.input,
685
+ id: serverToolCallId
686
+ },
687
+ thoughtSignature
688
+ };
689
+ }
455
690
  return {
456
691
  functionCall: {
457
692
  name: part.toolName,
@@ -460,6 +695,21 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
460
695
  thoughtSignature
461
696
  };
462
697
  }
698
+ case "tool-result": {
699
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
700
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
701
+ if (serverToolCallId && serverToolType) {
702
+ return {
703
+ toolResponse: {
704
+ toolType: serverToolType,
705
+ response: part.output.type === "json" ? part.output.value : {},
706
+ id: serverToolCallId
707
+ },
708
+ thoughtSignature
709
+ };
710
+ }
711
+ return void 0;
712
+ }
463
713
  }
464
714
  }).filter((part) => part !== void 0)
465
715
  });
@@ -472,38 +722,32 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
472
722
  if (part.type === "tool-approval-response") {
473
723
  continue;
474
724
  }
725
+ const partProviderOpts = (_g = (_d = part.providerOptions) == null ? void 0 : _d[providerOptionsName]) != null ? _g : providerOptionsName !== "google" ? (_e = part.providerOptions) == null ? void 0 : _e.google : (_f = part.providerOptions) == null ? void 0 : _f.vertex;
726
+ const serverToolCallId = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolCallId) != null ? String(partProviderOpts.serverToolCallId) : void 0;
727
+ const serverToolType = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolType) != null ? String(partProviderOpts.serverToolType) : void 0;
728
+ if (serverToolCallId && serverToolType) {
729
+ const serverThoughtSignature = (partProviderOpts == null ? void 0 : partProviderOpts.thoughtSignature) != null ? String(partProviderOpts.thoughtSignature) : void 0;
730
+ if (contents.length > 0) {
731
+ const lastContent = contents[contents.length - 1];
732
+ if (lastContent.role === "model") {
733
+ lastContent.parts.push({
734
+ toolResponse: {
735
+ toolType: serverToolType,
736
+ response: part.output.type === "json" ? part.output.value : {},
737
+ id: serverToolCallId
738
+ },
739
+ thoughtSignature: serverThoughtSignature
740
+ });
741
+ continue;
742
+ }
743
+ }
744
+ }
475
745
  const output = part.output;
476
746
  if (output.type === "content") {
477
- for (const contentPart of output.value) {
478
- switch (contentPart.type) {
479
- case "text":
480
- parts.push({
481
- functionResponse: {
482
- name: part.toolName,
483
- response: {
484
- name: part.toolName,
485
- content: contentPart.text
486
- }
487
- }
488
- });
489
- break;
490
- case "image-data":
491
- parts.push(
492
- {
493
- inlineData: {
494
- mimeType: contentPart.mediaType,
495
- data: contentPart.data
496
- }
497
- },
498
- {
499
- text: "Tool executed successfully and returned this image as a response"
500
- }
501
- );
502
- break;
503
- default:
504
- parts.push({ text: JSON.stringify(contentPart) });
505
- break;
506
- }
747
+ if (supportsFunctionResponseParts) {
748
+ appendToolResultParts(parts, part.toolName, output.value);
749
+ } else {
750
+ appendLegacyToolResultParts(parts, part.toolName, output.value);
507
751
  }
508
752
  } else {
509
753
  parts.push({
@@ -511,7 +755,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
511
755
  name: part.toolName,
512
756
  response: {
513
757
  name: part.toolName,
514
- content: output.type === "execution-denied" ? (_c = output.reason) != null ? _c : "Tool execution denied." : output.value
758
+ content: output.type === "execution-denied" ? (_h = output.reason) != null ? _h : "Tool execution denied." : output.value
515
759
  }
516
760
  }
517
761
  });
@@ -541,24 +785,24 @@ function getModelPath(modelId) {
541
785
  }
542
786
 
543
787
  // src/google-generative-ai-options.ts
544
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
545
- var import_v44 = require("zod/v4");
546
- var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
547
- () => (0, import_provider_utils5.zodSchema)(
548
- import_v44.z.object({
549
- responseModalities: import_v44.z.array(import_v44.z.enum(["TEXT", "IMAGE"])).optional(),
550
- thinkingConfig: import_v44.z.object({
551
- thinkingBudget: import_v44.z.number().optional(),
552
- includeThoughts: import_v44.z.boolean().optional(),
788
+ import { lazySchema as lazySchema4, zodSchema as zodSchema4 } from "@ai-sdk/provider-utils";
789
+ import { z as z4 } from "zod/v4";
790
+ var googleLanguageModelOptions = lazySchema4(
791
+ () => zodSchema4(
792
+ z4.object({
793
+ responseModalities: z4.array(z4.enum(["TEXT", "IMAGE"])).optional(),
794
+ thinkingConfig: z4.object({
795
+ thinkingBudget: z4.number().optional(),
796
+ includeThoughts: z4.boolean().optional(),
553
797
  // https://ai.google.dev/gemini-api/docs/gemini-3?thinking=high#thinking_level
554
- thinkingLevel: import_v44.z.enum(["minimal", "low", "medium", "high"]).optional()
798
+ thinkingLevel: z4.enum(["minimal", "low", "medium", "high"]).optional()
555
799
  }).optional(),
556
800
  /**
557
801
  * Optional.
558
802
  * The name of the cached content used as context to serve the prediction.
559
803
  * Format: cachedContents/{cachedContent}
560
804
  */
561
- cachedContent: import_v44.z.string().optional(),
805
+ cachedContent: z4.string().optional(),
562
806
  /**
563
807
  * Optional. Enable structured output. Default is true.
564
808
  *
@@ -567,13 +811,13 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
567
811
  * Google Generative AI uses. You can use this to disable
568
812
  * structured outputs if you need to.
569
813
  */
570
- structuredOutputs: import_v44.z.boolean().optional(),
814
+ structuredOutputs: z4.boolean().optional(),
571
815
  /**
572
816
  * Optional. A list of unique safety settings for blocking unsafe content.
573
817
  */
574
- safetySettings: import_v44.z.array(
575
- import_v44.z.object({
576
- category: import_v44.z.enum([
818
+ safetySettings: z4.array(
819
+ z4.object({
820
+ category: z4.enum([
577
821
  "HARM_CATEGORY_UNSPECIFIED",
578
822
  "HARM_CATEGORY_HATE_SPEECH",
579
823
  "HARM_CATEGORY_DANGEROUS_CONTENT",
@@ -581,7 +825,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
581
825
  "HARM_CATEGORY_SEXUALLY_EXPLICIT",
582
826
  "HARM_CATEGORY_CIVIC_INTEGRITY"
583
827
  ]),
584
- threshold: import_v44.z.enum([
828
+ threshold: z4.enum([
585
829
  "HARM_BLOCK_THRESHOLD_UNSPECIFIED",
586
830
  "BLOCK_LOW_AND_ABOVE",
587
831
  "BLOCK_MEDIUM_AND_ABOVE",
@@ -591,7 +835,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
591
835
  ])
592
836
  })
593
837
  ).optional(),
594
- threshold: import_v44.z.enum([
838
+ threshold: z4.enum([
595
839
  "HARM_BLOCK_THRESHOLD_UNSPECIFIED",
596
840
  "BLOCK_LOW_AND_ABOVE",
597
841
  "BLOCK_MEDIUM_AND_ABOVE",
@@ -604,19 +848,19 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
604
848
  *
605
849
  * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding
606
850
  */
607
- audioTimestamp: import_v44.z.boolean().optional(),
851
+ audioTimestamp: z4.boolean().optional(),
608
852
  /**
609
853
  * Optional. Defines labels used in billing reports. Available on Vertex AI only.
610
854
  *
611
855
  * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/add-labels-to-api-calls
612
856
  */
613
- labels: import_v44.z.record(import_v44.z.string(), import_v44.z.string()).optional(),
857
+ labels: z4.record(z4.string(), z4.string()).optional(),
614
858
  /**
615
859
  * Optional. If specified, the media resolution specified will be used.
616
860
  *
617
861
  * https://ai.google.dev/api/generate-content#MediaResolution
618
862
  */
619
- mediaResolution: import_v44.z.enum([
863
+ mediaResolution: z4.enum([
620
864
  "MEDIA_RESOLUTION_UNSPECIFIED",
621
865
  "MEDIA_RESOLUTION_LOW",
622
866
  "MEDIA_RESOLUTION_MEDIUM",
@@ -627,8 +871,8 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
627
871
  *
628
872
  * https://ai.google.dev/gemini-api/docs/image-generation#aspect_ratios
629
873
  */
630
- imageConfig: import_v44.z.object({
631
- aspectRatio: import_v44.z.enum([
874
+ imageConfig: z4.object({
875
+ aspectRatio: z4.enum([
632
876
  "1:1",
633
877
  "2:3",
634
878
  "3:2",
@@ -644,7 +888,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
644
888
  "1:4",
645
889
  "4:1"
646
890
  ]).optional(),
647
- imageSize: import_v44.z.enum(["1K", "2K", "4K", "512"]).optional()
891
+ imageSize: z4.enum(["1K", "2K", "4K", "512"]).optional()
648
892
  }).optional(),
649
893
  /**
650
894
  * Optional. Configuration for grounding retrieval.
@@ -652,24 +896,46 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
652
896
  *
653
897
  * https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/grounding-with-google-maps
654
898
  */
655
- retrievalConfig: import_v44.z.object({
656
- latLng: import_v44.z.object({
657
- latitude: import_v44.z.number(),
658
- longitude: import_v44.z.number()
899
+ retrievalConfig: z4.object({
900
+ latLng: z4.object({
901
+ latitude: z4.number(),
902
+ longitude: z4.number()
659
903
  }).optional()
660
- }).optional()
904
+ }).optional(),
905
+ /**
906
+ * Optional. When set to true, function call arguments will be streamed
907
+ * incrementally via partialArgs in streaming responses. Only supported
908
+ * on the Vertex AI API (not the Gemini API) and only for Gemini 3+
909
+ * models.
910
+ *
911
+ * @default false
912
+ *
913
+ * https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc
914
+ */
915
+ streamFunctionCallArguments: z4.boolean().optional(),
916
+ /**
917
+ * Optional. The service tier to use for the request.
918
+ */
919
+ serviceTier: z4.enum(["standard", "flex", "priority"]).optional()
661
920
  })
662
921
  )
663
922
  );
923
+ var VertexServiceTierMap = {
924
+ standard: "SERVICE_TIER_STANDARD",
925
+ flex: "SERVICE_TIER_FLEX",
926
+ priority: "SERVICE_TIER_PRIORITY"
927
+ };
664
928
 
665
929
  // src/google-prepare-tools.ts
666
- var import_provider3 = require("@ai-sdk/provider");
930
+ import {
931
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
932
+ } from "@ai-sdk/provider";
667
933
  function prepareTools({
668
934
  tools,
669
935
  toolChoice,
670
936
  modelId
671
937
  }) {
672
- var _a;
938
+ var _a, _b;
673
939
  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
674
940
  const toolWarnings = [];
675
941
  const isLatest = [
@@ -678,13 +944,14 @@ function prepareTools({
678
944
  "gemini-pro-latest"
679
945
  ].some((id) => id === modelId);
680
946
  const isGemini2orNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || modelId.includes("nano-banana") || isLatest;
947
+ const isGemini3orNewer = modelId.includes("gemini-3");
681
948
  const supportsFileSearch = modelId.includes("gemini-2.5") || modelId.includes("gemini-3");
682
949
  if (tools == null) {
683
950
  return { tools: void 0, toolConfig: void 0, toolWarnings };
684
951
  }
685
952
  const hasFunctionTools = tools.some((tool) => tool.type === "function");
686
953
  const hasProviderTools = tools.some((tool) => tool.type === "provider");
687
- if (hasFunctionTools && hasProviderTools) {
954
+ if (hasFunctionTools && hasProviderTools && !isGemini3orNewer) {
688
955
  toolWarnings.push({
689
956
  type: "unsupported",
690
957
  feature: `combination of function and provider-defined tools`
@@ -735,7 +1002,7 @@ function prepareTools({
735
1002
  toolWarnings.push({
736
1003
  type: "unsupported",
737
1004
  feature: `provider-defined tool ${tool.id}`,
738
- details: "The code execution tools is not supported with other Gemini models than Gemini 2."
1005
+ details: "The code execution tool is not supported with other Gemini models than Gemini 2."
739
1006
  });
740
1007
  }
741
1008
  break;
@@ -789,6 +1056,45 @@ function prepareTools({
789
1056
  break;
790
1057
  }
791
1058
  });
1059
+ if (hasFunctionTools && isGemini3orNewer && googleTools2.length > 0) {
1060
+ const functionDeclarations2 = [];
1061
+ for (const tool of tools) {
1062
+ if (tool.type === "function") {
1063
+ functionDeclarations2.push({
1064
+ name: tool.name,
1065
+ description: (_a = tool.description) != null ? _a : "",
1066
+ parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
1067
+ });
1068
+ }
1069
+ }
1070
+ const combinedToolConfig = {
1071
+ functionCallingConfig: { mode: "VALIDATED" },
1072
+ includeServerSideToolInvocations: true
1073
+ };
1074
+ if (toolChoice != null) {
1075
+ switch (toolChoice.type) {
1076
+ case "auto":
1077
+ break;
1078
+ case "none":
1079
+ combinedToolConfig.functionCallingConfig = { mode: "NONE" };
1080
+ break;
1081
+ case "required":
1082
+ combinedToolConfig.functionCallingConfig = { mode: "ANY" };
1083
+ break;
1084
+ case "tool":
1085
+ combinedToolConfig.functionCallingConfig = {
1086
+ mode: "ANY",
1087
+ allowedFunctionNames: [toolChoice.toolName]
1088
+ };
1089
+ break;
1090
+ }
1091
+ }
1092
+ return {
1093
+ tools: [...googleTools2, { functionDeclarations: functionDeclarations2 }],
1094
+ toolConfig: combinedToolConfig,
1095
+ toolWarnings
1096
+ };
1097
+ }
792
1098
  return {
793
1099
  tools: googleTools2.length > 0 ? googleTools2 : void 0,
794
1100
  toolConfig: void 0,
@@ -802,7 +1108,7 @@ function prepareTools({
802
1108
  case "function":
803
1109
  functionDeclarations.push({
804
1110
  name: tool.name,
805
- description: (_a = tool.description) != null ? _a : "",
1111
+ description: (_b = tool.description) != null ? _b : "",
806
1112
  parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
807
1113
  });
808
1114
  if (tool.strict === true) {
@@ -865,13 +1171,236 @@ function prepareTools({
865
1171
  };
866
1172
  default: {
867
1173
  const _exhaustiveCheck = type;
868
- throw new import_provider3.UnsupportedFunctionalityError({
1174
+ throw new UnsupportedFunctionalityError2({
869
1175
  functionality: `tool choice type: ${_exhaustiveCheck}`
870
1176
  });
871
1177
  }
872
1178
  }
873
1179
  }
874
1180
 
1181
// src/google-json-accumulator.ts
//
// Incrementally reconstructs a JSON argument object (and its serialized
// text) from Google's streamed `partialArgs` fragments, so tool-call
// arguments can be surfaced as text deltas while streaming.
var GoogleJSONAccumulator = class {
  constructor() {
    // Object assembled from every fragment seen so far.
    this.accumulatedArgs = {};
    // Serialized JSON emitted so far (may be an unterminated prefix).
    this.jsonText = "";
    /**
     * Stack representing the currently "open" containers in the JSON output.
     * Entry 0 is always the root `{` object once the first value is written.
     */
    this.pathStack = [];
    /**
     * Whether a string value is currently "open" (willContinue was true),
     * meaning the closing quote has not yet been emitted.
     */
    this.stringOpen = false;
  }
  /**
   * Input: [{jsonPath:"$.brightness",numberValue:50}]
   * Output: { currentJSON:{brightness:50}, textDelta:'{"brightness":50' }
   */
  processPartialArgs(partialArgs) {
    let textDelta = "";
    for (const partial of partialArgs) {
      const path = partial.jsonPath.replace(/^\$\./, "");
      if (path === "") continue;
      const segments = parsePath(path);
      const previous = getNestedValue(this.accumulatedArgs, segments);
      if (partial.stringValue != null && previous !== void 0) {
        // Continuation of an already-open string: append the raw text to
        // the accumulated value and emit only the escaped character data
        // (no quotes) into the delta.
        setNestedValue(
          this.accumulatedArgs,
          segments,
          previous + partial.stringValue
        );
        textDelta += JSON.stringify(partial.stringValue).slice(1, -1);
        continue;
      }
      const resolved = resolvePartialArgValue(partial);
      if (resolved == null) continue;
      setNestedValue(this.accumulatedArgs, segments, resolved.value);
      textDelta += this.emitNavigationTo(segments, partial, resolved.json);
    }
    this.jsonText += textDelta;
    return { currentJSON: this.accumulatedArgs, textDelta };
  }
  /**
   * Input: jsonText='{"brightness":50', accumulatedArgs={brightness:50}
   * Output: { finalJSON:'{"brightness":50}', closingDelta:'}' }
   */
  finalize() {
    const finalJSON = JSON.stringify(this.accumulatedArgs);
    return { finalJSON, closingDelta: finalJSON.slice(this.jsonText.length) };
  }
  /**
   * Input: pathStack=[] (first call) or pathStack=[root,...] (subsequent calls)
   * Output: '{' (first call) or '' (subsequent calls)
   */
  ensureRoot() {
    if (this.pathStack.length > 0) {
      return "";
    }
    this.pathStack.push({ segment: "", isArray: false, childCount: 0 });
    return "{";
  }
  /**
   * Emits the JSON text fragment needed to navigate from the current open
   * path to the new leaf at `targetSegments`, then writes the value.
   *
   * Input: targetSegments=["recipe","name"], arg={jsonPath:"$.recipe.name",stringValue:"Lasagna"}, valueJson='"Lasagna"'
   * Output: '{"recipe":{"name":"Lasagna"'
   */
  emitNavigationTo(targetSegments, arg, valueJson) {
    let out = "";
    if (this.stringOpen) {
      // Terminate the previously open string before moving elsewhere.
      this.stringOpen = false;
      out += '"';
    }
    out += this.ensureRoot();
    const containerSegments = targetSegments.slice(0, -1);
    const leaf = targetSegments[targetSegments.length - 1];
    out += this.closeDownTo(this.findCommonStackDepth(containerSegments));
    out += this.openDownTo(containerSegments, leaf);
    return out + this.emitLeaf(leaf, arg, valueJson);
  }
  /**
   * Returns the stack depth to preserve when navigating to a new target
   * container path. Always >= 1 (the root is never popped).
   *
   * Input: stack=[root,"recipe","ingredients",0], target=["recipe","ingredients",1]
   * Output: 3 (keep root+"recipe"+"ingredients")
   */
  findCommonStackDepth(targetContainer) {
    const limit = Math.min(this.pathStack.length - 1, targetContainer.length);
    let matched = 0;
    while (
      matched < limit &&
      this.pathStack[matched + 1].segment === targetContainer[matched]
    ) {
      matched++;
    }
    return matched + 1;
  }
  /**
   * Closes containers from the current stack depth back down to `targetDepth`.
   *
   * Input: this.pathStack=[root,"recipe","ingredients",0], targetDepth=3
   * Output: '}'
   */
  closeDownTo(targetDepth) {
    let out = "";
    while (this.pathStack.length > targetDepth) {
      const popped = this.pathStack.pop();
      out += popped.isArray ? "]" : "}";
    }
    return out;
  }
  /**
   * Opens containers from the current stack depth down to the full target
   * container path, emitting opening `{`, `[`, keys, and commas as needed.
   * `leafSegment` is used to determine if the innermost container is an array.
   *
   * Input: this.pathStack=[root], targetContainer=["recipe","ingredients"], leafSegment=0
   * Output: '"recipe":{"ingredients":['
   */
  openDownTo(targetContainer, leafSegment) {
    let out = "";
    // Note: the starting index is fixed before the loop; pushes inside the
    // loop advance in lockstep with `i`.
    for (let i = this.pathStack.length - 1; i < targetContainer.length; i++) {
      const seg = targetContainer[i];
      const parent = this.pathStack[this.pathStack.length - 1];
      if (parent.childCount > 0) {
        out += ",";
      }
      parent.childCount += 1;
      if (typeof seg === "string") {
        out += `${JSON.stringify(seg)}:`;
      }
      // The next-deeper segment (or the leaf) decides array vs. object.
      const next = i + 1 < targetContainer.length ? targetContainer[i + 1] : leafSegment;
      const isArray = typeof next === "number";
      out += isArray ? "[" : "{";
      this.pathStack.push({ segment: seg, isArray, childCount: 0 });
    }
    return out;
  }
  /**
   * Emits the comma, key, and value for a leaf entry in the current container.
   *
   * Input: leafSegment="name", arg={stringValue:"Lasagna"}, valueJson='"Lasagna"'
   * Output: '"name":"Lasagna"' (or ',"name":"Lasagna"' if container.childCount > 0)
   */
  emitLeaf(leafSegment, arg, valueJson) {
    const container = this.pathStack[this.pathStack.length - 1];
    let out = container.childCount > 0 ? "," : "";
    container.childCount += 1;
    if (typeof leafSegment === "string") {
      out += `${JSON.stringify(leafSegment)}:`;
    }
    if (arg.stringValue != null && arg.willContinue) {
      // Leave the string open: emit everything except the closing quote.
      out += valueJson.slice(0, -1);
      this.stringOpen = true;
    } else {
      out += valueJson;
    }
    return out;
  }
};
1361
/**
 * Splits a dotted JSON path (e.g. "recipe.ingredients[0].name") into
 * segments: property names as strings, array indices as numbers.
 */
function parsePath(rawPath) {
  const segments = [];
  for (const part of rawPath.split(".")) {
    const bracket = part.indexOf("[");
    if (bracket < 0) {
      // Plain property name, no index access.
      segments.push(part);
      continue;
    }
    if (bracket > 0) {
      // Property name preceding the first bracket.
      segments.push(part.slice(0, bracket));
    }
    // One numeric segment per `[n]` group (handles chained indices).
    for (const match of part.matchAll(/\[(\d+)\]/g)) {
      segments.push(Number.parseInt(match[1], 10));
    }
  }
  return segments;
}
1376
/**
 * Reads the value at `segments` inside `obj`, returning undefined when any
 * intermediate node is missing or not traversable.
 */
function getNestedValue(obj, segments) {
  let node = obj;
  for (const segment of segments) {
    // null and primitives (including undefined) cannot be descended into.
    if (node === null || typeof node !== "object") {
      return void 0;
    }
    node = node[segment];
  }
  return node;
}
1384
/**
 * Writes `value` at `segments` inside `obj`, creating any missing
 * intermediate containers along the way (arrays when the following segment
 * is a numeric index, objects otherwise). Mutates `obj` in place.
 */
function setNestedValue(obj, segments, value) {
  let node = obj;
  const last = segments.length - 1;
  for (let i = 0; i < last; i++) {
    const key = segments[i];
    if (node[key] == null) {
      // Create the missing container based on the next segment's type.
      node[key] = typeof segments[i + 1] === "number" ? [] : {};
    }
    node = node[key];
  }
  node[segments[last]] = value;
}
1396
/**
 * Extracts the concrete value from a partial-arg fragment, along with its
 * JSON serialization. Returns undefined when the fragment carries no value.
 */
function resolvePartialArgValue(arg) {
  // First non-nullish wins; 0, "" and false are all valid values.
  const direct = arg.stringValue ?? arg.numberValue ?? arg.boolValue;
  if (direct != null) {
    return { value: direct, json: JSON.stringify(direct) };
  }
  if ("nullValue" in arg) {
    return { value: null, json: "null" };
  }
  return void 0;
}
1403
+
875
1404
  // src/map-google-generative-ai-finish-reason.ts
876
1405
  function mapGoogleGenerativeAIFinishReason({
877
1406
  finishReason,
@@ -899,13 +1428,22 @@ function mapGoogleGenerativeAIFinishReason({
899
1428
  }
900
1429
 
901
1430
  // src/google-generative-ai-language-model.ts
902
- var GoogleGenerativeAILanguageModel = class {
1431
+ var GoogleGenerativeAILanguageModel = class _GoogleGenerativeAILanguageModel {
903
1432
  constructor(modelId, config) {
904
- this.specificationVersion = "v3";
1433
+ this.specificationVersion = "v4";
905
1434
  var _a;
906
1435
  this.modelId = modelId;
907
1436
  this.config = config;
908
- this.generateId = (_a = config.generateId) != null ? _a : import_provider_utils6.generateId;
1437
+ this.generateId = (_a = config.generateId) != null ? _a : generateId;
1438
+ }
1439
+ static [WORKFLOW_SERIALIZE2](model) {
1440
+ return serializeModelOptions2({
1441
+ modelId: model.modelId,
1442
+ config: model.config
1443
+ });
1444
+ }
1445
+ static [WORKFLOW_DESERIALIZE2](options) {
1446
+ return new _GoogleGenerativeAILanguageModel(options.modelId, options.config);
909
1447
  }
910
1448
  get provider() {
911
1449
  return this.config.provider;
@@ -927,35 +1465,52 @@ var GoogleGenerativeAILanguageModel = class {
927
1465
  seed,
928
1466
  tools,
929
1467
  toolChoice,
1468
+ reasoning,
930
1469
  providerOptions
931
- }) {
932
- var _a;
1470
+ }, { isStreaming = false } = {}) {
1471
+ var _a, _b;
933
1472
  const warnings = [];
934
1473
  const providerOptionsName = this.config.provider.includes("vertex") ? "vertex" : "google";
935
- let googleOptions = await (0, import_provider_utils6.parseProviderOptions)({
1474
+ let googleOptions = await parseProviderOptions2({
936
1475
  provider: providerOptionsName,
937
1476
  providerOptions,
938
1477
  schema: googleLanguageModelOptions
939
1478
  });
940
1479
  if (googleOptions == null && providerOptionsName !== "google") {
941
- googleOptions = await (0, import_provider_utils6.parseProviderOptions)({
1480
+ googleOptions = await parseProviderOptions2({
942
1481
  provider: "google",
943
1482
  providerOptions,
944
1483
  schema: googleLanguageModelOptions
945
1484
  });
946
1485
  }
1486
+ const isVertexProvider = this.config.provider.startsWith("google.vertex.");
947
1487
  if ((tools == null ? void 0 : tools.some(
948
1488
  (tool) => tool.type === "provider" && tool.id === "google.vertex_rag_store"
949
- )) && !this.config.provider.startsWith("google.vertex.")) {
1489
+ )) && !isVertexProvider) {
950
1490
  warnings.push({
951
1491
  type: "other",
952
1492
  message: `The 'vertex_rag_store' tool is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
953
1493
  });
954
1494
  }
1495
+ if ((googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) && !isVertexProvider) {
1496
+ warnings.push({
1497
+ type: "other",
1498
+ message: `'streamFunctionCallArguments' is only supported on the Vertex AI API and will be ignored with the current Google provider (${this.config.provider}). See https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc`
1499
+ });
1500
+ }
1501
+ let sanitizedServiceTier = googleOptions == null ? void 0 : googleOptions.serviceTier;
1502
+ if ((googleOptions == null ? void 0 : googleOptions.serviceTier) && isVertexProvider) {
1503
+ sanitizedServiceTier = VertexServiceTierMap[googleOptions.serviceTier];
1504
+ }
955
1505
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
1506
+ const supportsFunctionResponseParts = this.modelId.startsWith("gemini-3");
956
1507
  const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
957
1508
  prompt,
958
- { isGemmaModel, providerOptionsName }
1509
+ {
1510
+ isGemmaModel,
1511
+ providerOptionsName,
1512
+ supportsFunctionResponseParts
1513
+ }
959
1514
  );
960
1515
  const {
961
1516
  tools: googleTools2,
@@ -966,6 +1521,25 @@ var GoogleGenerativeAILanguageModel = class {
966
1521
  toolChoice,
967
1522
  modelId: this.modelId
968
1523
  });
1524
+ const resolvedThinking = resolveThinkingConfig({
1525
+ reasoning,
1526
+ modelId: this.modelId,
1527
+ warnings
1528
+ });
1529
+ const thinkingConfig = (googleOptions == null ? void 0 : googleOptions.thinkingConfig) || resolvedThinking ? { ...resolvedThinking, ...googleOptions == null ? void 0 : googleOptions.thinkingConfig } : void 0;
1530
+ const streamFunctionCallArguments = isStreaming && isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : false : void 0;
1531
+ const toolConfig = googleToolConfig || streamFunctionCallArguments || (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1532
+ ...googleToolConfig,
1533
+ ...streamFunctionCallArguments && {
1534
+ functionCallingConfig: {
1535
+ ...googleToolConfig == null ? void 0 : googleToolConfig.functionCallingConfig,
1536
+ streamFunctionCallArguments: true
1537
+ }
1538
+ },
1539
+ ...(googleOptions == null ? void 0 : googleOptions.retrievalConfig) && {
1540
+ retrievalConfig: googleOptions.retrievalConfig
1541
+ }
1542
+ } : void 0;
969
1543
  return {
970
1544
  args: {
971
1545
  generationConfig: {
@@ -983,13 +1557,13 @@ var GoogleGenerativeAILanguageModel = class {
983
1557
  responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
984
1558
  // so this is needed as an escape hatch:
985
1559
  // TODO convert into provider option
986
- ((_a = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
1560
+ ((_b = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _b : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
987
1561
  ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
988
1562
  audioTimestamp: googleOptions.audioTimestamp
989
1563
  },
990
1564
  // provider options:
991
1565
  responseModalities: googleOptions == null ? void 0 : googleOptions.responseModalities,
992
- thinkingConfig: googleOptions == null ? void 0 : googleOptions.thinkingConfig,
1566
+ thinkingConfig,
993
1567
  ...(googleOptions == null ? void 0 : googleOptions.mediaResolution) && {
994
1568
  mediaResolution: googleOptions.mediaResolution
995
1569
  },
@@ -1001,36 +1575,34 @@ var GoogleGenerativeAILanguageModel = class {
1001
1575
  systemInstruction: isGemmaModel ? void 0 : systemInstruction,
1002
1576
  safetySettings: googleOptions == null ? void 0 : googleOptions.safetySettings,
1003
1577
  tools: googleTools2,
1004
- toolConfig: (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1005
- ...googleToolConfig,
1006
- retrievalConfig: googleOptions.retrievalConfig
1007
- } : googleToolConfig,
1578
+ toolConfig,
1008
1579
  cachedContent: googleOptions == null ? void 0 : googleOptions.cachedContent,
1009
- labels: googleOptions == null ? void 0 : googleOptions.labels
1580
+ labels: googleOptions == null ? void 0 : googleOptions.labels,
1581
+ serviceTier: sanitizedServiceTier
1010
1582
  },
1011
1583
  warnings: [...warnings, ...toolWarnings],
1012
1584
  providerOptionsName
1013
1585
  };
1014
1586
  }
1015
1587
  async doGenerate(options) {
1016
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
1588
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
1017
1589
  const { args, warnings, providerOptionsName } = await this.getArgs(options);
1018
- const mergedHeaders = (0, import_provider_utils6.combineHeaders)(
1019
- await (0, import_provider_utils6.resolve)(this.config.headers),
1590
+ const mergedHeaders = combineHeaders2(
1591
+ this.config.headers ? await resolve2(this.config.headers) : void 0,
1020
1592
  options.headers
1021
1593
  );
1022
1594
  const {
1023
1595
  responseHeaders,
1024
1596
  value: response,
1025
1597
  rawValue: rawResponse
1026
- } = await (0, import_provider_utils6.postJsonToApi)({
1598
+ } = await postJsonToApi2({
1027
1599
  url: `${this.config.baseURL}/${getModelPath(
1028
1600
  this.modelId
1029
1601
  )}:generateContent`,
1030
1602
  headers: mergedHeaders,
1031
1603
  body: args,
1032
1604
  failedResponseHandler: googleFailedResponseHandler,
1033
- successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(responseSchema),
1605
+ successfulResponseHandler: createJsonResponseHandler2(responseSchema),
1034
1606
  abortSignal: options.abortSignal,
1035
1607
  fetch: this.config.fetch
1036
1608
  });
@@ -1039,6 +1611,7 @@ var GoogleGenerativeAILanguageModel = class {
1039
1611
  const parts = (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [];
1040
1612
  const usageMetadata = response.usageMetadata;
1041
1613
  let lastCodeExecutionToolCallId;
1614
+ let lastServerToolCallId;
1042
1615
  for (const part of parts) {
1043
1616
  if ("executableCode" in part && ((_c = part.executableCode) == null ? void 0 : _c.code)) {
1044
1617
  const toolCallId = this.config.generateId();
@@ -1080,7 +1653,7 @@ var GoogleGenerativeAILanguageModel = class {
1080
1653
  providerMetadata: thoughtSignatureMetadata
1081
1654
  });
1082
1655
  }
1083
- } else if ("functionCall" in part) {
1656
+ } else if ("functionCall" in part && part.functionCall.name != null && part.functionCall.args != null) {
1084
1657
  content.push({
1085
1658
  type: "tool-call",
1086
1659
  toolCallId: this.config.generateId(),
@@ -1096,22 +1669,65 @@ var GoogleGenerativeAILanguageModel = class {
1096
1669
  const hasThought = part.thought === true;
1097
1670
  const hasThoughtSignature = !!part.thoughtSignature;
1098
1671
  content.push({
1099
- type: "file",
1672
+ type: hasThought ? "reasoning-file" : "file",
1100
1673
  data: part.inlineData.data,
1101
1674
  mediaType: part.inlineData.mimeType,
1102
- providerMetadata: hasThought || hasThoughtSignature ? {
1675
+ providerMetadata: hasThoughtSignature ? {
1103
1676
  [providerOptionsName]: {
1104
- ...hasThought ? { thought: true } : {},
1105
- ...hasThoughtSignature ? { thoughtSignature: part.thoughtSignature } : {}
1677
+ thoughtSignature: part.thoughtSignature
1106
1678
  }
1107
1679
  } : void 0
1108
1680
  });
1681
+ } else if ("toolCall" in part && part.toolCall) {
1682
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : this.config.generateId();
1683
+ lastServerToolCallId = toolCallId;
1684
+ content.push({
1685
+ type: "tool-call",
1686
+ toolCallId,
1687
+ toolName: `server:${part.toolCall.toolType}`,
1688
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
1689
+ providerExecuted: true,
1690
+ dynamic: true,
1691
+ providerMetadata: part.thoughtSignature ? {
1692
+ [providerOptionsName]: {
1693
+ thoughtSignature: part.thoughtSignature,
1694
+ serverToolCallId: toolCallId,
1695
+ serverToolType: part.toolCall.toolType
1696
+ }
1697
+ } : {
1698
+ [providerOptionsName]: {
1699
+ serverToolCallId: toolCallId,
1700
+ serverToolType: part.toolCall.toolType
1701
+ }
1702
+ }
1703
+ });
1704
+ } else if ("toolResponse" in part && part.toolResponse) {
1705
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : this.config.generateId();
1706
+ content.push({
1707
+ type: "tool-result",
1708
+ toolCallId: responseToolCallId,
1709
+ toolName: `server:${part.toolResponse.toolType}`,
1710
+ result: (_h = part.toolResponse.response) != null ? _h : {},
1711
+ providerMetadata: part.thoughtSignature ? {
1712
+ [providerOptionsName]: {
1713
+ thoughtSignature: part.thoughtSignature,
1714
+ serverToolCallId: responseToolCallId,
1715
+ serverToolType: part.toolResponse.toolType
1716
+ }
1717
+ } : {
1718
+ [providerOptionsName]: {
1719
+ serverToolCallId: responseToolCallId,
1720
+ serverToolType: part.toolResponse.toolType
1721
+ }
1722
+ }
1723
+ });
1724
+ lastServerToolCallId = void 0;
1109
1725
  }
1110
1726
  }
1111
- const sources = (_e = extractSources({
1727
+ const sources = (_i = extractSources({
1112
1728
  groundingMetadata: candidate.groundingMetadata,
1113
1729
  generateId: this.config.generateId
1114
- })) != null ? _e : [];
1730
+ })) != null ? _i : [];
1115
1731
  for (const source of sources) {
1116
1732
  content.push(source);
1117
1733
  }
@@ -1125,17 +1741,19 @@ var GoogleGenerativeAILanguageModel = class {
1125
1741
  (part) => part.type === "tool-call" && !part.providerExecuted
1126
1742
  )
1127
1743
  }),
1128
- raw: (_f = candidate.finishReason) != null ? _f : void 0
1744
+ raw: (_j = candidate.finishReason) != null ? _j : void 0
1129
1745
  },
1130
1746
  usage: convertGoogleGenerativeAIUsage(usageMetadata),
1131
1747
  warnings,
1132
1748
  providerMetadata: {
1133
1749
  [providerOptionsName]: {
1134
- promptFeedback: (_g = response.promptFeedback) != null ? _g : null,
1135
- groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
1136
- urlContextMetadata: (_i = candidate.urlContextMetadata) != null ? _i : null,
1137
- safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
1138
- usageMetadata: usageMetadata != null ? usageMetadata : null
1750
+ promptFeedback: (_k = response.promptFeedback) != null ? _k : null,
1751
+ groundingMetadata: (_l = candidate.groundingMetadata) != null ? _l : null,
1752
+ urlContextMetadata: (_m = candidate.urlContextMetadata) != null ? _m : null,
1753
+ safetyRatings: (_n = candidate.safetyRatings) != null ? _n : null,
1754
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
1755
+ finishMessage: (_o = candidate.finishMessage) != null ? _o : null,
1756
+ serviceTier: (_p = response.serviceTier) != null ? _p : null
1139
1757
  }
1140
1758
  },
1141
1759
  request: { body: args },
@@ -1147,19 +1765,22 @@ var GoogleGenerativeAILanguageModel = class {
1147
1765
  };
1148
1766
  }
1149
1767
  async doStream(options) {
1150
- const { args, warnings, providerOptionsName } = await this.getArgs(options);
1151
- const headers = (0, import_provider_utils6.combineHeaders)(
1152
- await (0, import_provider_utils6.resolve)(this.config.headers),
1768
+ const { args, warnings, providerOptionsName } = await this.getArgs(
1769
+ options,
1770
+ { isStreaming: true }
1771
+ );
1772
+ const headers = combineHeaders2(
1773
+ this.config.headers ? await resolve2(this.config.headers) : void 0,
1153
1774
  options.headers
1154
1775
  );
1155
- const { responseHeaders, value: response } = await (0, import_provider_utils6.postJsonToApi)({
1776
+ const { responseHeaders, value: response } = await postJsonToApi2({
1156
1777
  url: `${this.config.baseURL}/${getModelPath(
1157
1778
  this.modelId
1158
1779
  )}:streamGenerateContent?alt=sse`,
1159
1780
  headers,
1160
1781
  body: args,
1161
1782
  failedResponseHandler: googleFailedResponseHandler,
1162
- successfulResponseHandler: (0, import_provider_utils6.createEventSourceResponseHandler)(chunkSchema),
1783
+ successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),
1163
1784
  abortSignal: options.abortSignal,
1164
1785
  fetch: this.config.fetch
1165
1786
  });
@@ -1171,6 +1792,7 @@ var GoogleGenerativeAILanguageModel = class {
1171
1792
  let providerMetadata = void 0;
1172
1793
  let lastGroundingMetadata = null;
1173
1794
  let lastUrlContextMetadata = null;
1795
+ let serviceTier = null;
1174
1796
  const generateId3 = this.config.generateId;
1175
1797
  let hasToolCalls = false;
1176
1798
  let currentTextBlockId = null;
@@ -1178,6 +1800,8 @@ var GoogleGenerativeAILanguageModel = class {
1178
1800
  let blockCounter = 0;
1179
1801
  const emittedSourceUrls = /* @__PURE__ */ new Set();
1180
1802
  let lastCodeExecutionToolCallId;
1803
+ let lastServerToolCallId;
1804
+ const activeStreamingToolCalls = [];
1181
1805
  return {
1182
1806
  stream: response.pipeThrough(
1183
1807
  new TransformStream({
@@ -1185,7 +1809,7 @@ var GoogleGenerativeAILanguageModel = class {
1185
1809
  controller.enqueue({ type: "stream-start", warnings });
1186
1810
  },
1187
1811
  transform(chunk, controller) {
1188
- var _a, _b, _c, _d, _e, _f;
1812
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
1189
1813
  if (options.includeRawChunks) {
1190
1814
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1191
1815
  }
@@ -1198,6 +1822,9 @@ var GoogleGenerativeAILanguageModel = class {
1198
1822
  if (usageMetadata != null) {
1199
1823
  usage = usageMetadata;
1200
1824
  }
1825
+ if (value.serviceTier != null) {
1826
+ serviceTier = value.serviceTier;
1827
+ }
1201
1828
  const candidate = (_a = value.candidates) == null ? void 0 : _a[0];
1202
1829
  if (candidate == null) {
1203
1830
  return;
@@ -1325,50 +1952,159 @@ var GoogleGenerativeAILanguageModel = class {
1325
1952
  }
1326
1953
  const hasThought = part.thought === true;
1327
1954
  const hasThoughtSignature = !!part.thoughtSignature;
1328
- const fileMeta = hasThought || hasThoughtSignature ? {
1955
+ const fileMeta = hasThoughtSignature ? {
1329
1956
  [providerOptionsName]: {
1330
- ...hasThought ? { thought: true } : {},
1331
- ...hasThoughtSignature ? { thoughtSignature: part.thoughtSignature } : {}
1957
+ thoughtSignature: part.thoughtSignature
1332
1958
  }
1333
1959
  } : void 0;
1334
1960
  controller.enqueue({
1335
- type: "file",
1961
+ type: hasThought ? "reasoning-file" : "file",
1336
1962
  mediaType: part.inlineData.mimeType,
1337
1963
  data: part.inlineData.data,
1338
1964
  providerMetadata: fileMeta
1339
1965
  });
1966
+ } else if ("toolCall" in part && part.toolCall) {
1967
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : generateId3();
1968
+ lastServerToolCallId = toolCallId;
1969
+ const serverMeta = {
1970
+ [providerOptionsName]: {
1971
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
1972
+ serverToolCallId: toolCallId,
1973
+ serverToolType: part.toolCall.toolType
1974
+ }
1975
+ };
1976
+ controller.enqueue({
1977
+ type: "tool-call",
1978
+ toolCallId,
1979
+ toolName: `server:${part.toolCall.toolType}`,
1980
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
1981
+ providerExecuted: true,
1982
+ dynamic: true,
1983
+ providerMetadata: serverMeta
1984
+ });
1985
+ } else if ("toolResponse" in part && part.toolResponse) {
1986
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : generateId3();
1987
+ const serverMeta = {
1988
+ [providerOptionsName]: {
1989
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
1990
+ serverToolCallId: responseToolCallId,
1991
+ serverToolType: part.toolResponse.toolType
1992
+ }
1993
+ };
1994
+ controller.enqueue({
1995
+ type: "tool-result",
1996
+ toolCallId: responseToolCallId,
1997
+ toolName: `server:${part.toolResponse.toolType}`,
1998
+ result: (_h = part.toolResponse.response) != null ? _h : {},
1999
+ providerMetadata: serverMeta
2000
+ });
2001
+ lastServerToolCallId = void 0;
1340
2002
  }
1341
2003
  }
1342
- const toolCallDeltas = getToolCallsFromParts({
1343
- parts: content.parts,
1344
- generateId: generateId3,
1345
- providerOptionsName
1346
- });
1347
- if (toolCallDeltas != null) {
1348
- for (const toolCall of toolCallDeltas) {
2004
+ for (const part of parts) {
2005
+ if (!("functionCall" in part)) continue;
2006
+ const providerMeta = part.thoughtSignature ? {
2007
+ [providerOptionsName]: {
2008
+ thoughtSignature: part.thoughtSignature
2009
+ }
2010
+ } : void 0;
2011
+ const isStreamingChunk = part.functionCall.partialArgs != null || part.functionCall.name != null && part.functionCall.willContinue === true;
2012
+ const isTerminalChunk = part.functionCall.name == null && part.functionCall.args == null && part.functionCall.partialArgs == null && part.functionCall.willContinue == null;
2013
+ const isCompleteCall = part.functionCall.name != null && part.functionCall.args != null && part.functionCall.partialArgs == null;
2014
+ if (isStreamingChunk) {
2015
+ if (part.functionCall.name != null && part.functionCall.willContinue === true) {
2016
+ const toolCallId = generateId3();
2017
+ const accumulator = new GoogleJSONAccumulator();
2018
+ activeStreamingToolCalls.push({
2019
+ toolCallId,
2020
+ toolName: part.functionCall.name,
2021
+ accumulator,
2022
+ providerMetadata: providerMeta
2023
+ });
2024
+ controller.enqueue({
2025
+ type: "tool-input-start",
2026
+ id: toolCallId,
2027
+ toolName: part.functionCall.name,
2028
+ providerMetadata: providerMeta
2029
+ });
2030
+ if (part.functionCall.partialArgs != null) {
2031
+ const { textDelta } = accumulator.processPartialArgs(
2032
+ part.functionCall.partialArgs
2033
+ );
2034
+ if (textDelta.length > 0) {
2035
+ controller.enqueue({
2036
+ type: "tool-input-delta",
2037
+ id: toolCallId,
2038
+ delta: textDelta,
2039
+ providerMetadata: providerMeta
2040
+ });
2041
+ }
2042
+ }
2043
+ } else if (part.functionCall.partialArgs != null && activeStreamingToolCalls.length > 0) {
2044
+ const active = activeStreamingToolCalls[activeStreamingToolCalls.length - 1];
2045
+ const { textDelta } = active.accumulator.processPartialArgs(
2046
+ part.functionCall.partialArgs
2047
+ );
2048
+ if (textDelta.length > 0) {
2049
+ controller.enqueue({
2050
+ type: "tool-input-delta",
2051
+ id: active.toolCallId,
2052
+ delta: textDelta,
2053
+ providerMetadata: providerMeta
2054
+ });
2055
+ }
2056
+ }
2057
+ } else if (isTerminalChunk && activeStreamingToolCalls.length > 0) {
2058
+ const active = activeStreamingToolCalls.pop();
2059
+ const { finalJSON, closingDelta } = active.accumulator.finalize();
2060
+ if (closingDelta.length > 0) {
2061
+ controller.enqueue({
2062
+ type: "tool-input-delta",
2063
+ id: active.toolCallId,
2064
+ delta: closingDelta,
2065
+ providerMetadata: active.providerMetadata
2066
+ });
2067
+ }
2068
+ controller.enqueue({
2069
+ type: "tool-input-end",
2070
+ id: active.toolCallId,
2071
+ providerMetadata: active.providerMetadata
2072
+ });
2073
+ controller.enqueue({
2074
+ type: "tool-call",
2075
+ toolCallId: active.toolCallId,
2076
+ toolName: active.toolName,
2077
+ input: finalJSON,
2078
+ providerMetadata: active.providerMetadata
2079
+ });
2080
+ hasToolCalls = true;
2081
+ } else if (isCompleteCall) {
2082
+ const toolCallId = generateId3();
2083
+ const toolName = part.functionCall.name;
2084
+ const args2 = typeof part.functionCall.args === "string" ? part.functionCall.args : JSON.stringify((_i = part.functionCall.args) != null ? _i : {});
1349
2085
  controller.enqueue({
1350
2086
  type: "tool-input-start",
1351
- id: toolCall.toolCallId,
1352
- toolName: toolCall.toolName,
1353
- providerMetadata: toolCall.providerMetadata
2087
+ id: toolCallId,
2088
+ toolName,
2089
+ providerMetadata: providerMeta
1354
2090
  });
1355
2091
  controller.enqueue({
1356
2092
  type: "tool-input-delta",
1357
- id: toolCall.toolCallId,
1358
- delta: toolCall.args,
1359
- providerMetadata: toolCall.providerMetadata
2093
+ id: toolCallId,
2094
+ delta: args2,
2095
+ providerMetadata: providerMeta
1360
2096
  });
1361
2097
  controller.enqueue({
1362
2098
  type: "tool-input-end",
1363
- id: toolCall.toolCallId,
1364
- providerMetadata: toolCall.providerMetadata
2099
+ id: toolCallId,
2100
+ providerMetadata: providerMeta
1365
2101
  });
1366
2102
  controller.enqueue({
1367
2103
  type: "tool-call",
1368
- toolCallId: toolCall.toolCallId,
1369
- toolName: toolCall.toolName,
1370
- input: toolCall.args,
1371
- providerMetadata: toolCall.providerMetadata
2104
+ toolCallId,
2105
+ toolName,
2106
+ input: args2,
2107
+ providerMetadata: providerMeta
1372
2108
  });
1373
2109
  hasToolCalls = true;
1374
2110
  }
@@ -1384,15 +2120,15 @@ var GoogleGenerativeAILanguageModel = class {
1384
2120
  };
1385
2121
  providerMetadata = {
1386
2122
  [providerOptionsName]: {
1387
- promptFeedback: (_e = value.promptFeedback) != null ? _e : null,
2123
+ promptFeedback: (_j = value.promptFeedback) != null ? _j : null,
1388
2124
  groundingMetadata: lastGroundingMetadata,
1389
2125
  urlContextMetadata: lastUrlContextMetadata,
1390
- safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null
2126
+ safetyRatings: (_k = candidate.safetyRatings) != null ? _k : null,
2127
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
2128
+ finishMessage: (_l = candidate.finishMessage) != null ? _l : null,
2129
+ serviceTier
1391
2130
  }
1392
2131
  };
1393
- if (usageMetadata != null) {
1394
- providerMetadata[providerOptionsName].usageMetadata = usageMetadata;
1395
- }
1396
2132
  }
1397
2133
  },
1398
2134
  flush(controller) {
@@ -1422,25 +2158,74 @@ var GoogleGenerativeAILanguageModel = class {
1422
2158
  };
1423
2159
  }
1424
2160
  };
1425
- function getToolCallsFromParts({
1426
- parts,
1427
- generateId: generateId3,
1428
- providerOptionsName
2161
+ function isGemini3Model(modelId) {
2162
+ return /gemini-3[\.\-]/i.test(modelId) || /gemini-3$/i.test(modelId);
2163
+ }
2164
+ function getMaxOutputTokensForGemini25Model() {
2165
+ return 65536;
2166
+ }
2167
+ function getMaxThinkingTokensForGemini25Model(modelId) {
2168
+ const id = modelId.toLowerCase();
2169
+ if (id.includes("2.5-pro") || id.includes("gemini-3-pro-image")) {
2170
+ return 32768;
2171
+ }
2172
+ return 24576;
2173
+ }
2174
+ function resolveThinkingConfig({
2175
+ reasoning,
2176
+ modelId,
2177
+ warnings
1429
2178
  }) {
1430
- const functionCallParts = parts == null ? void 0 : parts.filter(
1431
- (part) => "functionCall" in part
1432
- );
1433
- return functionCallParts == null || functionCallParts.length === 0 ? void 0 : functionCallParts.map((part) => ({
1434
- type: "tool-call",
1435
- toolCallId: generateId3(),
1436
- toolName: part.functionCall.name,
1437
- args: JSON.stringify(part.functionCall.args),
1438
- providerMetadata: part.thoughtSignature ? {
1439
- [providerOptionsName]: {
1440
- thoughtSignature: part.thoughtSignature
1441
- }
1442
- } : void 0
1443
- }));
2179
+ if (!isCustomReasoning(reasoning)) {
2180
+ return void 0;
2181
+ }
2182
+ if (isGemini3Model(modelId) && !modelId.includes("gemini-3-pro-image")) {
2183
+ return resolveGemini3ThinkingConfig({ reasoning, warnings });
2184
+ }
2185
+ return resolveGemini25ThinkingConfig({ reasoning, modelId, warnings });
2186
+ }
2187
+ function resolveGemini3ThinkingConfig({
2188
+ reasoning,
2189
+ warnings
2190
+ }) {
2191
+ if (reasoning === "none") {
2192
+ return { thinkingLevel: "minimal" };
2193
+ }
2194
+ const thinkingLevel = mapReasoningToProviderEffort({
2195
+ reasoning,
2196
+ effortMap: {
2197
+ minimal: "minimal",
2198
+ low: "low",
2199
+ medium: "medium",
2200
+ high: "high",
2201
+ xhigh: "high"
2202
+ },
2203
+ warnings
2204
+ });
2205
+ if (thinkingLevel == null) {
2206
+ return void 0;
2207
+ }
2208
+ return { thinkingLevel };
2209
+ }
2210
+ function resolveGemini25ThinkingConfig({
2211
+ reasoning,
2212
+ modelId,
2213
+ warnings
2214
+ }) {
2215
+ if (reasoning === "none") {
2216
+ return { thinkingBudget: 0 };
2217
+ }
2218
+ const thinkingBudget = mapReasoningToProviderBudget({
2219
+ reasoning,
2220
+ maxOutputTokens: getMaxOutputTokensForGemini25Model(),
2221
+ maxReasoningBudget: getMaxThinkingTokensForGemini25Model(modelId),
2222
+ minReasoningBudget: 0,
2223
+ warnings
2224
+ });
2225
+ if (thinkingBudget == null) {
2226
+ return void 0;
2227
+ }
2228
+ return { thinkingBudget };
1444
2229
  }
1445
2230
  function extractSources({
1446
2231
  groundingMetadata,
@@ -1536,231 +2321,286 @@ function extractSources({
1536
2321
  }
1537
2322
  return sources.length > 0 ? sources : void 0;
1538
2323
  }
1539
- var getGroundingMetadataSchema = () => import_v45.z.object({
1540
- webSearchQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1541
- imageSearchQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1542
- retrievalQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1543
- searchEntryPoint: import_v45.z.object({ renderedContent: import_v45.z.string() }).nullish(),
1544
- groundingChunks: import_v45.z.array(
1545
- import_v45.z.object({
1546
- web: import_v45.z.object({ uri: import_v45.z.string(), title: import_v45.z.string().nullish() }).nullish(),
1547
- image: import_v45.z.object({
1548
- sourceUri: import_v45.z.string(),
1549
- imageUri: import_v45.z.string(),
1550
- title: import_v45.z.string().nullish(),
1551
- domain: import_v45.z.string().nullish()
2324
+ var getGroundingMetadataSchema = () => z5.object({
2325
+ webSearchQueries: z5.array(z5.string()).nullish(),
2326
+ imageSearchQueries: z5.array(z5.string()).nullish(),
2327
+ retrievalQueries: z5.array(z5.string()).nullish(),
2328
+ searchEntryPoint: z5.object({ renderedContent: z5.string() }).nullish(),
2329
+ groundingChunks: z5.array(
2330
+ z5.object({
2331
+ web: z5.object({ uri: z5.string(), title: z5.string().nullish() }).nullish(),
2332
+ image: z5.object({
2333
+ sourceUri: z5.string(),
2334
+ imageUri: z5.string(),
2335
+ title: z5.string().nullish(),
2336
+ domain: z5.string().nullish()
1552
2337
  }).nullish(),
1553
- retrievedContext: import_v45.z.object({
1554
- uri: import_v45.z.string().nullish(),
1555
- title: import_v45.z.string().nullish(),
1556
- text: import_v45.z.string().nullish(),
1557
- fileSearchStore: import_v45.z.string().nullish()
2338
+ retrievedContext: z5.object({
2339
+ uri: z5.string().nullish(),
2340
+ title: z5.string().nullish(),
2341
+ text: z5.string().nullish(),
2342
+ fileSearchStore: z5.string().nullish()
1558
2343
  }).nullish(),
1559
- maps: import_v45.z.object({
1560
- uri: import_v45.z.string().nullish(),
1561
- title: import_v45.z.string().nullish(),
1562
- text: import_v45.z.string().nullish(),
1563
- placeId: import_v45.z.string().nullish()
2344
+ maps: z5.object({
2345
+ uri: z5.string().nullish(),
2346
+ title: z5.string().nullish(),
2347
+ text: z5.string().nullish(),
2348
+ placeId: z5.string().nullish()
1564
2349
  }).nullish()
1565
2350
  })
1566
2351
  ).nullish(),
1567
- groundingSupports: import_v45.z.array(
1568
- import_v45.z.object({
1569
- segment: import_v45.z.object({
1570
- startIndex: import_v45.z.number().nullish(),
1571
- endIndex: import_v45.z.number().nullish(),
1572
- text: import_v45.z.string().nullish()
2352
+ groundingSupports: z5.array(
2353
+ z5.object({
2354
+ segment: z5.object({
2355
+ startIndex: z5.number().nullish(),
2356
+ endIndex: z5.number().nullish(),
2357
+ text: z5.string().nullish()
1573
2358
  }).nullish(),
1574
- segment_text: import_v45.z.string().nullish(),
1575
- groundingChunkIndices: import_v45.z.array(import_v45.z.number()).nullish(),
1576
- supportChunkIndices: import_v45.z.array(import_v45.z.number()).nullish(),
1577
- confidenceScores: import_v45.z.array(import_v45.z.number()).nullish(),
1578
- confidenceScore: import_v45.z.array(import_v45.z.number()).nullish()
2359
+ segment_text: z5.string().nullish(),
2360
+ groundingChunkIndices: z5.array(z5.number()).nullish(),
2361
+ supportChunkIndices: z5.array(z5.number()).nullish(),
2362
+ confidenceScores: z5.array(z5.number()).nullish(),
2363
+ confidenceScore: z5.array(z5.number()).nullish()
1579
2364
  })
1580
2365
  ).nullish(),
1581
- retrievalMetadata: import_v45.z.union([
1582
- import_v45.z.object({
1583
- webDynamicRetrievalScore: import_v45.z.number()
2366
+ retrievalMetadata: z5.union([
2367
+ z5.object({
2368
+ webDynamicRetrievalScore: z5.number()
1584
2369
  }),
1585
- import_v45.z.object({})
2370
+ z5.object({})
1586
2371
  ]).nullish()
1587
2372
  });
1588
- var getContentSchema = () => import_v45.z.object({
1589
- parts: import_v45.z.array(
1590
- import_v45.z.union([
2373
+ var partialArgSchema = z5.object({
2374
+ jsonPath: z5.string(),
2375
+ stringValue: z5.string().nullish(),
2376
+ numberValue: z5.number().nullish(),
2377
+ boolValue: z5.boolean().nullish(),
2378
+ nullValue: z5.unknown().nullish(),
2379
+ willContinue: z5.boolean().nullish()
2380
+ });
2381
+ var getContentSchema = () => z5.object({
2382
+ parts: z5.array(
2383
+ z5.union([
1591
2384
  // note: order matters since text can be fully empty
1592
- import_v45.z.object({
1593
- functionCall: import_v45.z.object({
1594
- name: import_v45.z.string(),
1595
- args: import_v45.z.unknown()
2385
+ z5.object({
2386
+ functionCall: z5.object({
2387
+ name: z5.string().nullish(),
2388
+ args: z5.unknown().nullish(),
2389
+ partialArgs: z5.array(partialArgSchema).nullish(),
2390
+ willContinue: z5.boolean().nullish()
1596
2391
  }),
1597
- thoughtSignature: import_v45.z.string().nullish()
2392
+ thoughtSignature: z5.string().nullish()
1598
2393
  }),
1599
- import_v45.z.object({
1600
- inlineData: import_v45.z.object({
1601
- mimeType: import_v45.z.string(),
1602
- data: import_v45.z.string()
2394
+ z5.object({
2395
+ inlineData: z5.object({
2396
+ mimeType: z5.string(),
2397
+ data: z5.string()
1603
2398
  }),
1604
- thought: import_v45.z.boolean().nullish(),
1605
- thoughtSignature: import_v45.z.string().nullish()
2399
+ thought: z5.boolean().nullish(),
2400
+ thoughtSignature: z5.string().nullish()
1606
2401
  }),
1607
- import_v45.z.object({
1608
- executableCode: import_v45.z.object({
1609
- language: import_v45.z.string(),
1610
- code: import_v45.z.string()
2402
+ z5.object({
2403
+ toolCall: z5.object({
2404
+ toolType: z5.string(),
2405
+ args: z5.unknown().nullish(),
2406
+ id: z5.string()
2407
+ }),
2408
+ thoughtSignature: z5.string().nullish()
2409
+ }),
2410
+ z5.object({
2411
+ toolResponse: z5.object({
2412
+ toolType: z5.string(),
2413
+ response: z5.unknown().nullish(),
2414
+ id: z5.string()
2415
+ }),
2416
+ thoughtSignature: z5.string().nullish()
2417
+ }),
2418
+ z5.object({
2419
+ executableCode: z5.object({
2420
+ language: z5.string(),
2421
+ code: z5.string()
1611
2422
  }).nullish(),
1612
- codeExecutionResult: import_v45.z.object({
1613
- outcome: import_v45.z.string(),
1614
- output: import_v45.z.string().nullish()
2423
+ codeExecutionResult: z5.object({
2424
+ outcome: z5.string(),
2425
+ output: z5.string().nullish()
1615
2426
  }).nullish(),
1616
- text: import_v45.z.string().nullish(),
1617
- thought: import_v45.z.boolean().nullish(),
1618
- thoughtSignature: import_v45.z.string().nullish()
2427
+ text: z5.string().nullish(),
2428
+ thought: z5.boolean().nullish(),
2429
+ thoughtSignature: z5.string().nullish()
1619
2430
  })
1620
2431
  ])
1621
2432
  ).nullish()
1622
2433
  });
1623
- var getSafetyRatingSchema = () => import_v45.z.object({
1624
- category: import_v45.z.string().nullish(),
1625
- probability: import_v45.z.string().nullish(),
1626
- probabilityScore: import_v45.z.number().nullish(),
1627
- severity: import_v45.z.string().nullish(),
1628
- severityScore: import_v45.z.number().nullish(),
1629
- blocked: import_v45.z.boolean().nullish()
2434
+ var getSafetyRatingSchema = () => z5.object({
2435
+ category: z5.string().nullish(),
2436
+ probability: z5.string().nullish(),
2437
+ probabilityScore: z5.number().nullish(),
2438
+ severity: z5.string().nullish(),
2439
+ severityScore: z5.number().nullish(),
2440
+ blocked: z5.boolean().nullish()
1630
2441
  });
1631
- var usageSchema = import_v45.z.object({
1632
- cachedContentTokenCount: import_v45.z.number().nullish(),
1633
- thoughtsTokenCount: import_v45.z.number().nullish(),
1634
- promptTokenCount: import_v45.z.number().nullish(),
1635
- candidatesTokenCount: import_v45.z.number().nullish(),
1636
- totalTokenCount: import_v45.z.number().nullish(),
2442
+ var tokenDetailsSchema = z5.array(
2443
+ z5.object({
2444
+ modality: z5.string(),
2445
+ tokenCount: z5.number()
2446
+ })
2447
+ ).nullish();
2448
+ var usageSchema = z5.object({
2449
+ cachedContentTokenCount: z5.number().nullish(),
2450
+ thoughtsTokenCount: z5.number().nullish(),
2451
+ promptTokenCount: z5.number().nullish(),
2452
+ candidatesTokenCount: z5.number().nullish(),
2453
+ totalTokenCount: z5.number().nullish(),
1637
2454
  // https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/GenerateContentResponse#TrafficType
1638
- trafficType: import_v45.z.string().nullish()
2455
+ trafficType: z5.string().nullish(),
2456
+ // https://ai.google.dev/api/generate-content#Modality
2457
+ promptTokensDetails: tokenDetailsSchema,
2458
+ candidatesTokensDetails: tokenDetailsSchema
1639
2459
  });
1640
- var getUrlContextMetadataSchema = () => import_v45.z.object({
1641
- urlMetadata: import_v45.z.array(
1642
- import_v45.z.object({
1643
- retrievedUrl: import_v45.z.string(),
1644
- urlRetrievalStatus: import_v45.z.string()
2460
+ var getUrlContextMetadataSchema = () => z5.object({
2461
+ urlMetadata: z5.array(
2462
+ z5.object({
2463
+ retrievedUrl: z5.string(),
2464
+ urlRetrievalStatus: z5.string()
1645
2465
  })
1646
2466
  ).nullish()
1647
2467
  });
1648
- var responseSchema = (0, import_provider_utils6.lazySchema)(
1649
- () => (0, import_provider_utils6.zodSchema)(
1650
- import_v45.z.object({
1651
- candidates: import_v45.z.array(
1652
- import_v45.z.object({
1653
- content: getContentSchema().nullish().or(import_v45.z.object({}).strict()),
1654
- finishReason: import_v45.z.string().nullish(),
1655
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
2468
+ var responseSchema = lazySchema5(
2469
+ () => zodSchema5(
2470
+ z5.object({
2471
+ candidates: z5.array(
2472
+ z5.object({
2473
+ content: getContentSchema().nullish().or(z5.object({}).strict()),
2474
+ finishReason: z5.string().nullish(),
2475
+ finishMessage: z5.string().nullish(),
2476
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1656
2477
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1657
2478
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
1658
2479
  })
1659
2480
  ),
1660
2481
  usageMetadata: usageSchema.nullish(),
1661
- promptFeedback: import_v45.z.object({
1662
- blockReason: import_v45.z.string().nullish(),
1663
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish()
1664
- }).nullish()
2482
+ promptFeedback: z5.object({
2483
+ blockReason: z5.string().nullish(),
2484
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
2485
+ }).nullish(),
2486
+ serviceTier: z5.string().nullish()
1665
2487
  })
1666
2488
  )
1667
2489
  );
1668
- var chunkSchema = (0, import_provider_utils6.lazySchema)(
1669
- () => (0, import_provider_utils6.zodSchema)(
1670
- import_v45.z.object({
1671
- candidates: import_v45.z.array(
1672
- import_v45.z.object({
2490
+ var chunkSchema = lazySchema5(
2491
+ () => zodSchema5(
2492
+ z5.object({
2493
+ candidates: z5.array(
2494
+ z5.object({
1673
2495
  content: getContentSchema().nullish(),
1674
- finishReason: import_v45.z.string().nullish(),
1675
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
2496
+ finishReason: z5.string().nullish(),
2497
+ finishMessage: z5.string().nullish(),
2498
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1676
2499
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1677
2500
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
1678
2501
  })
1679
2502
  ).nullish(),
1680
2503
  usageMetadata: usageSchema.nullish(),
1681
- promptFeedback: import_v45.z.object({
1682
- blockReason: import_v45.z.string().nullish(),
1683
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish()
1684
- }).nullish()
2504
+ promptFeedback: z5.object({
2505
+ blockReason: z5.string().nullish(),
2506
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
2507
+ }).nullish(),
2508
+ serviceTier: z5.string().nullish()
1685
2509
  })
1686
2510
  )
1687
2511
  );
1688
2512
 
1689
2513
  // src/tool/code-execution.ts
1690
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
1691
- var import_v46 = require("zod/v4");
1692
- var codeExecution = (0, import_provider_utils7.createProviderToolFactoryWithOutputSchema)({
2514
+ import { createProviderToolFactoryWithOutputSchema } from "@ai-sdk/provider-utils";
2515
+ import { z as z6 } from "zod/v4";
2516
+ var codeExecution = createProviderToolFactoryWithOutputSchema({
1693
2517
  id: "google.code_execution",
1694
- inputSchema: import_v46.z.object({
1695
- language: import_v46.z.string().describe("The programming language of the code."),
1696
- code: import_v46.z.string().describe("The code to be executed.")
2518
+ inputSchema: z6.object({
2519
+ language: z6.string().describe("The programming language of the code."),
2520
+ code: z6.string().describe("The code to be executed.")
1697
2521
  }),
1698
- outputSchema: import_v46.z.object({
1699
- outcome: import_v46.z.string().describe('The outcome of the execution (e.g., "OUTCOME_OK").'),
1700
- output: import_v46.z.string().describe("The output from the code execution.")
2522
+ outputSchema: z6.object({
2523
+ outcome: z6.string().describe('The outcome of the execution (e.g., "OUTCOME_OK").'),
2524
+ output: z6.string().describe("The output from the code execution.")
1701
2525
  })
1702
2526
  });
1703
2527
 
1704
2528
  // src/tool/enterprise-web-search.ts
1705
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
1706
- var import_v47 = require("zod/v4");
1707
- var enterpriseWebSearch = (0, import_provider_utils8.createProviderToolFactory)({
2529
+ import {
2530
+ createProviderToolFactory,
2531
+ lazySchema as lazySchema6,
2532
+ zodSchema as zodSchema6
2533
+ } from "@ai-sdk/provider-utils";
2534
+ import { z as z7 } from "zod/v4";
2535
+ var enterpriseWebSearch = createProviderToolFactory({
1708
2536
  id: "google.enterprise_web_search",
1709
- inputSchema: (0, import_provider_utils8.lazySchema)(() => (0, import_provider_utils8.zodSchema)(import_v47.z.object({})))
2537
+ inputSchema: lazySchema6(() => zodSchema6(z7.object({})))
1710
2538
  });
1711
2539
 
1712
2540
  // src/tool/file-search.ts
1713
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
1714
- var import_v48 = require("zod/v4");
1715
- var fileSearchArgsBaseSchema = import_v48.z.object({
2541
+ import {
2542
+ createProviderToolFactory as createProviderToolFactory2,
2543
+ lazySchema as lazySchema7,
2544
+ zodSchema as zodSchema7
2545
+ } from "@ai-sdk/provider-utils";
2546
+ import { z as z8 } from "zod/v4";
2547
+ var fileSearchArgsBaseSchema = z8.object({
1716
2548
  /** The names of the file_search_stores to retrieve from.
1717
2549
  * Example: `fileSearchStores/my-file-search-store-123`
1718
2550
  */
1719
- fileSearchStoreNames: import_v48.z.array(import_v48.z.string()).describe(
2551
+ fileSearchStoreNames: z8.array(z8.string()).describe(
1720
2552
  "The names of the file_search_stores to retrieve from. Example: `fileSearchStores/my-file-search-store-123`"
1721
2553
  ),
1722
2554
  /** The number of file search retrieval chunks to retrieve. */
1723
- topK: import_v48.z.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
2555
+ topK: z8.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
1724
2556
  /** Metadata filter to apply to the file search retrieval documents.
1725
2557
  * See https://google.aip.dev/160 for the syntax of the filter expression.
1726
2558
  */
1727
- metadataFilter: import_v48.z.string().describe(
2559
+ metadataFilter: z8.string().describe(
1728
2560
  "Metadata filter to apply to the file search retrieval documents. See https://google.aip.dev/160 for the syntax of the filter expression."
1729
2561
  ).optional()
1730
2562
  }).passthrough();
1731
- var fileSearchArgsSchema = (0, import_provider_utils9.lazySchema)(
1732
- () => (0, import_provider_utils9.zodSchema)(fileSearchArgsBaseSchema)
2563
+ var fileSearchArgsSchema = lazySchema7(
2564
+ () => zodSchema7(fileSearchArgsBaseSchema)
1733
2565
  );
1734
- var fileSearch = (0, import_provider_utils9.createProviderToolFactory)({
2566
+ var fileSearch = createProviderToolFactory2({
1735
2567
  id: "google.file_search",
1736
2568
  inputSchema: fileSearchArgsSchema
1737
2569
  });
1738
2570
 
1739
2571
  // src/tool/google-maps.ts
1740
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
1741
- var import_v49 = require("zod/v4");
1742
- var googleMaps = (0, import_provider_utils10.createProviderToolFactory)({
2572
+ import {
2573
+ createProviderToolFactory as createProviderToolFactory3,
2574
+ lazySchema as lazySchema8,
2575
+ zodSchema as zodSchema8
2576
+ } from "@ai-sdk/provider-utils";
2577
+ import { z as z9 } from "zod/v4";
2578
+ var googleMaps = createProviderToolFactory3({
1743
2579
  id: "google.google_maps",
1744
- inputSchema: (0, import_provider_utils10.lazySchema)(() => (0, import_provider_utils10.zodSchema)(import_v49.z.object({})))
2580
+ inputSchema: lazySchema8(() => zodSchema8(z9.object({})))
1745
2581
  });
1746
2582
 
1747
2583
  // src/tool/google-search.ts
1748
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
1749
- var import_v410 = require("zod/v4");
1750
- var googleSearchToolArgsBaseSchema = import_v410.z.object({
1751
- searchTypes: import_v410.z.object({
1752
- webSearch: import_v410.z.object({}).optional(),
1753
- imageSearch: import_v410.z.object({}).optional()
2584
+ import {
2585
+ createProviderToolFactory as createProviderToolFactory4,
2586
+ lazySchema as lazySchema9,
2587
+ zodSchema as zodSchema9
2588
+ } from "@ai-sdk/provider-utils";
2589
+ import { z as z10 } from "zod/v4";
2590
+ var googleSearchToolArgsBaseSchema = z10.object({
2591
+ searchTypes: z10.object({
2592
+ webSearch: z10.object({}).optional(),
2593
+ imageSearch: z10.object({}).optional()
1754
2594
  }).optional(),
1755
- timeRangeFilter: import_v410.z.object({
1756
- startTime: import_v410.z.string(),
1757
- endTime: import_v410.z.string()
2595
+ timeRangeFilter: z10.object({
2596
+ startTime: z10.string(),
2597
+ endTime: z10.string()
1758
2598
  }).optional()
1759
2599
  }).passthrough();
1760
- var googleSearchToolArgsSchema = (0, import_provider_utils11.lazySchema)(
1761
- () => (0, import_provider_utils11.zodSchema)(googleSearchToolArgsBaseSchema)
2600
+ var googleSearchToolArgsSchema = lazySchema9(
2601
+ () => zodSchema9(googleSearchToolArgsBaseSchema)
1762
2602
  );
1763
- var googleSearch = (0, import_provider_utils11.createProviderToolFactory)(
2603
+ var googleSearch = createProviderToolFactory4(
1764
2604
  {
1765
2605
  id: "google.google_search",
1766
2606
  inputSchema: googleSearchToolArgsSchema
@@ -1768,21 +2608,25 @@ var googleSearch = (0, import_provider_utils11.createProviderToolFactory)(
1768
2608
  );
1769
2609
 
1770
2610
  // src/tool/url-context.ts
1771
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
1772
- var import_v411 = require("zod/v4");
1773
- var urlContext = (0, import_provider_utils12.createProviderToolFactory)({
2611
+ import {
2612
+ createProviderToolFactory as createProviderToolFactory5,
2613
+ lazySchema as lazySchema10,
2614
+ zodSchema as zodSchema10
2615
+ } from "@ai-sdk/provider-utils";
2616
+ import { z as z11 } from "zod/v4";
2617
+ var urlContext = createProviderToolFactory5({
1774
2618
  id: "google.url_context",
1775
- inputSchema: (0, import_provider_utils12.lazySchema)(() => (0, import_provider_utils12.zodSchema)(import_v411.z.object({})))
2619
+ inputSchema: lazySchema10(() => zodSchema10(z11.object({})))
1776
2620
  });
1777
2621
 
1778
2622
  // src/tool/vertex-rag-store.ts
1779
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
1780
- var import_v412 = require("zod/v4");
1781
- var vertexRagStore = (0, import_provider_utils13.createProviderToolFactory)({
2623
+ import { createProviderToolFactory as createProviderToolFactory6 } from "@ai-sdk/provider-utils";
2624
+ import { z as z12 } from "zod/v4";
2625
+ var vertexRagStore = createProviderToolFactory6({
1782
2626
  id: "google.vertex_rag_store",
1783
- inputSchema: import_v412.z.object({
1784
- ragCorpus: import_v412.z.string(),
1785
- topK: import_v412.z.number().optional()
2627
+ inputSchema: z12.object({
2628
+ ragCorpus: z12.string(),
2629
+ topK: z12.number().optional()
1786
2630
  })
1787
2631
  });
1788
2632
 
@@ -1847,14 +2691,40 @@ var googleTools = {
1847
2691
  };
1848
2692
 
1849
2693
  // src/google-generative-ai-image-model.ts
1850
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
1851
- var import_v413 = require("zod/v4");
1852
- var GoogleGenerativeAIImageModel = class {
2694
+ import {
2695
+ combineHeaders as combineHeaders3,
2696
+ convertToBase64 as convertToBase642,
2697
+ createJsonResponseHandler as createJsonResponseHandler3,
2698
+ generateId as defaultGenerateId,
2699
+ lazySchema as lazySchema11,
2700
+ parseProviderOptions as parseProviderOptions3,
2701
+ postJsonToApi as postJsonToApi3,
2702
+ resolve as resolve3,
2703
+ serializeModelOptions as serializeModelOptions3,
2704
+ WORKFLOW_SERIALIZE as WORKFLOW_SERIALIZE3,
2705
+ WORKFLOW_DESERIALIZE as WORKFLOW_DESERIALIZE3,
2706
+ zodSchema as zodSchema11
2707
+ } from "@ai-sdk/provider-utils";
2708
+ import { z as z13 } from "zod/v4";
2709
+ var GoogleGenerativeAIImageModel = class _GoogleGenerativeAIImageModel {
1853
2710
  constructor(modelId, settings, config) {
1854
2711
  this.modelId = modelId;
1855
2712
  this.settings = settings;
1856
2713
  this.config = config;
1857
- this.specificationVersion = "v3";
2714
+ this.specificationVersion = "v4";
2715
+ }
2716
+ static [WORKFLOW_SERIALIZE3](model) {
2717
+ return serializeModelOptions3({
2718
+ modelId: model.modelId,
2719
+ config: model.config
2720
+ });
2721
+ }
2722
+ static [WORKFLOW_DESERIALIZE3](options) {
2723
+ return new _GoogleGenerativeAIImageModel(
2724
+ options.modelId,
2725
+ {},
2726
+ options.config
2727
+ );
1858
2728
  }
1859
2729
  get maxImagesPerCall() {
1860
2730
  if (this.settings.maxImagesPerCall != null) {
@@ -1913,7 +2783,7 @@ var GoogleGenerativeAIImageModel = class {
1913
2783
  details: "This model does not support the `seed` option through this provider."
1914
2784
  });
1915
2785
  }
1916
- const googleOptions = await (0, import_provider_utils14.parseProviderOptions)({
2786
+ const googleOptions = await parseProviderOptions3({
1917
2787
  provider: "google",
1918
2788
  providerOptions,
1919
2789
  schema: googleImageModelOptionsSchema
@@ -1932,12 +2802,15 @@ var GoogleGenerativeAIImageModel = class {
1932
2802
  instances: [{ prompt }],
1933
2803
  parameters
1934
2804
  };
1935
- const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
2805
+ const { responseHeaders, value: response } = await postJsonToApi3({
1936
2806
  url: `${this.config.baseURL}/models/${this.modelId}:predict`,
1937
- headers: (0, import_provider_utils14.combineHeaders)(await (0, import_provider_utils14.resolve)(this.config.headers), headers),
2807
+ headers: combineHeaders3(
2808
+ this.config.headers ? await resolve3(this.config.headers) : void 0,
2809
+ headers
2810
+ ),
1938
2811
  body,
1939
2812
  failedResponseHandler: googleFailedResponseHandler,
1940
- successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
2813
+ successfulResponseHandler: createJsonResponseHandler3(
1941
2814
  googleImageResponseSchema
1942
2815
  ),
1943
2816
  abortSignal,
@@ -2023,7 +2896,7 @@ var GoogleGenerativeAIImageModel = class {
2023
2896
  baseURL: this.config.baseURL,
2024
2897
  headers: (_a = this.config.headers) != null ? _a : {},
2025
2898
  fetch: this.config.fetch,
2026
- generateId: (_b = this.config.generateId) != null ? _b : import_provider_utils14.generateId
2899
+ generateId: (_b = this.config.generateId) != null ? _b : defaultGenerateId
2027
2900
  });
2028
2901
  const result = await languageModel.doGenerate({
2029
2902
  prompt: languageModelPrompt,
@@ -2044,7 +2917,7 @@ var GoogleGenerativeAIImageModel = class {
2044
2917
  const images = [];
2045
2918
  for (const part of result.content) {
2046
2919
  if (part.type === "file" && part.mediaType.startsWith("image/")) {
2047
- images.push((0, import_provider_utils14.convertToBase64)(part.data));
2920
+ images.push(convertToBase642(part.data));
2048
2921
  }
2049
2922
  }
2050
2923
  return {
@@ -2071,31 +2944,207 @@ var GoogleGenerativeAIImageModel = class {
2071
2944
  function isGeminiModel(modelId) {
2072
2945
  return modelId.startsWith("gemini-");
2073
2946
  }
2074
- var googleImageResponseSchema = (0, import_provider_utils14.lazySchema)(
2075
- () => (0, import_provider_utils14.zodSchema)(
2076
- import_v413.z.object({
2077
- predictions: import_v413.z.array(import_v413.z.object({ bytesBase64Encoded: import_v413.z.string() })).default([])
2947
+ var googleImageResponseSchema = lazySchema11(
2948
+ () => zodSchema11(
2949
+ z13.object({
2950
+ predictions: z13.array(z13.object({ bytesBase64Encoded: z13.string() })).default([])
2078
2951
  })
2079
2952
  )
2080
2953
  );
2081
- var googleImageModelOptionsSchema = (0, import_provider_utils14.lazySchema)(
2082
- () => (0, import_provider_utils14.zodSchema)(
2083
- import_v413.z.object({
2084
- personGeneration: import_v413.z.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2085
- aspectRatio: import_v413.z.enum(["1:1", "3:4", "4:3", "9:16", "16:9"]).nullish()
2954
+ var googleImageModelOptionsSchema = lazySchema11(
2955
+ () => zodSchema11(
2956
+ z13.object({
2957
+ personGeneration: z13.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2958
+ aspectRatio: z13.enum(["1:1", "3:4", "4:3", "9:16", "16:9"]).nullish()
2959
+ })
2960
+ )
2961
+ );
2962
+
2963
+ // src/google-generative-ai-files.ts
2964
+ import {
2965
+ AISDKError
2966
+ } from "@ai-sdk/provider";
2967
+ import {
2968
+ combineHeaders as combineHeaders4,
2969
+ createJsonResponseHandler as createJsonResponseHandler4,
2970
+ delay,
2971
+ lazySchema as lazySchema12,
2972
+ parseProviderOptions as parseProviderOptions4,
2973
+ zodSchema as zodSchema12,
2974
+ getFromApi
2975
+ } from "@ai-sdk/provider-utils";
2976
+ import { z as z14 } from "zod/v4";
2977
+ var GoogleGenerativeAIFiles = class {
2978
+ constructor(config) {
2979
+ this.config = config;
2980
+ this.specificationVersion = "v4";
2981
+ }
2982
+ get provider() {
2983
+ return this.config.provider;
2984
+ }
2985
+ async uploadFile(options) {
2986
+ var _a, _b, _c, _d;
2987
+ const googleOptions = await parseProviderOptions4({
2988
+ provider: "google",
2989
+ providerOptions: options.providerOptions,
2990
+ schema: googleFilesUploadOptionsSchema
2991
+ });
2992
+ const resolvedHeaders = this.config.headers();
2993
+ const fetchFn = (_a = this.config.fetch) != null ? _a : globalThis.fetch;
2994
+ const warnings = [];
2995
+ if (options.filename != null) {
2996
+ warnings.push({ type: "unsupported", feature: "filename" });
2997
+ }
2998
+ const data = options.data;
2999
+ const fileBytes = data instanceof Uint8Array ? data : Uint8Array.from(atob(data), (c) => c.charCodeAt(0));
3000
+ const mediaType = options.mediaType;
3001
+ const displayName = googleOptions == null ? void 0 : googleOptions.displayName;
3002
+ const baseOrigin = this.config.baseURL.replace(/\/v1beta$/, "");
3003
+ const initResponse = await fetchFn(`${baseOrigin}/upload/v1beta/files`, {
3004
+ method: "POST",
3005
+ headers: {
3006
+ ...resolvedHeaders,
3007
+ "X-Goog-Upload-Protocol": "resumable",
3008
+ "X-Goog-Upload-Command": "start",
3009
+ "X-Goog-Upload-Header-Content-Length": String(fileBytes.length),
3010
+ "X-Goog-Upload-Header-Content-Type": mediaType,
3011
+ "Content-Type": "application/json"
3012
+ },
3013
+ body: JSON.stringify({
3014
+ file: {
3015
+ ...displayName != null ? { display_name: displayName } : {}
3016
+ }
3017
+ })
3018
+ });
3019
+ if (!initResponse.ok) {
3020
+ const errorBody = await initResponse.text();
3021
+ throw new AISDKError({
3022
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3023
+ message: `Failed to initiate resumable upload: ${initResponse.status} ${errorBody}`
3024
+ });
3025
+ }
3026
+ const uploadUrl = initResponse.headers.get("x-goog-upload-url");
3027
+ if (!uploadUrl) {
3028
+ throw new AISDKError({
3029
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3030
+ message: "No upload URL returned from initiation request"
3031
+ });
3032
+ }
3033
+ const uploadResponse = await fetchFn(uploadUrl, {
3034
+ method: "POST",
3035
+ headers: {
3036
+ "Content-Length": String(fileBytes.length),
3037
+ "X-Goog-Upload-Offset": "0",
3038
+ "X-Goog-Upload-Command": "upload, finalize"
3039
+ },
3040
+ body: fileBytes
3041
+ });
3042
+ if (!uploadResponse.ok) {
3043
+ const errorBody = await uploadResponse.text();
3044
+ throw new AISDKError({
3045
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3046
+ message: `Failed to upload file data: ${uploadResponse.status} ${errorBody}`
3047
+ });
3048
+ }
3049
+ const uploadResult = await uploadResponse.json();
3050
+ let file = uploadResult.file;
3051
+ const pollIntervalMs = (_b = googleOptions == null ? void 0 : googleOptions.pollIntervalMs) != null ? _b : 2e3;
3052
+ const pollTimeoutMs = (_c = googleOptions == null ? void 0 : googleOptions.pollTimeoutMs) != null ? _c : 3e5;
3053
+ const startTime = Date.now();
3054
+ while (file.state === "PROCESSING") {
3055
+ if (Date.now() - startTime > pollTimeoutMs) {
3056
+ throw new AISDKError({
3057
+ name: "GOOGLE_FILES_UPLOAD_TIMEOUT",
3058
+ message: `File processing timed out after ${pollTimeoutMs}ms`
3059
+ });
3060
+ }
3061
+ await delay(pollIntervalMs);
3062
+ const { value: fileStatus } = await getFromApi({
3063
+ url: `${this.config.baseURL}/${file.name}`,
3064
+ headers: combineHeaders4(resolvedHeaders),
3065
+ successfulResponseHandler: createJsonResponseHandler4(
3066
+ googleFileResponseSchema
3067
+ ),
3068
+ failedResponseHandler: googleFailedResponseHandler,
3069
+ fetch: this.config.fetch
3070
+ });
3071
+ file = fileStatus;
3072
+ }
3073
+ if (file.state === "FAILED") {
3074
+ throw new AISDKError({
3075
+ name: "GOOGLE_FILES_UPLOAD_FAILED",
3076
+ message: `File processing failed for ${file.name}`
3077
+ });
3078
+ }
3079
+ return {
3080
+ warnings,
3081
+ providerReference: { google: file.uri },
3082
+ mediaType: (_d = file.mimeType) != null ? _d : options.mediaType,
3083
+ providerMetadata: {
3084
+ google: {
3085
+ name: file.name,
3086
+ displayName: file.displayName,
3087
+ mimeType: file.mimeType,
3088
+ sizeBytes: file.sizeBytes,
3089
+ state: file.state,
3090
+ uri: file.uri,
3091
+ ...file.createTime != null ? { createTime: file.createTime } : {},
3092
+ ...file.updateTime != null ? { updateTime: file.updateTime } : {},
3093
+ ...file.expirationTime != null ? { expirationTime: file.expirationTime } : {},
3094
+ ...file.sha256Hash != null ? { sha256Hash: file.sha256Hash } : {}
3095
+ }
3096
+ }
3097
+ };
3098
+ }
3099
+ };
3100
+ var googleFileResponseSchema = lazySchema12(
3101
+ () => zodSchema12(
3102
+ z14.object({
3103
+ name: z14.string(),
3104
+ displayName: z14.string().nullish(),
3105
+ mimeType: z14.string(),
3106
+ sizeBytes: z14.string().nullish(),
3107
+ createTime: z14.string().nullish(),
3108
+ updateTime: z14.string().nullish(),
3109
+ expirationTime: z14.string().nullish(),
3110
+ sha256Hash: z14.string().nullish(),
3111
+ uri: z14.string(),
3112
+ state: z14.string()
2086
3113
  })
2087
3114
  )
2088
3115
  );
3116
+ var googleFilesUploadOptionsSchema = lazySchema12(
3117
+ () => zodSchema12(
3118
+ z14.object({
3119
+ displayName: z14.string().nullish(),
3120
+ pollIntervalMs: z14.number().positive().nullish(),
3121
+ pollTimeoutMs: z14.number().positive().nullish()
3122
+ }).passthrough()
3123
+ )
3124
+ );
2089
3125
 
2090
3126
  // src/google-generative-ai-video-model.ts
2091
- var import_provider4 = require("@ai-sdk/provider");
2092
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
2093
- var import_v414 = require("zod/v4");
3127
+ import {
3128
+ AISDKError as AISDKError2
3129
+ } from "@ai-sdk/provider";
3130
+ import {
3131
+ combineHeaders as combineHeaders5,
3132
+ convertUint8ArrayToBase64,
3133
+ createJsonResponseHandler as createJsonResponseHandler5,
3134
+ delay as delay2,
3135
+ getFromApi as getFromApi2,
3136
+ lazySchema as lazySchema13,
3137
+ parseProviderOptions as parseProviderOptions5,
3138
+ postJsonToApi as postJsonToApi4,
3139
+ resolve as resolve4,
3140
+ zodSchema as zodSchema13
3141
+ } from "@ai-sdk/provider-utils";
3142
+ import { z as z15 } from "zod/v4";
2094
3143
  var GoogleGenerativeAIVideoModel = class {
2095
3144
  constructor(modelId, config) {
2096
3145
  this.modelId = modelId;
2097
3146
  this.config = config;
2098
- this.specificationVersion = "v3";
3147
+ this.specificationVersion = "v4";
2099
3148
  }
2100
3149
  get provider() {
2101
3150
  return this.config.provider;
@@ -2107,7 +3156,7 @@ var GoogleGenerativeAIVideoModel = class {
2107
3156
  var _a, _b, _c, _d, _e, _f, _g, _h;
2108
3157
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
2109
3158
  const warnings = [];
2110
- const googleOptions = await (0, import_provider_utils15.parseProviderOptions)({
3159
+ const googleOptions = await parseProviderOptions5({
2111
3160
  provider: "google",
2112
3161
  providerOptions: options.providerOptions,
2113
3162
  schema: googleVideoModelOptionsSchema
@@ -2125,7 +3174,7 @@ var GoogleGenerativeAIVideoModel = class {
2125
3174
  details: "Google Generative AI video models require base64-encoded images. URL will be ignored."
2126
3175
  });
2127
3176
  } else {
2128
- const base64Data = typeof options.image.data === "string" ? options.image.data : (0, import_provider_utils15.convertUint8ArrayToBase64)(options.image.data);
3177
+ const base64Data = typeof options.image.data === "string" ? options.image.data : convertUint8ArrayToBase64(options.image.data);
2129
3178
  instance.image = {
2130
3179
  inlineData: {
2131
3180
  mimeType: options.image.mediaType || "image/png",
@@ -2191,17 +3240,17 @@ var GoogleGenerativeAIVideoModel = class {
2191
3240
  }
2192
3241
  }
2193
3242
  }
2194
- const { value: operation } = await (0, import_provider_utils15.postJsonToApi)({
3243
+ const { value: operation } = await postJsonToApi4({
2195
3244
  url: `${this.config.baseURL}/models/${this.modelId}:predictLongRunning`,
2196
- headers: (0, import_provider_utils15.combineHeaders)(
2197
- await (0, import_provider_utils15.resolve)(this.config.headers),
3245
+ headers: combineHeaders5(
3246
+ await resolve4(this.config.headers),
2198
3247
  options.headers
2199
3248
  ),
2200
3249
  body: {
2201
3250
  instances,
2202
3251
  parameters
2203
3252
  },
2204
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
3253
+ successfulResponseHandler: createJsonResponseHandler5(
2205
3254
  googleOperationSchema
2206
3255
  ),
2207
3256
  failedResponseHandler: googleFailedResponseHandler,
@@ -2210,7 +3259,7 @@ var GoogleGenerativeAIVideoModel = class {
2210
3259
  });
2211
3260
  const operationName = operation.name;
2212
3261
  if (!operationName) {
2213
- throw new import_provider4.AISDKError({
3262
+ throw new AISDKError2({
2214
3263
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2215
3264
  message: "No operation name returned from API"
2216
3265
  });
@@ -2222,25 +3271,25 @@ var GoogleGenerativeAIVideoModel = class {
2222
3271
  let responseHeaders;
2223
3272
  while (!finalOperation.done) {
2224
3273
  if (Date.now() - startTime > pollTimeoutMs) {
2225
- throw new import_provider4.AISDKError({
3274
+ throw new AISDKError2({
2226
3275
  name: "GOOGLE_VIDEO_GENERATION_TIMEOUT",
2227
3276
  message: `Video generation timed out after ${pollTimeoutMs}ms`
2228
3277
  });
2229
3278
  }
2230
- await (0, import_provider_utils15.delay)(pollIntervalMs);
3279
+ await delay2(pollIntervalMs);
2231
3280
  if ((_f = options.abortSignal) == null ? void 0 : _f.aborted) {
2232
- throw new import_provider4.AISDKError({
3281
+ throw new AISDKError2({
2233
3282
  name: "GOOGLE_VIDEO_GENERATION_ABORTED",
2234
3283
  message: "Video generation request was aborted"
2235
3284
  });
2236
3285
  }
2237
- const { value: statusOperation, responseHeaders: pollHeaders } = await (0, import_provider_utils15.getFromApi)({
3286
+ const { value: statusOperation, responseHeaders: pollHeaders } = await getFromApi2({
2238
3287
  url: `${this.config.baseURL}/${operationName}`,
2239
- headers: (0, import_provider_utils15.combineHeaders)(
2240
- await (0, import_provider_utils15.resolve)(this.config.headers),
3288
+ headers: combineHeaders5(
3289
+ await resolve4(this.config.headers),
2241
3290
  options.headers
2242
3291
  ),
2243
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
3292
+ successfulResponseHandler: createJsonResponseHandler5(
2244
3293
  googleOperationSchema
2245
3294
  ),
2246
3295
  failedResponseHandler: googleFailedResponseHandler,
@@ -2251,21 +3300,21 @@ var GoogleGenerativeAIVideoModel = class {
2251
3300
  responseHeaders = pollHeaders;
2252
3301
  }
2253
3302
  if (finalOperation.error) {
2254
- throw new import_provider4.AISDKError({
3303
+ throw new AISDKError2({
2255
3304
  name: "GOOGLE_VIDEO_GENERATION_FAILED",
2256
3305
  message: `Video generation failed: ${finalOperation.error.message}`
2257
3306
  });
2258
3307
  }
2259
3308
  const response = finalOperation.response;
2260
3309
  if (!((_g = response == null ? void 0 : response.generateVideoResponse) == null ? void 0 : _g.generatedSamples) || response.generateVideoResponse.generatedSamples.length === 0) {
2261
- throw new import_provider4.AISDKError({
3310
+ throw new AISDKError2({
2262
3311
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2263
3312
  message: `No videos in response. Response: ${JSON.stringify(finalOperation)}`
2264
3313
  });
2265
3314
  }
2266
3315
  const videos = [];
2267
3316
  const videoMetadata = [];
2268
- const resolvedHeaders = await (0, import_provider_utils15.resolve)(this.config.headers);
3317
+ const resolvedHeaders = await resolve4(this.config.headers);
2269
3318
  const apiKey = resolvedHeaders == null ? void 0 : resolvedHeaders["x-goog-api-key"];
2270
3319
  for (const generatedSample of response.generateVideoResponse.generatedSamples) {
2271
3320
  if ((_h = generatedSample.video) == null ? void 0 : _h.uri) {
@@ -2281,7 +3330,7 @@ var GoogleGenerativeAIVideoModel = class {
2281
3330
  }
2282
3331
  }
2283
3332
  if (videos.length === 0) {
2284
- throw new import_provider4.AISDKError({
3333
+ throw new AISDKError2({
2285
3334
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2286
3335
  message: "No valid videos in response"
2287
3336
  });
@@ -2302,37 +3351,37 @@ var GoogleGenerativeAIVideoModel = class {
2302
3351
  };
2303
3352
  }
2304
3353
  };
2305
- var googleOperationSchema = import_v414.z.object({
2306
- name: import_v414.z.string().nullish(),
2307
- done: import_v414.z.boolean().nullish(),
2308
- error: import_v414.z.object({
2309
- code: import_v414.z.number().nullish(),
2310
- message: import_v414.z.string(),
2311
- status: import_v414.z.string().nullish()
3354
+ var googleOperationSchema = z15.object({
3355
+ name: z15.string().nullish(),
3356
+ done: z15.boolean().nullish(),
3357
+ error: z15.object({
3358
+ code: z15.number().nullish(),
3359
+ message: z15.string(),
3360
+ status: z15.string().nullish()
2312
3361
  }).nullish(),
2313
- response: import_v414.z.object({
2314
- generateVideoResponse: import_v414.z.object({
2315
- generatedSamples: import_v414.z.array(
2316
- import_v414.z.object({
2317
- video: import_v414.z.object({
2318
- uri: import_v414.z.string().nullish()
3362
+ response: z15.object({
3363
+ generateVideoResponse: z15.object({
3364
+ generatedSamples: z15.array(
3365
+ z15.object({
3366
+ video: z15.object({
3367
+ uri: z15.string().nullish()
2319
3368
  }).nullish()
2320
3369
  })
2321
3370
  ).nullish()
2322
3371
  }).nullish()
2323
3372
  }).nullish()
2324
3373
  });
2325
- var googleVideoModelOptionsSchema = (0, import_provider_utils15.lazySchema)(
2326
- () => (0, import_provider_utils15.zodSchema)(
2327
- import_v414.z.object({
2328
- pollIntervalMs: import_v414.z.number().positive().nullish(),
2329
- pollTimeoutMs: import_v414.z.number().positive().nullish(),
2330
- personGeneration: import_v414.z.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2331
- negativePrompt: import_v414.z.string().nullish(),
2332
- referenceImages: import_v414.z.array(
2333
- import_v414.z.object({
2334
- bytesBase64Encoded: import_v414.z.string().nullish(),
2335
- gcsUri: import_v414.z.string().nullish()
3374
+ var googleVideoModelOptionsSchema = lazySchema13(
3375
+ () => zodSchema13(
3376
+ z15.object({
3377
+ pollIntervalMs: z15.number().positive().nullish(),
3378
+ pollTimeoutMs: z15.number().positive().nullish(),
3379
+ personGeneration: z15.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
3380
+ negativePrompt: z15.string().nullish(),
3381
+ referenceImages: z15.array(
3382
+ z15.object({
3383
+ bytesBase64Encoded: z15.string().nullish(),
3384
+ gcsUri: z15.string().nullish()
2336
3385
  })
2337
3386
  ).nullish()
2338
3387
  }).passthrough()
@@ -2342,11 +3391,11 @@ var googleVideoModelOptionsSchema = (0, import_provider_utils15.lazySchema)(
2342
3391
  // src/google-provider.ts
2343
3392
  function createGoogleGenerativeAI(options = {}) {
2344
3393
  var _a, _b;
2345
- const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://generativelanguage.googleapis.com/v1beta";
3394
+ const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://generativelanguage.googleapis.com/v1beta";
2346
3395
  const providerName = (_b = options.name) != null ? _b : "google.generative-ai";
2347
- const getHeaders = () => (0, import_provider_utils16.withUserAgentSuffix)(
3396
+ const getHeaders = () => withUserAgentSuffix(
2348
3397
  {
2349
- "x-goog-api-key": (0, import_provider_utils16.loadApiKey)({
3398
+ "x-goog-api-key": loadApiKey({
2350
3399
  apiKey: options.apiKey,
2351
3400
  environmentVariableName: "GOOGLE_GENERATIVE_AI_API_KEY",
2352
3401
  description: "Google Generative AI"
@@ -2361,7 +3410,7 @@ function createGoogleGenerativeAI(options = {}) {
2361
3410
  provider: providerName,
2362
3411
  baseURL,
2363
3412
  headers: getHeaders,
2364
- generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils16.generateId,
3413
+ generateId: (_a2 = options.generateId) != null ? _a2 : generateId2,
2365
3414
  supportedUrls: () => ({
2366
3415
  "*": [
2367
3416
  // Google Generative Language "files" endpoint
@@ -2389,6 +3438,12 @@ function createGoogleGenerativeAI(options = {}) {
2389
3438
  headers: getHeaders,
2390
3439
  fetch: options.fetch
2391
3440
  });
3441
+ const createFiles = () => new GoogleGenerativeAIFiles({
3442
+ provider: providerName,
3443
+ baseURL,
3444
+ headers: getHeaders,
3445
+ fetch: options.fetch
3446
+ });
2392
3447
  const createVideoModel = (modelId) => {
2393
3448
  var _a2;
2394
3449
  return new GoogleGenerativeAIVideoModel(modelId, {
@@ -2396,7 +3451,7 @@ function createGoogleGenerativeAI(options = {}) {
2396
3451
  baseURL,
2397
3452
  headers: getHeaders,
2398
3453
  fetch: options.fetch,
2399
- generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils16.generateId
3454
+ generateId: (_a2 = options.generateId) != null ? _a2 : generateId2
2400
3455
  });
2401
3456
  };
2402
3457
  const provider = function(modelId) {
@@ -2407,7 +3462,7 @@ function createGoogleGenerativeAI(options = {}) {
2407
3462
  }
2408
3463
  return createChatModel(modelId);
2409
3464
  };
2410
- provider.specificationVersion = "v3";
3465
+ provider.specificationVersion = "v4";
2411
3466
  provider.languageModel = createChatModel;
2412
3467
  provider.chat = createChatModel;
2413
3468
  provider.generativeAI = createChatModel;
@@ -2419,14 +3474,14 @@ function createGoogleGenerativeAI(options = {}) {
2419
3474
  provider.imageModel = createImageModel;
2420
3475
  provider.video = createVideoModel;
2421
3476
  provider.videoModel = createVideoModel;
3477
+ provider.files = createFiles;
2422
3478
  provider.tools = googleTools;
2423
3479
  return provider;
2424
3480
  }
2425
3481
  var google = createGoogleGenerativeAI();
2426
- // Annotate the CommonJS export names for ESM import in node:
2427
- 0 && (module.exports = {
3482
+ export {
2428
3483
  VERSION,
2429
3484
  createGoogleGenerativeAI,
2430
3485
  google
2431
- });
3486
+ };
2432
3487
  //# sourceMappingURL=index.js.map