@ai-sdk/google 4.0.0-beta.9 → 4.0.0-canary.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/CHANGELOG.md +345 -4
  2. package/README.md +6 -4
  3. package/dist/index.d.ts +97 -54
  4. package/dist/index.js +1639 -575
  5. package/dist/index.js.map +1 -1
  6. package/dist/internal/index.d.ts +62 -22
  7. package/dist/internal/index.js +1257 -449
  8. package/dist/internal/index.js.map +1 -1
  9. package/docs/{15-google-generative-ai.mdx → 15-google.mdx} +46 -40
  10. package/package.json +13 -14
  11. package/src/{convert-google-generative-ai-usage.ts → convert-google-usage.ts} +11 -4
  12. package/src/convert-json-schema-to-openapi-schema.ts +1 -1
  13. package/src/convert-to-google-messages.ts +577 -0
  14. package/src/{google-generative-ai-embedding-options.ts → google-embedding-model-options.ts} +2 -2
  15. package/src/{google-generative-ai-embedding-model.ts → google-embedding-model.ts} +28 -15
  16. package/src/google-error.ts +1 -1
  17. package/src/google-files.ts +225 -0
  18. package/src/google-image-model-options.ts +23 -0
  19. package/src/{google-generative-ai-image-model.ts → google-image-model.ts} +61 -49
  20. package/src/{google-generative-ai-image-settings.ts → google-image-settings.ts} +2 -2
  21. package/src/google-json-accumulator.ts +336 -0
  22. package/src/{google-generative-ai-options.ts → google-language-model-options.ts} +32 -5
  23. package/src/{google-generative-ai-language-model.ts → google-language-model.ts} +586 -191
  24. package/src/google-prepare-tools.ts +68 -8
  25. package/src/google-prompt.ts +82 -0
  26. package/src/google-provider.ts +56 -47
  27. package/src/google-video-model-options.ts +43 -0
  28. package/src/{google-generative-ai-video-model.ts → google-video-model.ts} +11 -50
  29. package/src/{google-generative-ai-video-settings.ts → google-video-settings.ts} +2 -1
  30. package/src/index.ts +28 -9
  31. package/src/internal/index.ts +2 -2
  32. package/src/{map-google-generative-ai-finish-reason.ts → map-google-finish-reason.ts} +2 -2
  33. package/src/tool/code-execution.ts +2 -2
  34. package/src/tool/enterprise-web-search.ts +9 -3
  35. package/src/tool/file-search.ts +5 -7
  36. package/src/tool/google-maps.ts +3 -2
  37. package/src/tool/google-search.ts +10 -11
  38. package/src/tool/url-context.ts +4 -2
  39. package/src/tool/vertex-rag-store.ts +9 -6
  40. package/dist/index.d.mts +0 -384
  41. package/dist/index.mjs +0 -2519
  42. package/dist/index.mjs.map +0 -1
  43. package/dist/internal/index.d.mts +0 -287
  44. package/dist/internal/index.mjs +0 -1708
  45. package/dist/internal/index.mjs.map +0 -1
  46. package/src/convert-to-google-generative-ai-messages.ts +0 -239
  47. package/src/google-generative-ai-prompt.ts +0 -47
package/dist/index.js CHANGED
@@ -1,81 +1,78 @@
1
- "use strict";
2
- var __defProp = Object.defineProperty;
3
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
- var __getOwnPropNames = Object.getOwnPropertyNames;
5
- var __hasOwnProp = Object.prototype.hasOwnProperty;
6
- var __export = (target, all) => {
7
- for (var name in all)
8
- __defProp(target, name, { get: all[name], enumerable: true });
9
- };
10
- var __copyProps = (to, from, except, desc) => {
11
- if (from && typeof from === "object" || typeof from === "function") {
12
- for (let key of __getOwnPropNames(from))
13
- if (!__hasOwnProp.call(to, key) && key !== except)
14
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
- }
16
- return to;
17
- };
18
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
-
20
- // src/index.ts
21
- var src_exports = {};
22
- __export(src_exports, {
23
- VERSION: () => VERSION,
24
- createGoogleGenerativeAI: () => createGoogleGenerativeAI,
25
- google: () => google
26
- });
27
- module.exports = __toCommonJS(src_exports);
28
-
29
1
  // src/google-provider.ts
30
- var import_provider_utils16 = require("@ai-sdk/provider-utils");
2
+ import {
3
+ generateId as generateId2,
4
+ loadApiKey,
5
+ withoutTrailingSlash,
6
+ withUserAgentSuffix
7
+ } from "@ai-sdk/provider-utils";
31
8
 
32
9
  // src/version.ts
33
- var VERSION = true ? "4.0.0-beta.9" : "0.0.0-test";
10
+ var VERSION = true ? "4.0.0-canary.50" : "0.0.0-test";
34
11
 
35
- // src/google-generative-ai-embedding-model.ts
36
- var import_provider = require("@ai-sdk/provider");
37
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
38
- var import_v43 = require("zod/v4");
12
+ // src/google-embedding-model.ts
13
+ import {
14
+ TooManyEmbeddingValuesForCallError
15
+ } from "@ai-sdk/provider";
16
+ import {
17
+ combineHeaders,
18
+ createJsonResponseHandler,
19
+ lazySchema as lazySchema3,
20
+ parseProviderOptions,
21
+ postJsonToApi,
22
+ resolve,
23
+ serializeModelOptions,
24
+ WORKFLOW_SERIALIZE,
25
+ WORKFLOW_DESERIALIZE,
26
+ zodSchema as zodSchema3
27
+ } from "@ai-sdk/provider-utils";
28
+ import { z as z3 } from "zod/v4";
39
29
 
40
30
  // src/google-error.ts
41
- var import_provider_utils = require("@ai-sdk/provider-utils");
42
- var import_v4 = require("zod/v4");
43
- var googleErrorDataSchema = (0, import_provider_utils.lazySchema)(
44
- () => (0, import_provider_utils.zodSchema)(
45
- import_v4.z.object({
46
- error: import_v4.z.object({
47
- code: import_v4.z.number().nullable(),
48
- message: import_v4.z.string(),
49
- status: import_v4.z.string()
31
+ import {
32
+ createJsonErrorResponseHandler,
33
+ lazySchema,
34
+ zodSchema
35
+ } from "@ai-sdk/provider-utils";
36
+ import { z } from "zod/v4";
37
+ var googleErrorDataSchema = lazySchema(
38
+ () => zodSchema(
39
+ z.object({
40
+ error: z.object({
41
+ code: z.number().nullable(),
42
+ message: z.string(),
43
+ status: z.string()
50
44
  })
51
45
  })
52
46
  )
53
47
  );
54
- var googleFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
48
+ var googleFailedResponseHandler = createJsonErrorResponseHandler({
55
49
  errorSchema: googleErrorDataSchema,
56
50
  errorToMessage: (data) => data.error.message
57
51
  });
58
52
 
59
- // src/google-generative-ai-embedding-options.ts
60
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
61
- var import_v42 = require("zod/v4");
62
- var googleEmbeddingContentPartSchema = import_v42.z.union([
63
- import_v42.z.object({ text: import_v42.z.string() }),
64
- import_v42.z.object({
65
- inlineData: import_v42.z.object({
66
- mimeType: import_v42.z.string(),
67
- data: import_v42.z.string()
53
+ // src/google-embedding-model-options.ts
54
+ import {
55
+ lazySchema as lazySchema2,
56
+ zodSchema as zodSchema2
57
+ } from "@ai-sdk/provider-utils";
58
+ import { z as z2 } from "zod/v4";
59
+ var googleEmbeddingContentPartSchema = z2.union([
60
+ z2.object({ text: z2.string() }),
61
+ z2.object({
62
+ inlineData: z2.object({
63
+ mimeType: z2.string(),
64
+ data: z2.string()
68
65
  })
69
66
  })
70
67
  ]);
71
- var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
72
- () => (0, import_provider_utils2.zodSchema)(
73
- import_v42.z.object({
68
+ var googleEmbeddingModelOptions = lazySchema2(
69
+ () => zodSchema2(
70
+ z2.object({
74
71
  /**
75
72
  * Optional. Optional reduced dimension for the output embedding.
76
73
  * If set, excessive values in the output embedding are truncated from the end.
77
74
  */
78
- outputDimensionality: import_v42.z.number().optional(),
75
+ outputDimensionality: z2.number().optional(),
79
76
  /**
80
77
  * Optional. Specifies the task type for generating embeddings.
81
78
  * Supported task types:
@@ -88,7 +85,7 @@ var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
88
85
  * - FACT_VERIFICATION: Optimized for verifying factual information.
89
86
  * - CODE_RETRIEVAL_QUERY: Optimized for retrieving code blocks based on natural language queries.
90
87
  */
91
- taskType: import_v42.z.enum([
88
+ taskType: z2.enum([
92
89
  "SEMANTIC_SIMILARITY",
93
90
  "CLASSIFICATION",
94
91
  "CLUSTERING",
@@ -107,13 +104,13 @@ var googleEmbeddingModelOptions = (0, import_provider_utils2.lazySchema)(
107
104
  * The array length must match the number of values being embedded. In
108
105
  * the case of a single embedding, the array length must be 1.
109
106
  */
110
- content: import_v42.z.array(import_v42.z.array(googleEmbeddingContentPartSchema).min(1).nullable()).optional()
107
+ content: z2.array(z2.array(googleEmbeddingContentPartSchema).min(1).nullable()).optional()
111
108
  })
112
109
  )
113
110
  );
114
111
 
115
- // src/google-generative-ai-embedding-model.ts
116
- var GoogleGenerativeAIEmbeddingModel = class {
112
+ // src/google-embedding-model.ts
113
+ var GoogleEmbeddingModel = class _GoogleEmbeddingModel {
117
114
  constructor(modelId, config) {
118
115
  this.specificationVersion = "v4";
119
116
  this.maxEmbeddingsPerCall = 2048;
@@ -121,6 +118,15 @@ var GoogleGenerativeAIEmbeddingModel = class {
121
118
  this.modelId = modelId;
122
119
  this.config = config;
123
120
  }
121
+ static [WORKFLOW_SERIALIZE](model) {
122
+ return serializeModelOptions({
123
+ modelId: model.modelId,
124
+ config: model.config
125
+ });
126
+ }
127
+ static [WORKFLOW_DESERIALIZE](options) {
128
+ return new _GoogleEmbeddingModel(options.modelId, options.config);
129
+ }
124
130
  get provider() {
125
131
  return this.config.provider;
126
132
  }
@@ -130,21 +136,21 @@ var GoogleGenerativeAIEmbeddingModel = class {
130
136
  abortSignal,
131
137
  providerOptions
132
138
  }) {
133
- const googleOptions = await (0, import_provider_utils3.parseProviderOptions)({
139
+ const googleOptions = await parseProviderOptions({
134
140
  provider: "google",
135
141
  providerOptions,
136
142
  schema: googleEmbeddingModelOptions
137
143
  });
138
144
  if (values.length > this.maxEmbeddingsPerCall) {
139
- throw new import_provider.TooManyEmbeddingValuesForCallError({
145
+ throw new TooManyEmbeddingValuesForCallError({
140
146
  provider: this.provider,
141
147
  modelId: this.modelId,
142
148
  maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
143
149
  values
144
150
  });
145
151
  }
146
- const mergedHeaders = (0, import_provider_utils3.combineHeaders)(
147
- await (0, import_provider_utils3.resolve)(this.config.headers),
152
+ const mergedHeaders = combineHeaders(
153
+ this.config.headers ? await resolve(this.config.headers) : void 0,
148
154
  headers
149
155
  );
150
156
  const multimodalContent = googleOptions == null ? void 0 : googleOptions.content;
@@ -161,7 +167,7 @@ var GoogleGenerativeAIEmbeddingModel = class {
161
167
  responseHeaders: responseHeaders2,
162
168
  value: response2,
163
169
  rawValue: rawValue2
164
- } = await (0, import_provider_utils3.postJsonToApi)({
170
+ } = await postJsonToApi({
165
171
  url: `${this.config.baseURL}/models/${this.modelId}:embedContent`,
166
172
  headers: mergedHeaders,
167
173
  body: {
@@ -173,7 +179,7 @@ var GoogleGenerativeAIEmbeddingModel = class {
173
179
  taskType: googleOptions == null ? void 0 : googleOptions.taskType
174
180
  },
175
181
  failedResponseHandler: googleFailedResponseHandler,
176
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
182
+ successfulResponseHandler: createJsonResponseHandler(
177
183
  googleGenerativeAISingleEmbeddingResponseSchema
178
184
  ),
179
185
  abortSignal,
@@ -190,7 +196,7 @@ var GoogleGenerativeAIEmbeddingModel = class {
190
196
  responseHeaders,
191
197
  value: response,
192
198
  rawValue
193
- } = await (0, import_provider_utils3.postJsonToApi)({
199
+ } = await postJsonToApi({
194
200
  url: `${this.config.baseURL}/models/${this.modelId}:batchEmbedContents`,
195
201
  headers: mergedHeaders,
196
202
  body: {
@@ -209,7 +215,7 @@ var GoogleGenerativeAIEmbeddingModel = class {
209
215
  })
210
216
  },
211
217
  failedResponseHandler: googleFailedResponseHandler,
212
- successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
218
+ successfulResponseHandler: createJsonResponseHandler(
213
219
  googleGenerativeAITextEmbeddingResponseSchema
214
220
  ),
215
221
  abortSignal,
@@ -223,27 +229,43 @@ var GoogleGenerativeAIEmbeddingModel = class {
223
229
  };
224
230
  }
225
231
  };
226
- var googleGenerativeAITextEmbeddingResponseSchema = (0, import_provider_utils3.lazySchema)(
227
- () => (0, import_provider_utils3.zodSchema)(
228
- import_v43.z.object({
229
- embeddings: import_v43.z.array(import_v43.z.object({ values: import_v43.z.array(import_v43.z.number()) }))
232
+ var googleGenerativeAITextEmbeddingResponseSchema = lazySchema3(
233
+ () => zodSchema3(
234
+ z3.object({
235
+ embeddings: z3.array(z3.object({ values: z3.array(z3.number()) }))
230
236
  })
231
237
  )
232
238
  );
233
- var googleGenerativeAISingleEmbeddingResponseSchema = (0, import_provider_utils3.lazySchema)(
234
- () => (0, import_provider_utils3.zodSchema)(
235
- import_v43.z.object({
236
- embedding: import_v43.z.object({ values: import_v43.z.array(import_v43.z.number()) })
239
+ var googleGenerativeAISingleEmbeddingResponseSchema = lazySchema3(
240
+ () => zodSchema3(
241
+ z3.object({
242
+ embedding: z3.object({ values: z3.array(z3.number()) })
237
243
  })
238
244
  )
239
245
  );
240
246
 
241
- // src/google-generative-ai-language-model.ts
242
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
243
- var import_v45 = require("zod/v4");
247
+ // src/google-language-model.ts
248
+ import {
249
+ combineHeaders as combineHeaders2,
250
+ createEventSourceResponseHandler,
251
+ createJsonResponseHandler as createJsonResponseHandler2,
252
+ generateId,
253
+ isCustomReasoning,
254
+ lazySchema as lazySchema5,
255
+ mapReasoningToProviderBudget,
256
+ mapReasoningToProviderEffort,
257
+ parseProviderOptions as parseProviderOptions2,
258
+ postJsonToApi as postJsonToApi2,
259
+ resolve as resolve2,
260
+ serializeModelOptions as serializeModelOptions2,
261
+ WORKFLOW_SERIALIZE as WORKFLOW_SERIALIZE2,
262
+ WORKFLOW_DESERIALIZE as WORKFLOW_DESERIALIZE2,
263
+ zodSchema as zodSchema5
264
+ } from "@ai-sdk/provider-utils";
265
+ import { z as z5 } from "zod/v4";
244
266
 
245
- // src/convert-google-generative-ai-usage.ts
246
- function convertGoogleGenerativeAIUsage(usage) {
267
+ // src/convert-google-usage.ts
268
+ function convertGoogleUsage(usage) {
247
269
  var _a, _b, _c, _d;
248
270
  if (usage == null) {
249
271
  return {
@@ -397,21 +419,147 @@ function isEmptyObjectSchema(jsonSchema) {
397
419
  return jsonSchema != null && typeof jsonSchema === "object" && jsonSchema.type === "object" && (jsonSchema.properties == null || Object.keys(jsonSchema.properties).length === 0) && !jsonSchema.additionalProperties;
398
420
  }
399
421
 
400
- // src/convert-to-google-generative-ai-messages.ts
401
- var import_provider2 = require("@ai-sdk/provider");
402
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
403
- function convertToGoogleGenerativeAIMessages(prompt, options) {
404
- var _a, _b, _c;
422
+ // src/convert-to-google-messages.ts
423
+ import {
424
+ UnsupportedFunctionalityError
425
+ } from "@ai-sdk/provider";
426
+ import {
427
+ convertToBase64,
428
+ isFullMediaType,
429
+ resolveFullMediaType,
430
+ resolveProviderReference
431
+ } from "@ai-sdk/provider-utils";
432
+ var dataUrlRegex = /^data:([^;,]+);base64,(.+)$/s;
433
+ function parseBase64DataUrl(value) {
434
+ const match = dataUrlRegex.exec(value);
435
+ if (match == null) {
436
+ return void 0;
437
+ }
438
+ return {
439
+ mediaType: match[1],
440
+ data: match[2]
441
+ };
442
+ }
443
+ function convertUrlToolResultPart(url) {
444
+ const parsedDataUrl = parseBase64DataUrl(url);
445
+ if (parsedDataUrl == null) {
446
+ return void 0;
447
+ }
448
+ return {
449
+ inlineData: {
450
+ mimeType: parsedDataUrl.mediaType,
451
+ data: parsedDataUrl.data
452
+ }
453
+ };
454
+ }
455
+ function appendToolResultParts(parts, toolName, outputValue) {
456
+ const functionResponseParts = [];
457
+ const responseTextParts = [];
458
+ for (const contentPart of outputValue) {
459
+ switch (contentPart.type) {
460
+ case "text": {
461
+ responseTextParts.push(contentPart.text);
462
+ break;
463
+ }
464
+ case "file-data": {
465
+ functionResponseParts.push({
466
+ inlineData: {
467
+ mimeType: contentPart.mediaType,
468
+ data: contentPart.data
469
+ }
470
+ });
471
+ break;
472
+ }
473
+ case "file-url": {
474
+ const functionResponsePart = convertUrlToolResultPart(
475
+ contentPart.url
476
+ );
477
+ if (functionResponsePart != null) {
478
+ functionResponseParts.push(functionResponsePart);
479
+ } else {
480
+ responseTextParts.push(JSON.stringify(contentPart));
481
+ }
482
+ break;
483
+ }
484
+ default: {
485
+ responseTextParts.push(JSON.stringify(contentPart));
486
+ break;
487
+ }
488
+ }
489
+ }
490
+ parts.push({
491
+ functionResponse: {
492
+ name: toolName,
493
+ response: {
494
+ name: toolName,
495
+ content: responseTextParts.length > 0 ? responseTextParts.join("\n") : "Tool executed successfully."
496
+ },
497
+ ...functionResponseParts.length > 0 ? { parts: functionResponseParts } : {}
498
+ }
499
+ });
500
+ }
501
+ function appendLegacyToolResultParts(parts, toolName, outputValue) {
502
+ for (const contentPart of outputValue) {
503
+ switch (contentPart.type) {
504
+ case "text":
505
+ parts.push({
506
+ functionResponse: {
507
+ name: toolName,
508
+ response: {
509
+ name: toolName,
510
+ content: contentPart.text
511
+ }
512
+ }
513
+ });
514
+ break;
515
+ case "file-data":
516
+ if (contentPart.mediaType.startsWith("image/")) {
517
+ parts.push(
518
+ {
519
+ inlineData: {
520
+ mimeType: contentPart.mediaType,
521
+ data: contentPart.data
522
+ }
523
+ },
524
+ {
525
+ text: "Tool executed successfully and returned this image as a response"
526
+ }
527
+ );
528
+ } else {
529
+ parts.push({ text: JSON.stringify(contentPart) });
530
+ }
531
+ break;
532
+ default:
533
+ parts.push({ text: JSON.stringify(contentPart) });
534
+ break;
535
+ }
536
+ }
537
+ }
538
+ function convertToGoogleMessages(prompt, options) {
539
+ var _a, _b, _c, _d;
405
540
  const systemInstructionParts = [];
406
541
  const contents = [];
407
542
  let systemMessagesAllowed = true;
408
543
  const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
409
- const providerOptionsName = (_b = options == null ? void 0 : options.providerOptionsName) != null ? _b : "google";
544
+ const providerOptionsNames = (_b = options == null ? void 0 : options.providerOptionsNames) != null ? _b : ["google"];
545
+ const isVertexLike = !providerOptionsNames.includes("google");
546
+ const supportsFunctionResponseParts = (_c = options == null ? void 0 : options.supportsFunctionResponseParts) != null ? _c : true;
547
+ const readProviderOpts = (part) => {
548
+ var _a2, _b2, _c2, _d2, _e;
549
+ for (const name of providerOptionsNames) {
550
+ const v = (_a2 = part.providerOptions) == null ? void 0 : _a2[name];
551
+ if (v != null) return v;
552
+ }
553
+ if (isVertexLike) {
554
+ return (_b2 = part.providerOptions) == null ? void 0 : _b2.google;
555
+ }
556
+ return (_e = (_c2 = part.providerOptions) == null ? void 0 : _c2.googleVertex) != null ? _e : (_d2 = part.providerOptions) == null ? void 0 : _d2.vertex;
557
+ };
410
558
  for (const { role, content } of prompt) {
411
559
  switch (role) {
412
560
  case "system": {
413
561
  if (!systemMessagesAllowed) {
414
- throw new import_provider2.UnsupportedFunctionalityError({
562
+ throw new UnsupportedFunctionalityError({
415
563
  functionality: "system messages are only supported at the beginning of the conversation"
416
564
  });
417
565
  }
@@ -428,20 +576,54 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
428
576
  break;
429
577
  }
430
578
  case "file": {
431
- const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
432
- parts.push(
433
- part.data instanceof URL ? {
434
- fileData: {
435
- mimeType: mediaType,
436
- fileUri: part.data.toString()
437
- }
438
- } : {
439
- inlineData: {
440
- mimeType: mediaType,
441
- data: (0, import_provider_utils4.convertToBase64)(part.data)
579
+ switch (part.data.type) {
580
+ case "url": {
581
+ parts.push({
582
+ fileData: {
583
+ mimeType: resolveFullMediaType({ part }),
584
+ fileUri: part.data.url.toString()
585
+ }
586
+ });
587
+ break;
588
+ }
589
+ case "reference": {
590
+ if (isVertexLike) {
591
+ throw new UnsupportedFunctionalityError({
592
+ functionality: "file parts with provider references"
593
+ });
442
594
  }
595
+ parts.push({
596
+ fileData: {
597
+ mimeType: resolveFullMediaType({ part }),
598
+ fileUri: resolveProviderReference({
599
+ reference: part.data.reference,
600
+ provider: "google"
601
+ })
602
+ }
603
+ });
604
+ break;
605
+ }
606
+ case "text": {
607
+ parts.push({
608
+ inlineData: {
609
+ mimeType: isFullMediaType(part.mediaType) ? part.mediaType : "text/plain",
610
+ data: convertToBase64(
611
+ new TextEncoder().encode(part.data.text)
612
+ )
613
+ }
614
+ });
615
+ break;
443
616
  }
444
- );
617
+ case "data": {
618
+ parts.push({
619
+ inlineData: {
620
+ mimeType: resolveFullMediaType({ part }),
621
+ data: convertToBase64(part.data.data)
622
+ }
623
+ });
624
+ break;
625
+ }
626
+ }
445
627
  break;
446
628
  }
447
629
  }
@@ -454,8 +636,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
454
636
  contents.push({
455
637
  role: "model",
456
638
  parts: content.map((part) => {
457
- var _a2, _b2, _c2, _d;
458
- const providerOpts = (_d = (_a2 = part.providerOptions) == null ? void 0 : _a2[providerOptionsName]) != null ? _d : providerOptionsName !== "google" ? (_b2 = part.providerOptions) == null ? void 0 : _b2.google : (_c2 = part.providerOptions) == null ? void 0 : _c2.vertex;
639
+ const providerOpts = readProviderOpts(part);
459
640
  const thoughtSignature = (providerOpts == null ? void 0 : providerOpts.thoughtSignature) != null ? String(providerOpts.thoughtSignature) : void 0;
460
641
  switch (part.type) {
461
642
  case "text": {
@@ -471,22 +652,89 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
471
652
  thoughtSignature
472
653
  };
473
654
  }
655
+ case "reasoning-file": {
656
+ switch (part.data.type) {
657
+ case "url": {
658
+ throw new UnsupportedFunctionalityError({
659
+ functionality: "File data URLs in assistant messages are not supported"
660
+ });
661
+ }
662
+ case "data": {
663
+ return {
664
+ inlineData: {
665
+ mimeType: part.mediaType,
666
+ data: convertToBase64(part.data.data)
667
+ },
668
+ thought: true,
669
+ thoughtSignature
670
+ };
671
+ }
672
+ }
673
+ break;
674
+ }
474
675
  case "file": {
475
- if (part.data instanceof URL) {
476
- throw new import_provider2.UnsupportedFunctionalityError({
477
- functionality: "File data URLs in assistant messages are not supported"
478
- });
676
+ switch (part.data.type) {
677
+ case "url": {
678
+ throw new UnsupportedFunctionalityError({
679
+ functionality: "File data URLs in assistant messages are not supported"
680
+ });
681
+ }
682
+ case "reference": {
683
+ if (isVertexLike) {
684
+ throw new UnsupportedFunctionalityError({
685
+ functionality: "file parts with provider references"
686
+ });
687
+ }
688
+ return {
689
+ fileData: {
690
+ mimeType: part.mediaType,
691
+ fileUri: resolveProviderReference({
692
+ reference: part.data.reference,
693
+ provider: "google"
694
+ })
695
+ },
696
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
697
+ thoughtSignature
698
+ };
699
+ }
700
+ case "text": {
701
+ return {
702
+ inlineData: {
703
+ mimeType: isFullMediaType(part.mediaType) ? part.mediaType : "text/plain",
704
+ data: convertToBase64(
705
+ new TextEncoder().encode(part.data.text)
706
+ )
707
+ },
708
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
709
+ thoughtSignature
710
+ };
711
+ }
712
+ case "data": {
713
+ return {
714
+ inlineData: {
715
+ mimeType: part.mediaType,
716
+ data: convertToBase64(part.data.data)
717
+ },
718
+ ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
719
+ thoughtSignature
720
+ };
721
+ }
479
722
  }
480
- return {
481
- inlineData: {
482
- mimeType: part.mediaType,
483
- data: (0, import_provider_utils4.convertToBase64)(part.data)
484
- },
485
- ...(providerOpts == null ? void 0 : providerOpts.thought) === true ? { thought: true } : {},
486
- thoughtSignature
487
- };
723
+ break;
488
724
  }
489
725
  case "tool-call": {
726
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
727
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
728
+ if (serverToolCallId && serverToolType) {
729
+ return {
730
+ toolCall: {
731
+ toolType: serverToolType,
732
+ args: typeof part.input === "string" ? JSON.parse(part.input) : part.input,
733
+ id: serverToolCallId
734
+ },
735
+ thoughtSignature
736
+ };
737
+ }
490
738
  return {
491
739
  functionCall: {
492
740
  name: part.toolName,
@@ -495,6 +743,21 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
495
743
  thoughtSignature
496
744
  };
497
745
  }
746
+ case "tool-result": {
747
+ const serverToolCallId = (providerOpts == null ? void 0 : providerOpts.serverToolCallId) != null ? String(providerOpts.serverToolCallId) : void 0;
748
+ const serverToolType = (providerOpts == null ? void 0 : providerOpts.serverToolType) != null ? String(providerOpts.serverToolType) : void 0;
749
+ if (serverToolCallId && serverToolType) {
750
+ return {
751
+ toolResponse: {
752
+ toolType: serverToolType,
753
+ response: part.output.type === "json" ? part.output.value : {},
754
+ id: serverToolCallId
755
+ },
756
+ thoughtSignature
757
+ };
758
+ }
759
+ return void 0;
760
+ }
498
761
  }
499
762
  }).filter((part) => part !== void 0)
500
763
  });
@@ -507,38 +770,32 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
507
770
  if (part.type === "tool-approval-response") {
508
771
  continue;
509
772
  }
773
+ const partProviderOpts = readProviderOpts(part);
774
+ const serverToolCallId = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolCallId) != null ? String(partProviderOpts.serverToolCallId) : void 0;
775
+ const serverToolType = (partProviderOpts == null ? void 0 : partProviderOpts.serverToolType) != null ? String(partProviderOpts.serverToolType) : void 0;
776
+ if (serverToolCallId && serverToolType) {
777
+ const serverThoughtSignature = (partProviderOpts == null ? void 0 : partProviderOpts.thoughtSignature) != null ? String(partProviderOpts.thoughtSignature) : void 0;
778
+ if (contents.length > 0) {
779
+ const lastContent = contents[contents.length - 1];
780
+ if (lastContent.role === "model") {
781
+ lastContent.parts.push({
782
+ toolResponse: {
783
+ toolType: serverToolType,
784
+ response: part.output.type === "json" ? part.output.value : {},
785
+ id: serverToolCallId
786
+ },
787
+ thoughtSignature: serverThoughtSignature
788
+ });
789
+ continue;
790
+ }
791
+ }
792
+ }
510
793
  const output = part.output;
511
794
  if (output.type === "content") {
512
- for (const contentPart of output.value) {
513
- switch (contentPart.type) {
514
- case "text":
515
- parts.push({
516
- functionResponse: {
517
- name: part.toolName,
518
- response: {
519
- name: part.toolName,
520
- content: contentPart.text
521
- }
522
- }
523
- });
524
- break;
525
- case "image-data":
526
- parts.push(
527
- {
528
- inlineData: {
529
- mimeType: contentPart.mediaType,
530
- data: contentPart.data
531
- }
532
- },
533
- {
534
- text: "Tool executed successfully and returned this image as a response"
535
- }
536
- );
537
- break;
538
- default:
539
- parts.push({ text: JSON.stringify(contentPart) });
540
- break;
541
- }
795
+ if (supportsFunctionResponseParts) {
796
+ appendToolResultParts(parts, part.toolName, output.value);
797
+ } else {
798
+ appendLegacyToolResultParts(parts, part.toolName, output.value);
542
799
  }
543
800
  } else {
544
801
  parts.push({
@@ -546,7 +803,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
546
803
  name: part.toolName,
547
804
  response: {
548
805
  name: part.toolName,
549
- content: output.type === "execution-denied" ? (_c = output.reason) != null ? _c : "Tool execution denied." : output.value
806
+ content: output.type === "execution-denied" ? (_d = output.reason) != null ? _d : "Tool call execution denied." : output.value
550
807
  }
551
808
  }
552
809
  });
@@ -575,40 +832,43 @@ function getModelPath(modelId) {
575
832
  return modelId.includes("/") ? modelId : `models/${modelId}`;
576
833
  }
577
834
 
578
- // src/google-generative-ai-options.ts
579
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
580
- var import_v44 = require("zod/v4");
581
- var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
582
- () => (0, import_provider_utils5.zodSchema)(
583
- import_v44.z.object({
584
- responseModalities: import_v44.z.array(import_v44.z.enum(["TEXT", "IMAGE"])).optional(),
585
- thinkingConfig: import_v44.z.object({
586
- thinkingBudget: import_v44.z.number().optional(),
587
- includeThoughts: import_v44.z.boolean().optional(),
835
+ // src/google-language-model-options.ts
836
+ import {
837
+ lazySchema as lazySchema4,
838
+ zodSchema as zodSchema4
839
+ } from "@ai-sdk/provider-utils";
840
+ import { z as z4 } from "zod/v4";
841
+ var googleLanguageModelOptions = lazySchema4(
842
+ () => zodSchema4(
843
+ z4.object({
844
+ responseModalities: z4.array(z4.enum(["TEXT", "IMAGE"])).optional(),
845
+ thinkingConfig: z4.object({
846
+ thinkingBudget: z4.number().optional(),
847
+ includeThoughts: z4.boolean().optional(),
588
848
  // https://ai.google.dev/gemini-api/docs/gemini-3?thinking=high#thinking_level
589
- thinkingLevel: import_v44.z.enum(["minimal", "low", "medium", "high"]).optional()
849
+ thinkingLevel: z4.enum(["minimal", "low", "medium", "high"]).optional()
590
850
  }).optional(),
591
851
  /**
592
852
  * Optional.
593
853
  * The name of the cached content used as context to serve the prediction.
594
854
  * Format: cachedContents/{cachedContent}
595
855
  */
596
- cachedContent: import_v44.z.string().optional(),
856
+ cachedContent: z4.string().optional(),
597
857
  /**
598
858
  * Optional. Enable structured output. Default is true.
599
859
  *
600
860
  * This is useful when the JSON Schema contains elements that are
601
861
  * not supported by the OpenAPI schema version that
602
- * Google Generative AI uses. You can use this to disable
862
+ * Google uses. You can use this to disable
603
863
  * structured outputs if you need to.
604
864
  */
605
- structuredOutputs: import_v44.z.boolean().optional(),
865
+ structuredOutputs: z4.boolean().optional(),
606
866
  /**
607
867
  * Optional. A list of unique safety settings for blocking unsafe content.
608
868
  */
609
- safetySettings: import_v44.z.array(
610
- import_v44.z.object({
611
- category: import_v44.z.enum([
869
+ safetySettings: z4.array(
870
+ z4.object({
871
+ category: z4.enum([
612
872
  "HARM_CATEGORY_UNSPECIFIED",
613
873
  "HARM_CATEGORY_HATE_SPEECH",
614
874
  "HARM_CATEGORY_DANGEROUS_CONTENT",
@@ -616,7 +876,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
616
876
  "HARM_CATEGORY_SEXUALLY_EXPLICIT",
617
877
  "HARM_CATEGORY_CIVIC_INTEGRITY"
618
878
  ]),
619
- threshold: import_v44.z.enum([
879
+ threshold: z4.enum([
620
880
  "HARM_BLOCK_THRESHOLD_UNSPECIFIED",
621
881
  "BLOCK_LOW_AND_ABOVE",
622
882
  "BLOCK_MEDIUM_AND_ABOVE",
@@ -626,7 +886,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
626
886
  ])
627
887
  })
628
888
  ).optional(),
629
- threshold: import_v44.z.enum([
889
+ threshold: z4.enum([
630
890
  "HARM_BLOCK_THRESHOLD_UNSPECIFIED",
631
891
  "BLOCK_LOW_AND_ABOVE",
632
892
  "BLOCK_MEDIUM_AND_ABOVE",
@@ -639,19 +899,19 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
639
899
  *
640
900
  * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/audio-understanding
641
901
  */
642
- audioTimestamp: import_v44.z.boolean().optional(),
902
+ audioTimestamp: z4.boolean().optional(),
643
903
  /**
644
904
  * Optional. Defines labels used in billing reports. Available on Vertex AI only.
645
905
  *
646
906
  * https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/add-labels-to-api-calls
647
907
  */
648
- labels: import_v44.z.record(import_v44.z.string(), import_v44.z.string()).optional(),
908
+ labels: z4.record(z4.string(), z4.string()).optional(),
649
909
  /**
650
910
  * Optional. If specified, the media resolution specified will be used.
651
911
  *
652
912
  * https://ai.google.dev/api/generate-content#MediaResolution
653
913
  */
654
- mediaResolution: import_v44.z.enum([
914
+ mediaResolution: z4.enum([
655
915
  "MEDIA_RESOLUTION_UNSPECIFIED",
656
916
  "MEDIA_RESOLUTION_LOW",
657
917
  "MEDIA_RESOLUTION_MEDIUM",
@@ -662,8 +922,8 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
662
922
  *
663
923
  * https://ai.google.dev/gemini-api/docs/image-generation#aspect_ratios
664
924
  */
665
- imageConfig: import_v44.z.object({
666
- aspectRatio: import_v44.z.enum([
925
+ imageConfig: z4.object({
926
+ aspectRatio: z4.enum([
667
927
  "1:1",
668
928
  "2:3",
669
929
  "3:2",
@@ -679,7 +939,7 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
679
939
  "1:4",
680
940
  "4:1"
681
941
  ]).optional(),
682
- imageSize: import_v44.z.enum(["1K", "2K", "4K", "512"]).optional()
942
+ imageSize: z4.enum(["1K", "2K", "4K", "512"]).optional()
683
943
  }).optional(),
684
944
  /**
685
945
  * Optional. Configuration for grounding retrieval.
@@ -687,24 +947,47 @@ var googleLanguageModelOptions = (0, import_provider_utils5.lazySchema)(
687
947
  *
688
948
  * https://cloud.google.com/vertex-ai/generative-ai/docs/grounding/grounding-with-google-maps
689
949
  */
690
- retrievalConfig: import_v44.z.object({
691
- latLng: import_v44.z.object({
692
- latitude: import_v44.z.number(),
693
- longitude: import_v44.z.number()
950
+ retrievalConfig: z4.object({
951
+ latLng: z4.object({
952
+ latitude: z4.number(),
953
+ longitude: z4.number()
694
954
  }).optional()
695
- }).optional()
955
+ }).optional(),
956
+ /**
957
+ * Optional. When set to true, function call arguments will be streamed
958
+ * incrementally via partialArgs in streaming responses. Only supported
959
+ * on the Vertex AI API (not the Gemini API) and only for Gemini 3+
960
+ * models.
961
+ *
962
+ * @default false
963
+ *
964
+ * https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc
965
+ */
966
+ streamFunctionCallArguments: z4.boolean().optional(),
967
+ /**
968
+ * Optional. The service tier to use for the request.
969
+ */
970
+ serviceTier: z4.enum(["standard", "flex", "priority"]).optional()
696
971
  })
697
972
  )
698
973
  );
974
/**
 * Maps the provider-option `serviceTier` values ("standard" | "flex" |
 * "priority") to the corresponding Vertex AI service-tier enum names
 * sent on the request body.
 */
var VertexServiceTierMap = Object.fromEntries(
  ["standard", "flex", "priority"].map((tier) => [
    tier,
    `SERVICE_TIER_${tier.toUpperCase()}`
  ])
);
699
979
 
700
980
  // src/google-prepare-tools.ts
701
- var import_provider3 = require("@ai-sdk/provider");
981
+ import {
982
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
983
+ } from "@ai-sdk/provider";
702
984
  function prepareTools({
703
985
  tools,
704
986
  toolChoice,
705
- modelId
987
+ modelId,
988
+ isVertexProvider = false
706
989
  }) {
707
- var _a;
990
+ var _a, _b;
708
991
  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
709
992
  const toolWarnings = [];
710
993
  const isLatest = [
@@ -713,13 +996,14 @@ function prepareTools({
713
996
  "gemini-pro-latest"
714
997
  ].some((id) => id === modelId);
715
998
  const isGemini2orNewer = modelId.includes("gemini-2") || modelId.includes("gemini-3") || modelId.includes("nano-banana") || isLatest;
999
+ const isGemini3orNewer = modelId.includes("gemini-3");
716
1000
  const supportsFileSearch = modelId.includes("gemini-2.5") || modelId.includes("gemini-3");
717
1001
  if (tools == null) {
718
1002
  return { tools: void 0, toolConfig: void 0, toolWarnings };
719
1003
  }
720
1004
  const hasFunctionTools = tools.some((tool) => tool.type === "function");
721
1005
  const hasProviderTools = tools.some((tool) => tool.type === "provider");
722
- if (hasFunctionTools && hasProviderTools) {
1006
+ if (hasFunctionTools && hasProviderTools && !isGemini3orNewer) {
723
1007
  toolWarnings.push({
724
1008
  type: "unsupported",
725
1009
  feature: `combination of function and provider-defined tools`
@@ -770,7 +1054,7 @@ function prepareTools({
770
1054
  toolWarnings.push({
771
1055
  type: "unsupported",
772
1056
  feature: `provider-defined tool ${tool.id}`,
773
- details: "The code execution tools is not supported with other Gemini models than Gemini 2."
1057
+ details: "The code execution tool is not supported with other Gemini models than Gemini 2."
774
1058
  });
775
1059
  }
776
1060
  break;
@@ -824,6 +1108,47 @@ function prepareTools({
824
1108
  break;
825
1109
  }
826
1110
  });
1111
+ if (hasFunctionTools && isGemini3orNewer && googleTools2.length > 0) {
1112
+ const functionDeclarations2 = [];
1113
+ for (const tool of tools) {
1114
+ if (tool.type === "function") {
1115
+ functionDeclarations2.push({
1116
+ name: tool.name,
1117
+ description: (_a = tool.description) != null ? _a : "",
1118
+ parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
1119
+ });
1120
+ }
1121
+ }
1122
+ const combinedToolConfig = {
1123
+ functionCallingConfig: { mode: "VALIDATED" },
1124
+ ...!isVertexProvider && {
1125
+ includeServerSideToolInvocations: true
1126
+ }
1127
+ };
1128
+ if (toolChoice != null) {
1129
+ switch (toolChoice.type) {
1130
+ case "auto":
1131
+ break;
1132
+ case "none":
1133
+ combinedToolConfig.functionCallingConfig = { mode: "NONE" };
1134
+ break;
1135
+ case "required":
1136
+ combinedToolConfig.functionCallingConfig = { mode: "ANY" };
1137
+ break;
1138
+ case "tool":
1139
+ combinedToolConfig.functionCallingConfig = {
1140
+ mode: "ANY",
1141
+ allowedFunctionNames: [toolChoice.toolName]
1142
+ };
1143
+ break;
1144
+ }
1145
+ }
1146
+ return {
1147
+ tools: [...googleTools2, { functionDeclarations: functionDeclarations2 }],
1148
+ toolConfig: combinedToolConfig,
1149
+ toolWarnings
1150
+ };
1151
+ }
827
1152
  return {
828
1153
  tools: googleTools2.length > 0 ? googleTools2 : void 0,
829
1154
  toolConfig: void 0,
@@ -837,7 +1162,7 @@ function prepareTools({
837
1162
  case "function":
838
1163
  functionDeclarations.push({
839
1164
  name: tool.name,
840
- description: (_a = tool.description) != null ? _a : "",
1165
+ description: (_b = tool.description) != null ? _b : "",
841
1166
  parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
842
1167
  });
843
1168
  if (tool.strict === true) {
@@ -900,15 +1225,238 @@ function prepareTools({
900
1225
  };
901
1226
  default: {
902
1227
  const _exhaustiveCheck = type;
903
- throw new import_provider3.UnsupportedFunctionalityError({
1228
+ throw new UnsupportedFunctionalityError2({
904
1229
  functionality: `tool choice type: ${_exhaustiveCheck}`
905
1230
  });
906
1231
  }
907
1232
  }
908
1233
  }
909
1234
 
910
- // src/map-google-generative-ai-finish-reason.ts
911
- function mapGoogleGenerativeAIFinishReason({
1235
+ // src/google-json-accumulator.ts
1236
/**
 * Incrementally rebuilds a tool call's JSON arguments from Google's
 * streaming `partialArgs` entries (each a jsonPath plus a scalar value
 * fragment), producing both the accumulated arguments object and the
 * incremental JSON text deltas needed for streaming output.
 */
var GoogleJSONAccumulator = class {
  constructor() {
    // Accumulated arguments object, mutated in place as fragments arrive.
    this.accumulatedArgs = {};
    // Serialized JSON text emitted so far (concatenation of all deltas).
    this.jsonText = "";
    /**
     * Stack representing the currently "open" containers in the JSON output.
     * Entry 0 is always the root `{` object once the first value is written.
     */
    this.pathStack = [];
    /**
     * Whether a string value is currently "open" (willContinue was true),
     * meaning the closing quote has not yet been emitted.
     */
    this.stringOpen = false;
  }
  /**
   * Applies a batch of partial-arg fragments and returns the updated object
   * plus the JSON text delta produced by this batch.
   *
   * Input: [{jsonPath:"$.brightness",numberValue:50}]
   * Output: { currentJSON:{brightness:50}, textDelta:'{"brightness":50' }
   */
  processPartialArgs(partialArgs) {
    let delta = "";
    for (const arg of partialArgs) {
      // Strip the leading "$." JSONPath root marker.
      const rawPath = arg.jsonPath.replace(/^\$\./, "");
      if (!rawPath) continue;
      const segments = parsePath(rawPath);
      const existingValue = getNestedValue(this.accumulatedArgs, segments);
      // A string fragment for a path that already holds a value continues
      // the previously opened string rather than starting a new leaf.
      const isStringContinuation = arg.stringValue != null && existingValue !== void 0;
      if (isStringContinuation) {
        // JSON-escape the fragment, then drop the surrounding quotes so it
        // can be appended inside the still-open string.
        const escaped = JSON.stringify(arg.stringValue).slice(1, -1);
        setNestedValue(
          this.accumulatedArgs,
          segments,
          existingValue + arg.stringValue
        );
        delta += escaped;
        continue;
      }
      const resolved = resolvePartialArgValue(arg);
      if (resolved == null) continue;
      setNestedValue(this.accumulatedArgs, segments, resolved.value);
      delta += this.emitNavigationTo(segments, arg, resolved.json);
    }
    this.jsonText += delta;
    return {
      currentJSON: this.accumulatedArgs,
      textDelta: delta
    };
  }
  /**
   * Closes all still-open containers and returns the full serialized JSON
   * plus the trailing delta that completes the already-emitted text.
   *
   * NOTE(review): relies on `jsonText` being a strict prefix of
   * JSON.stringify(accumulatedArgs) — i.e. on deltas matching stringify's
   * key order. TODO confirm this holds when paths are revisited.
   *
   * Input: jsonText='{"brightness":50', accumulatedArgs={brightness:50}
   * Output: { finalJSON:'{"brightness":50}', closingDelta:'}' }
   */
  finalize() {
    const finalArgs = JSON.stringify(this.accumulatedArgs);
    const closingDelta = finalArgs.slice(this.jsonText.length);
    return { finalJSON: finalArgs, closingDelta };
  }
  /**
   * Emits the root `{` exactly once, on the first value written.
   *
   * Input: pathStack=[] (first call) or pathStack=[root,...] (subsequent calls)
   * Output: '{' (first call) or '' (subsequent calls)
   */
  ensureRoot() {
    if (this.pathStack.length === 0) {
      this.pathStack.push({ segment: "", isArray: false, childCount: 0 });
      return "{";
    }
    return "";
  }
  /**
   * Emits the JSON text fragment needed to navigate from the current open
   * path to the new leaf at `targetSegments`, then writes the value.
   *
   * Input: targetSegments=["recipe","name"], arg={jsonPath:"$.recipe.name",stringValue:"Lasagna"}, valueJson='"Lasagna"'
   * Output: '{"recipe":{"name":"Lasagna"'
   */
  emitNavigationTo(targetSegments, arg, valueJson) {
    let fragment = "";
    // Close a dangling open string before any structural navigation.
    if (this.stringOpen) {
      fragment += '"';
      this.stringOpen = false;
    }
    fragment += this.ensureRoot();
    const targetContainerSegments = targetSegments.slice(0, -1);
    const leafSegment = targetSegments[targetSegments.length - 1];
    const commonDepth = this.findCommonStackDepth(targetContainerSegments);
    fragment += this.closeDownTo(commonDepth);
    fragment += this.openDownTo(targetContainerSegments, leafSegment);
    fragment += this.emitLeaf(leafSegment, arg, valueJson);
    return fragment;
  }
  /**
   * Returns the stack depth to preserve when navigating to a new target
   * container path. Always >= 1 (the root is never popped).
   *
   * Input: stack=[root,"recipe","ingredients",0], target=["recipe","ingredients",1]
   * Output: 3 (keep root+"recipe"+"ingredients")
   */
  findCommonStackDepth(targetContainer) {
    const maxDepth = Math.min(
      this.pathStack.length - 1,
      targetContainer.length
    );
    let common = 0;
    for (let i = 0; i < maxDepth; i++) {
      // pathStack[0] is the root, so stack entry i+1 pairs with
      // container segment i.
      if (this.pathStack[i + 1].segment === targetContainer[i]) {
        common++;
      } else {
        break;
      }
    }
    return common + 1;
  }
  /**
   * Closes containers from the current stack depth back down to `targetDepth`.
   *
   * Input: this.pathStack=[root,"recipe","ingredients",0], targetDepth=3
   * Output: '}'
   */
  closeDownTo(targetDepth) {
    let fragment = "";
    while (this.pathStack.length > targetDepth) {
      const entry = this.pathStack.pop();
      fragment += entry.isArray ? "]" : "}";
    }
    return fragment;
  }
  /**
   * Opens containers from the current stack depth down to the full target
   * container path, emitting opening `{`, `[`, keys, and commas as needed.
   * `leafSegment` is used to determine if the innermost container is an array.
   *
   * Input: this.pathStack=[root], targetContainer=["recipe","ingredients"], leafSegment=0
   * Output: '"recipe":{"ingredients":['
   */
  openDownTo(targetContainer, leafSegment) {
    let fragment = "";
    const startIdx = this.pathStack.length - 1;
    for (let i = startIdx; i < targetContainer.length; i++) {
      const seg = targetContainer[i];
      const parentEntry = this.pathStack[this.pathStack.length - 1];
      // A comma is required before every child after the first.
      if (parentEntry.childCount > 0) {
        fragment += ",";
      }
      parentEntry.childCount++;
      // String segments are object keys; numeric segments are array
      // indices and emit no key.
      if (typeof seg === "string") {
        fragment += `${JSON.stringify(seg)}:`;
      }
      // The next segment (or the leaf) decides whether this container
      // opens as an array or an object.
      const childSeg = i + 1 < targetContainer.length ? targetContainer[i + 1] : leafSegment;
      const isArray = typeof childSeg === "number";
      fragment += isArray ? "[" : "{";
      this.pathStack.push({ segment: seg, isArray, childCount: 0 });
    }
    return fragment;
  }
  /**
   * Emits the comma, key, and value for a leaf entry in the current container.
   *
   * Input: leafSegment="name", arg={stringValue:"Lasagna"}, valueJson='"Lasagna"'
   * Output: '"name":"Lasagna"' (or ',"name":"Lasagna"' if container.childCount > 0)
   */
  emitLeaf(leafSegment, arg, valueJson) {
    let fragment = "";
    const container = this.pathStack[this.pathStack.length - 1];
    if (container.childCount > 0) {
      fragment += ",";
    }
    container.childCount++;
    if (typeof leafSegment === "string") {
      fragment += `${JSON.stringify(leafSegment)}:`;
    }
    // An unfinished string keeps its closing quote back until either a
    // continuation arrives or navigation forces it closed.
    if (arg.stringValue != null && arg.willContinue) {
      fragment += valueJson.slice(0, -1);
      this.stringOpen = true;
    } else {
      fragment += valueJson;
    }
    return fragment;
  }
};
1415
/**
 * Tokenizes a dotted JSONPath fragment (root marker already stripped) into
 * path segments: object keys stay strings, `[n]` indices become numbers.
 * e.g. "recipe.ingredients[0].name" -> ["recipe", "ingredients", 0, "name"]
 */
function parsePath(rawPath) {
  return rawPath.split(".").flatMap((token) => {
    const bracketStart = token.indexOf("[");
    // Plain key without any index suffix.
    if (bracketStart === -1) return [token];
    const pieces = [];
    // Key portion before the first bracket, if any (absent for "[0]").
    if (bracketStart > 0) pieces.push(token.slice(0, bracketStart));
    // Every "[n]" suffix becomes a numeric index segment.
    for (const match of token.matchAll(/\[(\d+)\]/g)) {
      pieces.push(Number.parseInt(match[1], 10));
    }
    return pieces;
  });
}
1430
/**
 * Walks `segments` into `obj` and returns the value at that path, or
 * `undefined` if any intermediate node is missing or not an object.
 * An empty segment list returns `obj` itself.
 */
function getNestedValue(obj, segments) {
  return segments.reduce(
    (node, key) => node != null && typeof node === "object" ? node[key] : void 0,
    obj
  );
}
1438
/**
 * Writes `value` at the path described by `segments`, creating any missing
 * intermediate containers on the way down. Whether a created container is
 * an array or an object is decided by the type of the following segment
 * (number -> array, string -> object). Mutates `obj` in place.
 */
function setNestedValue(obj, segments, value) {
  const containerPath = segments.slice(0, -1);
  let cursor = obj;
  containerPath.forEach((key, index) => {
    if (cursor[key] == null) {
      // Peek at the next segment to pick the container type.
      const nextKey = segments[index + 1];
      cursor[key] = typeof nextKey === "number" ? [] : {};
    }
    cursor = cursor[key];
  });
  cursor[segments[segments.length - 1]] = value;
}
1450
/**
 * Extracts the scalar value carried by a partial-arg fragment and its JSON
 * serialization. Checks stringValue, then numberValue, then boolValue via
 * nullish coalescing (so 0, "" and false are all kept); falls back to an
 * explicit `nullValue` marker; returns undefined when no value is present.
 */
function resolvePartialArgValue(arg) {
  const value = arg.stringValue ?? arg.numberValue ?? arg.boolValue;
  if (value != null) {
    return { value, json: JSON.stringify(value) };
  }
  if ("nullValue" in arg) {
    return { value: null, json: "null" };
  }
  return void 0;
}
1457
+
1458
+ // src/map-google-finish-reason.ts
1459
+ function mapGoogleFinishReason({
912
1460
  finishReason,
913
1461
  hasToolCalls
914
1462
  }) {
@@ -933,14 +1481,23 @@ function mapGoogleGenerativeAIFinishReason({
933
1481
  }
934
1482
  }
935
1483
 
936
- // src/google-generative-ai-language-model.ts
937
- var GoogleGenerativeAILanguageModel = class {
1484
+ // src/google-language-model.ts
1485
+ var GoogleLanguageModel = class _GoogleLanguageModel {
938
1486
  constructor(modelId, config) {
939
1487
  this.specificationVersion = "v4";
940
1488
  var _a;
941
1489
  this.modelId = modelId;
942
1490
  this.config = config;
943
- this.generateId = (_a = config.generateId) != null ? _a : import_provider_utils6.generateId;
1491
+ this.generateId = (_a = config.generateId) != null ? _a : generateId;
1492
+ }
1493
+ static [WORKFLOW_SERIALIZE2](model) {
1494
+ return serializeModelOptions2({
1495
+ modelId: model.modelId,
1496
+ config: model.config
1497
+ });
1498
+ }
1499
+ static [WORKFLOW_DESERIALIZE2](options) {
1500
+ return new _GoogleLanguageModel(options.modelId, options.config);
944
1501
  }
945
1502
  get provider() {
946
1503
  return this.config.provider;
@@ -962,36 +1519,54 @@ var GoogleGenerativeAILanguageModel = class {
962
1519
  seed,
963
1520
  tools,
964
1521
  toolChoice,
1522
+ reasoning,
965
1523
  providerOptions
966
- }) {
967
- var _a;
1524
+ }, { isStreaming = false } = {}) {
1525
+ var _a, _b;
968
1526
  const warnings = [];
969
- const providerOptionsName = this.config.provider.includes("vertex") ? "vertex" : "google";
970
- let googleOptions = await (0, import_provider_utils6.parseProviderOptions)({
971
- provider: providerOptionsName,
972
- providerOptions,
973
- schema: googleLanguageModelOptions
974
- });
975
- if (googleOptions == null && providerOptionsName !== "google") {
976
- googleOptions = await (0, import_provider_utils6.parseProviderOptions)({
1527
+ const providerOptionsNames = this.config.provider.includes("vertex") ? ["googleVertex", "vertex"] : ["google"];
1528
+ let googleOptions;
1529
+ for (const name of providerOptionsNames) {
1530
+ googleOptions = await parseProviderOptions2({
1531
+ provider: name,
1532
+ providerOptions,
1533
+ schema: googleLanguageModelOptions
1534
+ });
1535
+ if (googleOptions != null) break;
1536
+ }
1537
+ if (googleOptions == null && !providerOptionsNames.includes("google")) {
1538
+ googleOptions = await parseProviderOptions2({
977
1539
  provider: "google",
978
1540
  providerOptions,
979
1541
  schema: googleLanguageModelOptions
980
1542
  });
981
1543
  }
1544
+ const isVertexProvider = this.config.provider.startsWith("google.vertex.");
982
1545
  if ((tools == null ? void 0 : tools.some(
983
1546
  (tool) => tool.type === "provider" && tool.id === "google.vertex_rag_store"
984
- )) && !this.config.provider.startsWith("google.vertex.")) {
1547
+ )) && !isVertexProvider) {
985
1548
  warnings.push({
986
1549
  type: "other",
987
1550
  message: `The 'vertex_rag_store' tool is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
988
1551
  });
989
1552
  }
1553
+ if ((googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) && !isVertexProvider) {
1554
+ warnings.push({
1555
+ type: "other",
1556
+ message: `'streamFunctionCallArguments' is only supported on the Vertex AI API and will be ignored with the current Google provider (${this.config.provider}). See https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc`
1557
+ });
1558
+ }
1559
+ let sanitizedServiceTier = googleOptions == null ? void 0 : googleOptions.serviceTier;
1560
+ if ((googleOptions == null ? void 0 : googleOptions.serviceTier) && isVertexProvider) {
1561
+ sanitizedServiceTier = VertexServiceTierMap[googleOptions.serviceTier];
1562
+ }
990
1563
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
991
- const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
992
- prompt,
993
- { isGemmaModel, providerOptionsName }
994
- );
1564
+ const supportsFunctionResponseParts = this.modelId.startsWith("gemini-3");
1565
+ const { contents, systemInstruction } = convertToGoogleMessages(prompt, {
1566
+ isGemmaModel,
1567
+ providerOptionsNames,
1568
+ supportsFunctionResponseParts
1569
+ });
995
1570
  const {
996
1571
  tools: googleTools2,
997
1572
  toolConfig: googleToolConfig,
@@ -999,8 +1574,28 @@ var GoogleGenerativeAILanguageModel = class {
999
1574
  } = prepareTools({
1000
1575
  tools,
1001
1576
  toolChoice,
1002
- modelId: this.modelId
1577
+ modelId: this.modelId,
1578
+ isVertexProvider
1579
+ });
1580
+ const resolvedThinking = resolveThinkingConfig({
1581
+ reasoning,
1582
+ modelId: this.modelId,
1583
+ warnings
1003
1584
  });
1585
+ const thinkingConfig = (googleOptions == null ? void 0 : googleOptions.thinkingConfig) || resolvedThinking ? { ...resolvedThinking, ...googleOptions == null ? void 0 : googleOptions.thinkingConfig } : void 0;
1586
+ const streamFunctionCallArguments = isStreaming && isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : false : void 0;
1587
+ const toolConfig = googleToolConfig || streamFunctionCallArguments || (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1588
+ ...googleToolConfig,
1589
+ ...streamFunctionCallArguments && {
1590
+ functionCallingConfig: {
1591
+ ...googleToolConfig == null ? void 0 : googleToolConfig.functionCallingConfig,
1592
+ streamFunctionCallArguments: true
1593
+ }
1594
+ },
1595
+ ...(googleOptions == null ? void 0 : googleOptions.retrievalConfig) && {
1596
+ retrievalConfig: googleOptions.retrievalConfig
1597
+ }
1598
+ } : void 0;
1004
1599
  return {
1005
1600
  args: {
1006
1601
  generationConfig: {
@@ -1018,13 +1613,13 @@ var GoogleGenerativeAILanguageModel = class {
1018
1613
  responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
1019
1614
  // so this is needed as an escape hatch:
1020
1615
  // TODO convert into provider option
1021
- ((_a = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
1616
+ ((_b = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _b : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
1022
1617
  ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
1023
1618
  audioTimestamp: googleOptions.audioTimestamp
1024
1619
  },
1025
1620
  // provider options:
1026
1621
  responseModalities: googleOptions == null ? void 0 : googleOptions.responseModalities,
1027
- thinkingConfig: googleOptions == null ? void 0 : googleOptions.thinkingConfig,
1622
+ thinkingConfig,
1028
1623
  ...(googleOptions == null ? void 0 : googleOptions.mediaResolution) && {
1029
1624
  mediaResolution: googleOptions.mediaResolution
1030
1625
  },
@@ -1036,36 +1631,37 @@ var GoogleGenerativeAILanguageModel = class {
1036
1631
  systemInstruction: isGemmaModel ? void 0 : systemInstruction,
1037
1632
  safetySettings: googleOptions == null ? void 0 : googleOptions.safetySettings,
1038
1633
  tools: googleTools2,
1039
- toolConfig: (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1040
- ...googleToolConfig,
1041
- retrievalConfig: googleOptions.retrievalConfig
1042
- } : googleToolConfig,
1634
+ toolConfig,
1043
1635
  cachedContent: googleOptions == null ? void 0 : googleOptions.cachedContent,
1044
- labels: googleOptions == null ? void 0 : googleOptions.labels
1636
+ labels: googleOptions == null ? void 0 : googleOptions.labels,
1637
+ serviceTier: sanitizedServiceTier
1045
1638
  },
1046
1639
  warnings: [...warnings, ...toolWarnings],
1047
- providerOptionsName
1640
+ providerOptionsNames
1048
1641
  };
1049
1642
  }
1050
1643
  async doGenerate(options) {
1051
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
1052
- const { args, warnings, providerOptionsName } = await this.getArgs(options);
1053
- const mergedHeaders = (0, import_provider_utils6.combineHeaders)(
1054
- await (0, import_provider_utils6.resolve)(this.config.headers),
1644
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
1645
+ const { args, warnings, providerOptionsNames } = await this.getArgs(options);
1646
+ const wrapProviderMetadata = (payload) => Object.fromEntries(
1647
+ providerOptionsNames.map((name) => [name, payload])
1648
+ );
1649
+ const mergedHeaders = combineHeaders2(
1650
+ this.config.headers ? await resolve2(this.config.headers) : void 0,
1055
1651
  options.headers
1056
1652
  );
1057
1653
  const {
1058
1654
  responseHeaders,
1059
1655
  value: response,
1060
1656
  rawValue: rawResponse
1061
- } = await (0, import_provider_utils6.postJsonToApi)({
1657
+ } = await postJsonToApi2({
1062
1658
  url: `${this.config.baseURL}/${getModelPath(
1063
1659
  this.modelId
1064
1660
  )}:generateContent`,
1065
1661
  headers: mergedHeaders,
1066
1662
  body: args,
1067
1663
  failedResponseHandler: googleFailedResponseHandler,
1068
- successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(responseSchema),
1664
+ successfulResponseHandler: createJsonResponseHandler2(responseSchema),
1069
1665
  abortSignal: options.abortSignal,
1070
1666
  fetch: this.config.fetch
1071
1667
  });
@@ -1074,6 +1670,7 @@ var GoogleGenerativeAILanguageModel = class {
1074
1670
  const parts = (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [];
1075
1671
  const usageMetadata = response.usageMetadata;
1076
1672
  let lastCodeExecutionToolCallId;
1673
+ let lastServerToolCallId;
1077
1674
  for (const part of parts) {
1078
1675
  if ("executableCode" in part && ((_c = part.executableCode) == null ? void 0 : _c.code)) {
1079
1676
  const toolCallId = this.config.generateId();
@@ -1098,11 +1695,9 @@ var GoogleGenerativeAILanguageModel = class {
1098
1695
  });
1099
1696
  lastCodeExecutionToolCallId = void 0;
1100
1697
  } else if ("text" in part && part.text != null) {
1101
- const thoughtSignatureMetadata = part.thoughtSignature ? {
1102
- [providerOptionsName]: {
1103
- thoughtSignature: part.thoughtSignature
1104
- }
1105
- } : void 0;
1698
+ const thoughtSignatureMetadata = part.thoughtSignature ? wrapProviderMetadata({
1699
+ thoughtSignature: part.thoughtSignature
1700
+ }) : void 0;
1106
1701
  if (part.text.length === 0) {
1107
1702
  if (thoughtSignatureMetadata != null && content.length > 0) {
1108
1703
  const lastContent = content[content.length - 1];
@@ -1115,65 +1710,95 @@ var GoogleGenerativeAILanguageModel = class {
1115
1710
  providerMetadata: thoughtSignatureMetadata
1116
1711
  });
1117
1712
  }
1118
- } else if ("functionCall" in part) {
1713
+ } else if ("functionCall" in part && part.functionCall.name != null && part.functionCall.args != null) {
1119
1714
  content.push({
1120
1715
  type: "tool-call",
1121
1716
  toolCallId: this.config.generateId(),
1122
1717
  toolName: part.functionCall.name,
1123
1718
  input: JSON.stringify(part.functionCall.args),
1124
- providerMetadata: part.thoughtSignature ? {
1125
- [providerOptionsName]: {
1126
- thoughtSignature: part.thoughtSignature
1127
- }
1128
- } : void 0
1719
+ providerMetadata: part.thoughtSignature ? wrapProviderMetadata({
1720
+ thoughtSignature: part.thoughtSignature
1721
+ }) : void 0
1129
1722
  });
1130
1723
  } else if ("inlineData" in part) {
1131
1724
  const hasThought = part.thought === true;
1132
1725
  const hasThoughtSignature = !!part.thoughtSignature;
1133
1726
  content.push({
1134
- type: "file",
1135
- data: part.inlineData.data,
1727
+ type: hasThought ? "reasoning-file" : "file",
1728
+ data: { type: "data", data: part.inlineData.data },
1136
1729
  mediaType: part.inlineData.mimeType,
1137
- providerMetadata: hasThought || hasThoughtSignature ? {
1138
- [providerOptionsName]: {
1139
- ...hasThought ? { thought: true } : {},
1140
- ...hasThoughtSignature ? { thoughtSignature: part.thoughtSignature } : {}
1141
- }
1142
- } : void 0
1730
+ providerMetadata: hasThoughtSignature ? wrapProviderMetadata({
1731
+ thoughtSignature: part.thoughtSignature
1732
+ }) : void 0
1143
1733
  });
1734
+ } else if ("toolCall" in part && part.toolCall) {
1735
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : this.config.generateId();
1736
+ lastServerToolCallId = toolCallId;
1737
+ content.push({
1738
+ type: "tool-call",
1739
+ toolCallId,
1740
+ toolName: `server:${part.toolCall.toolType}`,
1741
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
1742
+ providerExecuted: true,
1743
+ dynamic: true,
1744
+ providerMetadata: part.thoughtSignature ? wrapProviderMetadata({
1745
+ thoughtSignature: part.thoughtSignature,
1746
+ serverToolCallId: toolCallId,
1747
+ serverToolType: part.toolCall.toolType
1748
+ }) : wrapProviderMetadata({
1749
+ serverToolCallId: toolCallId,
1750
+ serverToolType: part.toolCall.toolType
1751
+ })
1752
+ });
1753
+ } else if ("toolResponse" in part && part.toolResponse) {
1754
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : this.config.generateId();
1755
+ content.push({
1756
+ type: "tool-result",
1757
+ toolCallId: responseToolCallId,
1758
+ toolName: `server:${part.toolResponse.toolType}`,
1759
+ result: (_h = part.toolResponse.response) != null ? _h : {},
1760
+ providerMetadata: part.thoughtSignature ? wrapProviderMetadata({
1761
+ thoughtSignature: part.thoughtSignature,
1762
+ serverToolCallId: responseToolCallId,
1763
+ serverToolType: part.toolResponse.toolType
1764
+ }) : wrapProviderMetadata({
1765
+ serverToolCallId: responseToolCallId,
1766
+ serverToolType: part.toolResponse.toolType
1767
+ })
1768
+ });
1769
+ lastServerToolCallId = void 0;
1144
1770
  }
1145
1771
  }
1146
- const sources = (_e = extractSources({
1772
+ const sources = (_i = extractSources({
1147
1773
  groundingMetadata: candidate.groundingMetadata,
1148
1774
  generateId: this.config.generateId
1149
- })) != null ? _e : [];
1775
+ })) != null ? _i : [];
1150
1776
  for (const source of sources) {
1151
1777
  content.push(source);
1152
1778
  }
1153
1779
  return {
1154
1780
  content,
1155
1781
  finishReason: {
1156
- unified: mapGoogleGenerativeAIFinishReason({
1782
+ unified: mapGoogleFinishReason({
1157
1783
  finishReason: candidate.finishReason,
1158
1784
  // Only count client-executed tool calls for finish reason determination.
1159
1785
  hasToolCalls: content.some(
1160
1786
  (part) => part.type === "tool-call" && !part.providerExecuted
1161
1787
  )
1162
1788
  }),
1163
- raw: (_f = candidate.finishReason) != null ? _f : void 0
1789
+ raw: (_j = candidate.finishReason) != null ? _j : void 0
1164
1790
  },
1165
- usage: convertGoogleGenerativeAIUsage(usageMetadata),
1791
+ usage: convertGoogleUsage(usageMetadata),
1166
1792
  warnings,
1167
- providerMetadata: {
1168
- [providerOptionsName]: {
1169
- promptFeedback: (_g = response.promptFeedback) != null ? _g : null,
1170
- groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
1171
- urlContextMetadata: (_i = candidate.urlContextMetadata) != null ? _i : null,
1172
- safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
1173
- usageMetadata: usageMetadata != null ? usageMetadata : null,
1174
- finishMessage: (_k = candidate.finishMessage) != null ? _k : null
1175
- }
1176
- },
1793
+ providerMetadata: wrapProviderMetadata({
1794
+ promptFeedback: (_k = response.promptFeedback) != null ? _k : null,
1795
+ groundingMetadata: (_l = candidate.groundingMetadata) != null ? _l : null,
1796
+ urlContextMetadata: (_m = candidate.urlContextMetadata) != null ? _m : null,
1797
+ safetyRatings: (_n = candidate.safetyRatings) != null ? _n : null,
1798
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
1799
+ finishMessage: (_o = candidate.finishMessage) != null ? _o : null,
1800
+ serviceTier: (_p = response.serviceTier) != null ? _p : null
1801
+ }),
1177
1802
  request: { body: args },
1178
1803
  response: {
1179
1804
  // TODO timestamp, model id, id
@@ -1183,19 +1808,25 @@ var GoogleGenerativeAILanguageModel = class {
1183
1808
  };
1184
1809
  }
1185
1810
  async doStream(options) {
1186
- const { args, warnings, providerOptionsName } = await this.getArgs(options);
1187
- const headers = (0, import_provider_utils6.combineHeaders)(
1188
- await (0, import_provider_utils6.resolve)(this.config.headers),
1811
+ const { args, warnings, providerOptionsNames } = await this.getArgs(
1812
+ options,
1813
+ { isStreaming: true }
1814
+ );
1815
+ const wrapProviderMetadata = (payload) => Object.fromEntries(
1816
+ providerOptionsNames.map((name) => [name, payload])
1817
+ );
1818
+ const headers = combineHeaders2(
1819
+ this.config.headers ? await resolve2(this.config.headers) : void 0,
1189
1820
  options.headers
1190
1821
  );
1191
- const { responseHeaders, value: response } = await (0, import_provider_utils6.postJsonToApi)({
1822
+ const { responseHeaders, value: response } = await postJsonToApi2({
1192
1823
  url: `${this.config.baseURL}/${getModelPath(
1193
1824
  this.modelId
1194
1825
  )}:streamGenerateContent?alt=sse`,
1195
1826
  headers,
1196
1827
  body: args,
1197
1828
  failedResponseHandler: googleFailedResponseHandler,
1198
- successfulResponseHandler: (0, import_provider_utils6.createEventSourceResponseHandler)(chunkSchema),
1829
+ successfulResponseHandler: createEventSourceResponseHandler(chunkSchema),
1199
1830
  abortSignal: options.abortSignal,
1200
1831
  fetch: this.config.fetch
1201
1832
  });
@@ -1207,6 +1838,7 @@ var GoogleGenerativeAILanguageModel = class {
1207
1838
  let providerMetadata = void 0;
1208
1839
  let lastGroundingMetadata = null;
1209
1840
  let lastUrlContextMetadata = null;
1841
+ let serviceTier = null;
1210
1842
  const generateId3 = this.config.generateId;
1211
1843
  let hasToolCalls = false;
1212
1844
  let currentTextBlockId = null;
@@ -1214,6 +1846,8 @@ var GoogleGenerativeAILanguageModel = class {
1214
1846
  let blockCounter = 0;
1215
1847
  const emittedSourceUrls = /* @__PURE__ */ new Set();
1216
1848
  let lastCodeExecutionToolCallId;
1849
+ let lastServerToolCallId;
1850
+ const activeStreamingToolCalls = [];
1217
1851
  return {
1218
1852
  stream: response.pipeThrough(
1219
1853
  new TransformStream({
@@ -1221,7 +1855,7 @@ var GoogleGenerativeAILanguageModel = class {
1221
1855
  controller.enqueue({ type: "stream-start", warnings });
1222
1856
  },
1223
1857
  transform(chunk, controller) {
1224
- var _a, _b, _c, _d, _e, _f, _g;
1858
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
1225
1859
  if (options.includeRawChunks) {
1226
1860
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1227
1861
  }
@@ -1234,6 +1868,9 @@ var GoogleGenerativeAILanguageModel = class {
1234
1868
  if (usageMetadata != null) {
1235
1869
  usage = usageMetadata;
1236
1870
  }
1871
+ if (value.serviceTier != null) {
1872
+ serviceTier = value.serviceTier;
1873
+ }
1237
1874
  const candidate = (_a = value.candidates) == null ? void 0 : _a[0];
1238
1875
  if (candidate == null) {
1239
1876
  return;
@@ -1285,11 +1922,9 @@ var GoogleGenerativeAILanguageModel = class {
1285
1922
  lastCodeExecutionToolCallId = void 0;
1286
1923
  }
1287
1924
  } else if ("text" in part && part.text != null) {
1288
- const thoughtSignatureMetadata = part.thoughtSignature ? {
1289
- [providerOptionsName]: {
1290
- thoughtSignature: part.thoughtSignature
1291
- }
1292
- } : void 0;
1925
+ const thoughtSignatureMetadata = part.thoughtSignature ? wrapProviderMetadata({
1926
+ thoughtSignature: part.thoughtSignature
1927
+ }) : void 0;
1293
1928
  if (part.text.length === 0) {
1294
1929
  if (thoughtSignatureMetadata != null && currentTextBlockId !== null) {
1295
1930
  controller.enqueue({
@@ -1361,50 +1996,151 @@ var GoogleGenerativeAILanguageModel = class {
1361
1996
  }
1362
1997
  const hasThought = part.thought === true;
1363
1998
  const hasThoughtSignature = !!part.thoughtSignature;
1364
- const fileMeta = hasThought || hasThoughtSignature ? {
1365
- [providerOptionsName]: {
1366
- ...hasThought ? { thought: true } : {},
1367
- ...hasThoughtSignature ? { thoughtSignature: part.thoughtSignature } : {}
1368
- }
1369
- } : void 0;
1999
+ const fileMeta = hasThoughtSignature ? wrapProviderMetadata({
2000
+ thoughtSignature: part.thoughtSignature
2001
+ }) : void 0;
1370
2002
  controller.enqueue({
1371
- type: "file",
2003
+ type: hasThought ? "reasoning-file" : "file",
1372
2004
  mediaType: part.inlineData.mimeType,
1373
- data: part.inlineData.data,
2005
+ data: { type: "data", data: part.inlineData.data },
1374
2006
  providerMetadata: fileMeta
1375
2007
  });
2008
+ } else if ("toolCall" in part && part.toolCall) {
2009
+ const toolCallId = (_e = part.toolCall.id) != null ? _e : generateId3();
2010
+ lastServerToolCallId = toolCallId;
2011
+ const serverMeta = wrapProviderMetadata({
2012
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
2013
+ serverToolCallId: toolCallId,
2014
+ serverToolType: part.toolCall.toolType
2015
+ });
2016
+ controller.enqueue({
2017
+ type: "tool-call",
2018
+ toolCallId,
2019
+ toolName: `server:${part.toolCall.toolType}`,
2020
+ input: JSON.stringify((_f = part.toolCall.args) != null ? _f : {}),
2021
+ providerExecuted: true,
2022
+ dynamic: true,
2023
+ providerMetadata: serverMeta
2024
+ });
2025
+ } else if ("toolResponse" in part && part.toolResponse) {
2026
+ const responseToolCallId = (_g = lastServerToolCallId != null ? lastServerToolCallId : part.toolResponse.id) != null ? _g : generateId3();
2027
+ const serverMeta = wrapProviderMetadata({
2028
+ ...part.thoughtSignature ? { thoughtSignature: part.thoughtSignature } : {},
2029
+ serverToolCallId: responseToolCallId,
2030
+ serverToolType: part.toolResponse.toolType
2031
+ });
2032
+ controller.enqueue({
2033
+ type: "tool-result",
2034
+ toolCallId: responseToolCallId,
2035
+ toolName: `server:${part.toolResponse.toolType}`,
2036
+ result: (_h = part.toolResponse.response) != null ? _h : {},
2037
+ providerMetadata: serverMeta
2038
+ });
2039
+ lastServerToolCallId = void 0;
1376
2040
  }
1377
2041
  }
1378
- const toolCallDeltas = getToolCallsFromParts({
1379
- parts: content.parts,
1380
- generateId: generateId3,
1381
- providerOptionsName
1382
- });
1383
- if (toolCallDeltas != null) {
1384
- for (const toolCall of toolCallDeltas) {
2042
+ for (const part of parts) {
2043
+ if (!("functionCall" in part)) continue;
2044
+ const providerMeta = part.thoughtSignature ? wrapProviderMetadata({
2045
+ thoughtSignature: part.thoughtSignature
2046
+ }) : void 0;
2047
+ const isStreamingChunk = part.functionCall.partialArgs != null || part.functionCall.name != null && part.functionCall.willContinue === true;
2048
+ const isTerminalChunk = part.functionCall.name == null && part.functionCall.args == null && part.functionCall.partialArgs == null && part.functionCall.willContinue == null;
2049
+ const isCompleteCall = part.functionCall.name != null && part.functionCall.args != null && part.functionCall.partialArgs == null;
2050
+ if (isStreamingChunk) {
2051
+ if (part.functionCall.name != null && part.functionCall.willContinue === true) {
2052
+ const toolCallId = generateId3();
2053
+ const accumulator = new GoogleJSONAccumulator();
2054
+ activeStreamingToolCalls.push({
2055
+ toolCallId,
2056
+ toolName: part.functionCall.name,
2057
+ accumulator,
2058
+ providerMetadata: providerMeta
2059
+ });
2060
+ controller.enqueue({
2061
+ type: "tool-input-start",
2062
+ id: toolCallId,
2063
+ toolName: part.functionCall.name,
2064
+ providerMetadata: providerMeta
2065
+ });
2066
+ if (part.functionCall.partialArgs != null) {
2067
+ const { textDelta } = accumulator.processPartialArgs(
2068
+ part.functionCall.partialArgs
2069
+ );
2070
+ if (textDelta.length > 0) {
2071
+ controller.enqueue({
2072
+ type: "tool-input-delta",
2073
+ id: toolCallId,
2074
+ delta: textDelta,
2075
+ providerMetadata: providerMeta
2076
+ });
2077
+ }
2078
+ }
2079
+ } else if (part.functionCall.partialArgs != null && activeStreamingToolCalls.length > 0) {
2080
+ const active = activeStreamingToolCalls[activeStreamingToolCalls.length - 1];
2081
+ const { textDelta } = active.accumulator.processPartialArgs(
2082
+ part.functionCall.partialArgs
2083
+ );
2084
+ if (textDelta.length > 0) {
2085
+ controller.enqueue({
2086
+ type: "tool-input-delta",
2087
+ id: active.toolCallId,
2088
+ delta: textDelta,
2089
+ providerMetadata: providerMeta
2090
+ });
2091
+ }
2092
+ }
2093
+ } else if (isTerminalChunk && activeStreamingToolCalls.length > 0) {
2094
+ const active = activeStreamingToolCalls.pop();
2095
+ const { finalJSON, closingDelta } = active.accumulator.finalize();
2096
+ if (closingDelta.length > 0) {
2097
+ controller.enqueue({
2098
+ type: "tool-input-delta",
2099
+ id: active.toolCallId,
2100
+ delta: closingDelta,
2101
+ providerMetadata: active.providerMetadata
2102
+ });
2103
+ }
2104
+ controller.enqueue({
2105
+ type: "tool-input-end",
2106
+ id: active.toolCallId,
2107
+ providerMetadata: active.providerMetadata
2108
+ });
2109
+ controller.enqueue({
2110
+ type: "tool-call",
2111
+ toolCallId: active.toolCallId,
2112
+ toolName: active.toolName,
2113
+ input: finalJSON,
2114
+ providerMetadata: active.providerMetadata
2115
+ });
2116
+ hasToolCalls = true;
2117
+ } else if (isCompleteCall) {
2118
+ const toolCallId = generateId3();
2119
+ const toolName = part.functionCall.name;
2120
+ const args2 = typeof part.functionCall.args === "string" ? part.functionCall.args : JSON.stringify((_i = part.functionCall.args) != null ? _i : {});
1385
2121
  controller.enqueue({
1386
2122
  type: "tool-input-start",
1387
- id: toolCall.toolCallId,
1388
- toolName: toolCall.toolName,
1389
- providerMetadata: toolCall.providerMetadata
2123
+ id: toolCallId,
2124
+ toolName,
2125
+ providerMetadata: providerMeta
1390
2126
  });
1391
2127
  controller.enqueue({
1392
2128
  type: "tool-input-delta",
1393
- id: toolCall.toolCallId,
1394
- delta: toolCall.args,
1395
- providerMetadata: toolCall.providerMetadata
2129
+ id: toolCallId,
2130
+ delta: args2,
2131
+ providerMetadata: providerMeta
1396
2132
  });
1397
2133
  controller.enqueue({
1398
2134
  type: "tool-input-end",
1399
- id: toolCall.toolCallId,
1400
- providerMetadata: toolCall.providerMetadata
2135
+ id: toolCallId,
2136
+ providerMetadata: providerMeta
1401
2137
  });
1402
2138
  controller.enqueue({
1403
2139
  type: "tool-call",
1404
- toolCallId: toolCall.toolCallId,
1405
- toolName: toolCall.toolName,
1406
- input: toolCall.args,
1407
- providerMetadata: toolCall.providerMetadata
2140
+ toolCallId,
2141
+ toolName,
2142
+ input: args2,
2143
+ providerMetadata: providerMeta
1408
2144
  });
1409
2145
  hasToolCalls = true;
1410
2146
  }
@@ -1412,22 +2148,21 @@ var GoogleGenerativeAILanguageModel = class {
1412
2148
  }
1413
2149
  if (candidate.finishReason != null) {
1414
2150
  finishReason = {
1415
- unified: mapGoogleGenerativeAIFinishReason({
2151
+ unified: mapGoogleFinishReason({
1416
2152
  finishReason: candidate.finishReason,
1417
2153
  hasToolCalls
1418
2154
  }),
1419
2155
  raw: candidate.finishReason
1420
2156
  };
1421
- providerMetadata = {
1422
- [providerOptionsName]: {
1423
- promptFeedback: (_e = value.promptFeedback) != null ? _e : null,
1424
- groundingMetadata: lastGroundingMetadata,
1425
- urlContextMetadata: lastUrlContextMetadata,
1426
- safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null,
1427
- usageMetadata: usageMetadata != null ? usageMetadata : null,
1428
- finishMessage: (_g = candidate.finishMessage) != null ? _g : null
1429
- }
1430
- };
2157
+ providerMetadata = wrapProviderMetadata({
2158
+ promptFeedback: (_j = value.promptFeedback) != null ? _j : null,
2159
+ groundingMetadata: lastGroundingMetadata,
2160
+ urlContextMetadata: lastUrlContextMetadata,
2161
+ safetyRatings: (_k = candidate.safetyRatings) != null ? _k : null,
2162
+ usageMetadata: usageMetadata != null ? usageMetadata : null,
2163
+ finishMessage: (_l = candidate.finishMessage) != null ? _l : null,
2164
+ serviceTier
2165
+ });
1431
2166
  }
1432
2167
  },
1433
2168
  flush(controller) {
@@ -1446,7 +2181,7 @@ var GoogleGenerativeAILanguageModel = class {
1446
2181
  controller.enqueue({
1447
2182
  type: "finish",
1448
2183
  finishReason,
1449
- usage: convertGoogleGenerativeAIUsage(usage),
2184
+ usage: convertGoogleUsage(usage),
1450
2185
  providerMetadata
1451
2186
  });
1452
2187
  }
@@ -1457,25 +2192,74 @@ var GoogleGenerativeAILanguageModel = class {
1457
2192
  };
1458
2193
  }
1459
2194
  };
1460
- function getToolCallsFromParts({
1461
- parts,
1462
- generateId: generateId3,
1463
- providerOptionsName
2195
+ function isGemini3Model(modelId) {
2196
+ return /gemini-3[\.\-]/i.test(modelId) || /gemini-3$/i.test(modelId);
2197
+ }
2198
+ function getMaxOutputTokensForGemini25Model() {
2199
+ return 65536;
2200
+ }
2201
+ function getMaxThinkingTokensForGemini25Model(modelId) {
2202
+ const id = modelId.toLowerCase();
2203
+ if (id.includes("2.5-pro") || id.includes("gemini-3-pro-image")) {
2204
+ return 32768;
2205
+ }
2206
+ return 24576;
2207
+ }
2208
+ function resolveThinkingConfig({
2209
+ reasoning,
2210
+ modelId,
2211
+ warnings
1464
2212
  }) {
1465
- const functionCallParts = parts == null ? void 0 : parts.filter(
1466
- (part) => "functionCall" in part
1467
- );
1468
- return functionCallParts == null || functionCallParts.length === 0 ? void 0 : functionCallParts.map((part) => ({
1469
- type: "tool-call",
1470
- toolCallId: generateId3(),
1471
- toolName: part.functionCall.name,
1472
- args: JSON.stringify(part.functionCall.args),
1473
- providerMetadata: part.thoughtSignature ? {
1474
- [providerOptionsName]: {
1475
- thoughtSignature: part.thoughtSignature
1476
- }
1477
- } : void 0
1478
- }));
2213
+ if (!isCustomReasoning(reasoning)) {
2214
+ return void 0;
2215
+ }
2216
+ if (isGemini3Model(modelId) && !modelId.includes("gemini-3-pro-image")) {
2217
+ return resolveGemini3ThinkingConfig({ reasoning, warnings });
2218
+ }
2219
+ return resolveGemini25ThinkingConfig({ reasoning, modelId, warnings });
2220
+ }
2221
+ function resolveGemini3ThinkingConfig({
2222
+ reasoning,
2223
+ warnings
2224
+ }) {
2225
+ if (reasoning === "none") {
2226
+ return { thinkingLevel: "minimal" };
2227
+ }
2228
+ const thinkingLevel = mapReasoningToProviderEffort({
2229
+ reasoning,
2230
+ effortMap: {
2231
+ minimal: "minimal",
2232
+ low: "low",
2233
+ medium: "medium",
2234
+ high: "high",
2235
+ xhigh: "high"
2236
+ },
2237
+ warnings
2238
+ });
2239
+ if (thinkingLevel == null) {
2240
+ return void 0;
2241
+ }
2242
+ return { thinkingLevel };
2243
+ }
2244
+ function resolveGemini25ThinkingConfig({
2245
+ reasoning,
2246
+ modelId,
2247
+ warnings
2248
+ }) {
2249
+ if (reasoning === "none") {
2250
+ return { thinkingBudget: 0 };
2251
+ }
2252
+ const thinkingBudget = mapReasoningToProviderBudget({
2253
+ reasoning,
2254
+ maxOutputTokens: getMaxOutputTokensForGemini25Model(),
2255
+ maxReasoningBudget: getMaxThinkingTokensForGemini25Model(modelId),
2256
+ minReasoningBudget: 0,
2257
+ warnings
2258
+ });
2259
+ if (thinkingBudget == null) {
2260
+ return void 0;
2261
+ }
2262
+ return { thinkingBudget };
1479
2263
  }
1480
2264
  function extractSources({
1481
2265
  groundingMetadata,
@@ -1571,256 +2355,312 @@ function extractSources({
1571
2355
  }
1572
2356
  return sources.length > 0 ? sources : void 0;
1573
2357
  }
1574
- var getGroundingMetadataSchema = () => import_v45.z.object({
1575
- webSearchQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1576
- imageSearchQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1577
- retrievalQueries: import_v45.z.array(import_v45.z.string()).nullish(),
1578
- searchEntryPoint: import_v45.z.object({ renderedContent: import_v45.z.string() }).nullish(),
1579
- groundingChunks: import_v45.z.array(
1580
- import_v45.z.object({
1581
- web: import_v45.z.object({ uri: import_v45.z.string(), title: import_v45.z.string().nullish() }).nullish(),
1582
- image: import_v45.z.object({
1583
- sourceUri: import_v45.z.string(),
1584
- imageUri: import_v45.z.string(),
1585
- title: import_v45.z.string().nullish(),
1586
- domain: import_v45.z.string().nullish()
2358
+ var getGroundingMetadataSchema = () => z5.object({
2359
+ webSearchQueries: z5.array(z5.string()).nullish(),
2360
+ imageSearchQueries: z5.array(z5.string()).nullish(),
2361
+ retrievalQueries: z5.array(z5.string()).nullish(),
2362
+ searchEntryPoint: z5.object({ renderedContent: z5.string() }).nullish(),
2363
+ groundingChunks: z5.array(
2364
+ z5.object({
2365
+ web: z5.object({ uri: z5.string(), title: z5.string().nullish() }).nullish(),
2366
+ image: z5.object({
2367
+ sourceUri: z5.string(),
2368
+ imageUri: z5.string(),
2369
+ title: z5.string().nullish(),
2370
+ domain: z5.string().nullish()
1587
2371
  }).nullish(),
1588
- retrievedContext: import_v45.z.object({
1589
- uri: import_v45.z.string().nullish(),
1590
- title: import_v45.z.string().nullish(),
1591
- text: import_v45.z.string().nullish(),
1592
- fileSearchStore: import_v45.z.string().nullish()
2372
+ retrievedContext: z5.object({
2373
+ uri: z5.string().nullish(),
2374
+ title: z5.string().nullish(),
2375
+ text: z5.string().nullish(),
2376
+ fileSearchStore: z5.string().nullish()
1593
2377
  }).nullish(),
1594
- maps: import_v45.z.object({
1595
- uri: import_v45.z.string().nullish(),
1596
- title: import_v45.z.string().nullish(),
1597
- text: import_v45.z.string().nullish(),
1598
- placeId: import_v45.z.string().nullish()
2378
+ maps: z5.object({
2379
+ uri: z5.string().nullish(),
2380
+ title: z5.string().nullish(),
2381
+ text: z5.string().nullish(),
2382
+ placeId: z5.string().nullish()
1599
2383
  }).nullish()
1600
2384
  })
1601
2385
  ).nullish(),
1602
- groundingSupports: import_v45.z.array(
1603
- import_v45.z.object({
1604
- segment: import_v45.z.object({
1605
- startIndex: import_v45.z.number().nullish(),
1606
- endIndex: import_v45.z.number().nullish(),
1607
- text: import_v45.z.string().nullish()
2386
+ groundingSupports: z5.array(
2387
+ z5.object({
2388
+ segment: z5.object({
2389
+ startIndex: z5.number().nullish(),
2390
+ endIndex: z5.number().nullish(),
2391
+ text: z5.string().nullish()
1608
2392
  }).nullish(),
1609
- segment_text: import_v45.z.string().nullish(),
1610
- groundingChunkIndices: import_v45.z.array(import_v45.z.number()).nullish(),
1611
- supportChunkIndices: import_v45.z.array(import_v45.z.number()).nullish(),
1612
- confidenceScores: import_v45.z.array(import_v45.z.number()).nullish(),
1613
- confidenceScore: import_v45.z.array(import_v45.z.number()).nullish()
2393
+ segment_text: z5.string().nullish(),
2394
+ groundingChunkIndices: z5.array(z5.number()).nullish(),
2395
+ supportChunkIndices: z5.array(z5.number()).nullish(),
2396
+ confidenceScores: z5.array(z5.number()).nullish(),
2397
+ confidenceScore: z5.array(z5.number()).nullish()
1614
2398
  })
1615
2399
  ).nullish(),
1616
- retrievalMetadata: import_v45.z.union([
1617
- import_v45.z.object({
1618
- webDynamicRetrievalScore: import_v45.z.number()
2400
+ retrievalMetadata: z5.union([
2401
+ z5.object({
2402
+ webDynamicRetrievalScore: z5.number()
1619
2403
  }),
1620
- import_v45.z.object({})
2404
+ z5.object({})
1621
2405
  ]).nullish()
1622
2406
  });
1623
- var getContentSchema = () => import_v45.z.object({
1624
- parts: import_v45.z.array(
1625
- import_v45.z.union([
2407
+ var partialArgSchema = z5.object({
2408
+ jsonPath: z5.string(),
2409
+ stringValue: z5.string().nullish(),
2410
+ numberValue: z5.number().nullish(),
2411
+ boolValue: z5.boolean().nullish(),
2412
+ nullValue: z5.unknown().nullish(),
2413
+ willContinue: z5.boolean().nullish()
2414
+ });
2415
+ var getContentSchema = () => z5.object({
2416
+ parts: z5.array(
2417
+ z5.union([
1626
2418
  // note: order matters since text can be fully empty
1627
- import_v45.z.object({
1628
- functionCall: import_v45.z.object({
1629
- name: import_v45.z.string(),
1630
- args: import_v45.z.unknown()
2419
+ z5.object({
2420
+ functionCall: z5.object({
2421
+ name: z5.string().nullish(),
2422
+ args: z5.unknown().nullish(),
2423
+ partialArgs: z5.array(partialArgSchema).nullish(),
2424
+ willContinue: z5.boolean().nullish()
2425
+ }),
2426
+ thoughtSignature: z5.string().nullish()
2427
+ }),
2428
+ z5.object({
2429
+ inlineData: z5.object({
2430
+ mimeType: z5.string(),
2431
+ data: z5.string()
2432
+ }),
2433
+ thought: z5.boolean().nullish(),
2434
+ thoughtSignature: z5.string().nullish()
2435
+ }),
2436
+ z5.object({
2437
+ toolCall: z5.object({
2438
+ toolType: z5.string(),
2439
+ args: z5.unknown().nullish(),
2440
+ id: z5.string()
1631
2441
  }),
1632
- thoughtSignature: import_v45.z.string().nullish()
2442
+ thoughtSignature: z5.string().nullish()
1633
2443
  }),
1634
- import_v45.z.object({
1635
- inlineData: import_v45.z.object({
1636
- mimeType: import_v45.z.string(),
1637
- data: import_v45.z.string()
2444
+ z5.object({
2445
+ toolResponse: z5.object({
2446
+ toolType: z5.string(),
2447
+ response: z5.unknown().nullish(),
2448
+ id: z5.string()
1638
2449
  }),
1639
- thought: import_v45.z.boolean().nullish(),
1640
- thoughtSignature: import_v45.z.string().nullish()
2450
+ thoughtSignature: z5.string().nullish()
1641
2451
  }),
1642
- import_v45.z.object({
1643
- executableCode: import_v45.z.object({
1644
- language: import_v45.z.string(),
1645
- code: import_v45.z.string()
2452
+ z5.object({
2453
+ executableCode: z5.object({
2454
+ language: z5.string(),
2455
+ code: z5.string()
1646
2456
  }).nullish(),
1647
- codeExecutionResult: import_v45.z.object({
1648
- outcome: import_v45.z.string(),
1649
- output: import_v45.z.string().nullish()
2457
+ codeExecutionResult: z5.object({
2458
+ outcome: z5.string(),
2459
+ output: z5.string().nullish()
1650
2460
  }).nullish(),
1651
- text: import_v45.z.string().nullish(),
1652
- thought: import_v45.z.boolean().nullish(),
1653
- thoughtSignature: import_v45.z.string().nullish()
2461
+ text: z5.string().nullish(),
2462
+ thought: z5.boolean().nullish(),
2463
+ thoughtSignature: z5.string().nullish()
1654
2464
  })
1655
2465
  ])
1656
2466
  ).nullish()
1657
2467
  });
1658
- var getSafetyRatingSchema = () => import_v45.z.object({
1659
- category: import_v45.z.string().nullish(),
1660
- probability: import_v45.z.string().nullish(),
1661
- probabilityScore: import_v45.z.number().nullish(),
1662
- severity: import_v45.z.string().nullish(),
1663
- severityScore: import_v45.z.number().nullish(),
1664
- blocked: import_v45.z.boolean().nullish()
2468
+ var getSafetyRatingSchema = () => z5.object({
2469
+ category: z5.string().nullish(),
2470
+ probability: z5.string().nullish(),
2471
+ probabilityScore: z5.number().nullish(),
2472
+ severity: z5.string().nullish(),
2473
+ severityScore: z5.number().nullish(),
2474
+ blocked: z5.boolean().nullish()
1665
2475
  });
1666
- var usageSchema = import_v45.z.object({
1667
- cachedContentTokenCount: import_v45.z.number().nullish(),
1668
- thoughtsTokenCount: import_v45.z.number().nullish(),
1669
- promptTokenCount: import_v45.z.number().nullish(),
1670
- candidatesTokenCount: import_v45.z.number().nullish(),
1671
- totalTokenCount: import_v45.z.number().nullish(),
2476
+ var tokenDetailsSchema = z5.array(
2477
+ z5.object({
2478
+ modality: z5.string(),
2479
+ tokenCount: z5.number()
2480
+ })
2481
+ ).nullish();
2482
+ var usageSchema = z5.object({
2483
+ cachedContentTokenCount: z5.number().nullish(),
2484
+ thoughtsTokenCount: z5.number().nullish(),
2485
+ promptTokenCount: z5.number().nullish(),
2486
+ candidatesTokenCount: z5.number().nullish(),
2487
+ totalTokenCount: z5.number().nullish(),
1672
2488
  // https://cloud.google.com/vertex-ai/generative-ai/docs/reference/rest/v1/GenerateContentResponse#TrafficType
1673
- trafficType: import_v45.z.string().nullish()
2489
+ trafficType: z5.string().nullish(),
2490
+ // https://ai.google.dev/api/generate-content#Modality
2491
+ promptTokensDetails: tokenDetailsSchema,
2492
+ candidatesTokensDetails: tokenDetailsSchema
1674
2493
  });
1675
- var getUrlContextMetadataSchema = () => import_v45.z.object({
1676
- urlMetadata: import_v45.z.array(
1677
- import_v45.z.object({
1678
- retrievedUrl: import_v45.z.string(),
1679
- urlRetrievalStatus: import_v45.z.string()
2494
+ var getUrlContextMetadataSchema = () => z5.object({
2495
+ urlMetadata: z5.array(
2496
+ z5.object({
2497
+ retrievedUrl: z5.string(),
2498
+ urlRetrievalStatus: z5.string()
1680
2499
  })
1681
2500
  ).nullish()
1682
2501
  });
1683
- var responseSchema = (0, import_provider_utils6.lazySchema)(
1684
- () => (0, import_provider_utils6.zodSchema)(
1685
- import_v45.z.object({
1686
- candidates: import_v45.z.array(
1687
- import_v45.z.object({
1688
- content: getContentSchema().nullish().or(import_v45.z.object({}).strict()),
1689
- finishReason: import_v45.z.string().nullish(),
1690
- finishMessage: import_v45.z.string().nullish(),
1691
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
2502
+ var responseSchema = lazySchema5(
2503
+ () => zodSchema5(
2504
+ z5.object({
2505
+ candidates: z5.array(
2506
+ z5.object({
2507
+ content: getContentSchema().nullish().or(z5.object({}).strict()),
2508
+ finishReason: z5.string().nullish(),
2509
+ finishMessage: z5.string().nullish(),
2510
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1692
2511
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1693
2512
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
1694
2513
  })
1695
2514
  ),
1696
2515
  usageMetadata: usageSchema.nullish(),
1697
- promptFeedback: import_v45.z.object({
1698
- blockReason: import_v45.z.string().nullish(),
1699
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish()
1700
- }).nullish()
2516
+ promptFeedback: z5.object({
2517
+ blockReason: z5.string().nullish(),
2518
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
2519
+ }).nullish(),
2520
+ serviceTier: z5.string().nullish()
1701
2521
  })
1702
2522
  )
1703
2523
  );
1704
- var chunkSchema = (0, import_provider_utils6.lazySchema)(
1705
- () => (0, import_provider_utils6.zodSchema)(
1706
- import_v45.z.object({
1707
- candidates: import_v45.z.array(
1708
- import_v45.z.object({
2524
+ var chunkSchema = lazySchema5(
2525
+ () => zodSchema5(
2526
+ z5.object({
2527
+ candidates: z5.array(
2528
+ z5.object({
1709
2529
  content: getContentSchema().nullish(),
1710
- finishReason: import_v45.z.string().nullish(),
1711
- finishMessage: import_v45.z.string().nullish(),
1712
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish(),
2530
+ finishReason: z5.string().nullish(),
2531
+ finishMessage: z5.string().nullish(),
2532
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish(),
1713
2533
  groundingMetadata: getGroundingMetadataSchema().nullish(),
1714
2534
  urlContextMetadata: getUrlContextMetadataSchema().nullish()
1715
2535
  })
1716
2536
  ).nullish(),
1717
2537
  usageMetadata: usageSchema.nullish(),
1718
- promptFeedback: import_v45.z.object({
1719
- blockReason: import_v45.z.string().nullish(),
1720
- safetyRatings: import_v45.z.array(getSafetyRatingSchema()).nullish()
1721
- }).nullish()
2538
+ promptFeedback: z5.object({
2539
+ blockReason: z5.string().nullish(),
2540
+ safetyRatings: z5.array(getSafetyRatingSchema()).nullish()
2541
+ }).nullish(),
2542
+ serviceTier: z5.string().nullish()
1722
2543
  })
1723
2544
  )
1724
2545
  );
1725
2546
 
1726
2547
  // src/tool/code-execution.ts
1727
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
1728
- var import_v46 = require("zod/v4");
1729
- var codeExecution = (0, import_provider_utils7.createProviderToolFactoryWithOutputSchema)({
2548
+ import { createProviderExecutedToolFactory } from "@ai-sdk/provider-utils";
2549
+ import { z as z6 } from "zod/v4";
2550
+ var codeExecution = createProviderExecutedToolFactory({
1730
2551
  id: "google.code_execution",
1731
- inputSchema: import_v46.z.object({
1732
- language: import_v46.z.string().describe("The programming language of the code."),
1733
- code: import_v46.z.string().describe("The code to be executed.")
2552
+ inputSchema: z6.object({
2553
+ language: z6.string().describe("The programming language of the code."),
2554
+ code: z6.string().describe("The code to be executed.")
1734
2555
  }),
1735
- outputSchema: import_v46.z.object({
1736
- outcome: import_v46.z.string().describe('The outcome of the execution (e.g., "OUTCOME_OK").'),
1737
- output: import_v46.z.string().describe("The output from the code execution.")
2556
+ outputSchema: z6.object({
2557
+ outcome: z6.string().describe('The outcome of the execution (e.g., "OUTCOME_OK").'),
2558
+ output: z6.string().describe("The output from the code execution.")
1738
2559
  })
1739
2560
  });
1740
2561
 
1741
2562
  // src/tool/enterprise-web-search.ts
1742
- var import_provider_utils8 = require("@ai-sdk/provider-utils");
1743
- var import_v47 = require("zod/v4");
1744
- var enterpriseWebSearch = (0, import_provider_utils8.createProviderToolFactory)({
2563
+ import {
2564
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory2,
2565
+ lazySchema as lazySchema6,
2566
+ zodSchema as zodSchema6
2567
+ } from "@ai-sdk/provider-utils";
2568
+ import { z as z7 } from "zod/v4";
2569
+ var enterpriseWebSearch = createProviderExecutedToolFactory2({
1745
2570
  id: "google.enterprise_web_search",
1746
- inputSchema: (0, import_provider_utils8.lazySchema)(() => (0, import_provider_utils8.zodSchema)(import_v47.z.object({})))
2571
+ inputSchema: lazySchema6(() => zodSchema6(z7.object({}))),
2572
+ outputSchema: lazySchema6(() => zodSchema6(z7.object({})))
1747
2573
  });
1748
2574
 
1749
2575
  // src/tool/file-search.ts
1750
- var import_provider_utils9 = require("@ai-sdk/provider-utils");
1751
- var import_v48 = require("zod/v4");
1752
- var fileSearchArgsBaseSchema = import_v48.z.object({
2576
+ import {
2577
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory3,
2578
+ lazySchema as lazySchema7,
2579
+ zodSchema as zodSchema7
2580
+ } from "@ai-sdk/provider-utils";
2581
+ import { z as z8 } from "zod/v4";
2582
+ var fileSearchArgsBaseSchema = z8.object({
1753
2583
  /** The names of the file_search_stores to retrieve from.
1754
2584
  * Example: `fileSearchStores/my-file-search-store-123`
1755
2585
  */
1756
- fileSearchStoreNames: import_v48.z.array(import_v48.z.string()).describe(
2586
+ fileSearchStoreNames: z8.array(z8.string()).describe(
1757
2587
  "The names of the file_search_stores to retrieve from. Example: `fileSearchStores/my-file-search-store-123`"
1758
2588
  ),
1759
2589
  /** The number of file search retrieval chunks to retrieve. */
1760
- topK: import_v48.z.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
2590
+ topK: z8.number().int().positive().describe("The number of file search retrieval chunks to retrieve.").optional(),
1761
2591
  /** Metadata filter to apply to the file search retrieval documents.
1762
2592
  * See https://google.aip.dev/160 for the syntax of the filter expression.
1763
2593
  */
1764
- metadataFilter: import_v48.z.string().describe(
2594
+ metadataFilter: z8.string().describe(
1765
2595
  "Metadata filter to apply to the file search retrieval documents. See https://google.aip.dev/160 for the syntax of the filter expression."
1766
2596
  ).optional()
1767
2597
  }).passthrough();
1768
- var fileSearchArgsSchema = (0, import_provider_utils9.lazySchema)(
1769
- () => (0, import_provider_utils9.zodSchema)(fileSearchArgsBaseSchema)
1770
- );
1771
- var fileSearch = (0, import_provider_utils9.createProviderToolFactory)({
2598
+ var fileSearch = createProviderExecutedToolFactory3({
1772
2599
  id: "google.file_search",
1773
- inputSchema: fileSearchArgsSchema
2600
+ inputSchema: lazySchema7(() => zodSchema7(z8.object({}))),
2601
+ outputSchema: lazySchema7(() => zodSchema7(z8.object({})))
1774
2602
  });
1775
2603
 
1776
2604
  // src/tool/google-maps.ts
1777
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
1778
- var import_v49 = require("zod/v4");
1779
- var googleMaps = (0, import_provider_utils10.createProviderToolFactory)({
2605
+ import {
2606
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory4,
2607
+ lazySchema as lazySchema8,
2608
+ zodSchema as zodSchema8
2609
+ } from "@ai-sdk/provider-utils";
2610
+ import { z as z9 } from "zod/v4";
2611
+ var googleMaps = createProviderExecutedToolFactory4({
1780
2612
  id: "google.google_maps",
1781
- inputSchema: (0, import_provider_utils10.lazySchema)(() => (0, import_provider_utils10.zodSchema)(import_v49.z.object({})))
2613
+ inputSchema: lazySchema8(() => zodSchema8(z9.object({}))),
2614
+ outputSchema: lazySchema8(() => zodSchema8(z9.object({})))
1782
2615
  });
1783
2616
 
1784
2617
  // src/tool/google-search.ts
1785
- var import_provider_utils11 = require("@ai-sdk/provider-utils");
1786
- var import_v410 = require("zod/v4");
1787
- var googleSearchToolArgsBaseSchema = import_v410.z.object({
1788
- searchTypes: import_v410.z.object({
1789
- webSearch: import_v410.z.object({}).optional(),
1790
- imageSearch: import_v410.z.object({}).optional()
2618
+ import {
2619
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory5,
2620
+ lazySchema as lazySchema9,
2621
+ zodSchema as zodSchema9
2622
+ } from "@ai-sdk/provider-utils";
2623
+ import { z as z10 } from "zod/v4";
2624
+ var googleSearchToolArgsBaseSchema = z10.object({
2625
+ searchTypes: z10.object({
2626
+ webSearch: z10.object({}).optional(),
2627
+ imageSearch: z10.object({}).optional()
1791
2628
  }).optional(),
1792
- timeRangeFilter: import_v410.z.object({
1793
- startTime: import_v410.z.string(),
1794
- endTime: import_v410.z.string()
2629
+ timeRangeFilter: z10.object({
2630
+ startTime: z10.string(),
2631
+ endTime: z10.string()
1795
2632
  }).optional()
1796
2633
  }).passthrough();
1797
- var googleSearchToolArgsSchema = (0, import_provider_utils11.lazySchema)(
1798
- () => (0, import_provider_utils11.zodSchema)(googleSearchToolArgsBaseSchema)
1799
- );
1800
- var googleSearch = (0, import_provider_utils11.createProviderToolFactory)(
1801
- {
1802
- id: "google.google_search",
1803
- inputSchema: googleSearchToolArgsSchema
1804
- }
1805
- );
2634
+ var googleSearch = createProviderExecutedToolFactory5({
2635
+ id: "google.google_search",
2636
+ inputSchema: lazySchema9(() => zodSchema9(z10.object({}))),
2637
+ outputSchema: lazySchema9(() => zodSchema9(z10.object({})))
2638
+ });
1806
2639
 
1807
2640
  // src/tool/url-context.ts
1808
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
1809
- var import_v411 = require("zod/v4");
1810
- var urlContext = (0, import_provider_utils12.createProviderToolFactory)({
2641
+ import {
2642
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory6,
2643
+ lazySchema as lazySchema10,
2644
+ zodSchema as zodSchema10
2645
+ } from "@ai-sdk/provider-utils";
2646
+ import { z as z11 } from "zod/v4";
2647
+ var urlContext = createProviderExecutedToolFactory6({
1811
2648
  id: "google.url_context",
1812
- inputSchema: (0, import_provider_utils12.lazySchema)(() => (0, import_provider_utils12.zodSchema)(import_v411.z.object({})))
2649
+ inputSchema: lazySchema10(() => zodSchema10(z11.object({}))),
2650
+ outputSchema: lazySchema10(() => zodSchema10(z11.object({})))
1813
2651
  });
1814
2652
 
1815
2653
  // src/tool/vertex-rag-store.ts
1816
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
1817
- var import_v412 = require("zod/v4");
1818
- var vertexRagStore = (0, import_provider_utils13.createProviderToolFactory)({
2654
+ import {
2655
+ createProviderExecutedToolFactory as createProviderExecutedToolFactory7,
2656
+ lazySchema as lazySchema11,
2657
+ zodSchema as zodSchema11
2658
+ } from "@ai-sdk/provider-utils";
2659
+ import { z as z12 } from "zod/v4";
2660
+ var vertexRagStore = createProviderExecutedToolFactory7({
1819
2661
  id: "google.vertex_rag_store",
1820
- inputSchema: import_v412.z.object({
1821
- ragCorpus: import_v412.z.string(),
1822
- topK: import_v412.z.number().optional()
1823
- })
2662
+ inputSchema: lazySchema11(() => zodSchema11(z12.object({}))),
2663
+ outputSchema: lazySchema11(() => zodSchema11(z12.object({})))
1824
2664
  });
1825
2665
 
1826
2666
  // src/google-tools.ts
@@ -1883,16 +2723,55 @@ var googleTools = {
1883
2723
  vertexRagStore
1884
2724
  };
1885
2725
 
1886
- // src/google-generative-ai-image-model.ts
1887
- var import_provider_utils14 = require("@ai-sdk/provider-utils");
1888
- var import_v413 = require("zod/v4");
1889
- var GoogleGenerativeAIImageModel = class {
2726
+ // src/google-image-model.ts
2727
+ import {
2728
+ combineHeaders as combineHeaders3,
2729
+ convertToBase64 as convertToBase642,
2730
+ createJsonResponseHandler as createJsonResponseHandler3,
2731
+ generateId as defaultGenerateId,
2732
+ lazySchema as lazySchema13,
2733
+ parseProviderOptions as parseProviderOptions3,
2734
+ postJsonToApi as postJsonToApi3,
2735
+ resolve as resolve3,
2736
+ serializeModelOptions as serializeModelOptions3,
2737
+ WORKFLOW_SERIALIZE as WORKFLOW_SERIALIZE3,
2738
+ WORKFLOW_DESERIALIZE as WORKFLOW_DESERIALIZE3,
2739
+ zodSchema as zodSchema13
2740
+ } from "@ai-sdk/provider-utils";
2741
+ import { z as z14 } from "zod/v4";
2742
+
2743
+ // src/google-image-model-options.ts
2744
+ import {
2745
+ lazySchema as lazySchema12,
2746
+ zodSchema as zodSchema12
2747
+ } from "@ai-sdk/provider-utils";
2748
+ import { z as z13 } from "zod/v4";
2749
+ var googleImageModelOptionsSchema = lazySchema12(
2750
+ () => zodSchema12(
2751
+ z13.object({
2752
+ personGeneration: z13.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2753
+ aspectRatio: z13.enum(["1:1", "3:4", "4:3", "9:16", "16:9"]).nullish()
2754
+ })
2755
+ )
2756
+ );
2757
+
2758
+ // src/google-image-model.ts
2759
+ var GoogleImageModel = class _GoogleImageModel {
1890
2760
  constructor(modelId, settings, config) {
1891
2761
  this.modelId = modelId;
1892
2762
  this.settings = settings;
1893
2763
  this.config = config;
1894
2764
  this.specificationVersion = "v4";
1895
2765
  }
2766
+ static [WORKFLOW_SERIALIZE3](model) {
2767
+ return serializeModelOptions3({
2768
+ modelId: model.modelId,
2769
+ config: model.config
2770
+ });
2771
+ }
2772
+ static [WORKFLOW_DESERIALIZE3](options) {
2773
+ return new _GoogleImageModel(options.modelId, {}, options.config);
2774
+ }
1896
2775
  get maxImagesPerCall() {
1897
2776
  if (this.settings.maxImagesPerCall != null) {
1898
2777
  return this.settings.maxImagesPerCall;
@@ -1928,12 +2807,12 @@ var GoogleGenerativeAIImageModel = class {
1928
2807
  const warnings = [];
1929
2808
  if (files != null && files.length > 0) {
1930
2809
  throw new Error(
1931
- "Google Generative AI does not support image editing with Imagen models. Use Google Vertex AI (@ai-sdk/google-vertex) for image editing capabilities."
2810
+ "Google Gemini API does not support image editing with Imagen models. Use Google Vertex AI (@ai-sdk/google-vertex) for image editing capabilities."
1932
2811
  );
1933
2812
  }
1934
2813
  if (mask != null) {
1935
2814
  throw new Error(
1936
- "Google Generative AI does not support image editing with masks. Use Google Vertex AI (@ai-sdk/google-vertex) for image editing capabilities."
2815
+ "Google Gemini API does not support image editing with masks. Use Google Vertex AI (@ai-sdk/google-vertex) for image editing capabilities."
1937
2816
  );
1938
2817
  }
1939
2818
  if (size != null) {
@@ -1950,7 +2829,7 @@ var GoogleGenerativeAIImageModel = class {
1950
2829
  details: "This model does not support the `seed` option through this provider."
1951
2830
  });
1952
2831
  }
1953
- const googleOptions = await (0, import_provider_utils14.parseProviderOptions)({
2832
+ const googleOptions = await parseProviderOptions3({
1954
2833
  provider: "google",
1955
2834
  providerOptions,
1956
2835
  schema: googleImageModelOptionsSchema
@@ -1969,12 +2848,15 @@ var GoogleGenerativeAIImageModel = class {
1969
2848
  instances: [{ prompt }],
1970
2849
  parameters
1971
2850
  };
1972
- const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
2851
+ const { responseHeaders, value: response } = await postJsonToApi3({
1973
2852
  url: `${this.config.baseURL}/models/${this.modelId}:predict`,
1974
- headers: (0, import_provider_utils14.combineHeaders)(await (0, import_provider_utils14.resolve)(this.config.headers), headers),
2853
+ headers: combineHeaders3(
2854
+ this.config.headers ? await resolve3(this.config.headers) : void 0,
2855
+ headers
2856
+ ),
1975
2857
  body,
1976
2858
  failedResponseHandler: googleFailedResponseHandler,
1977
- successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
2859
+ successfulResponseHandler: createJsonResponseHandler3(
1978
2860
  googleImageResponseSchema
1979
2861
  ),
1980
2862
  abortSignal,
@@ -2040,13 +2922,16 @@ var GoogleGenerativeAIImageModel = class {
2040
2922
  if (file.type === "url") {
2041
2923
  userContent.push({
2042
2924
  type: "file",
2043
- data: new URL(file.url),
2925
+ data: { type: "url", url: new URL(file.url) },
2044
2926
  mediaType: "image/*"
2045
2927
  });
2046
2928
  } else {
2047
2929
  userContent.push({
2048
2930
  type: "file",
2049
- data: typeof file.data === "string" ? file.data : new Uint8Array(file.data),
2931
+ data: {
2932
+ type: "data",
2933
+ data: typeof file.data === "string" ? file.data : new Uint8Array(file.data)
2934
+ },
2050
2935
  mediaType: file.mediaType
2051
2936
  });
2052
2937
  }
@@ -2055,12 +2940,12 @@ var GoogleGenerativeAIImageModel = class {
2055
2940
  const languageModelPrompt = [
2056
2941
  { role: "user", content: userContent }
2057
2942
  ];
2058
- const languageModel = new GoogleGenerativeAILanguageModel(this.modelId, {
2943
+ const languageModel = new GoogleLanguageModel(this.modelId, {
2059
2944
  provider: this.config.provider,
2060
2945
  baseURL: this.config.baseURL,
2061
2946
  headers: (_a = this.config.headers) != null ? _a : {},
2062
2947
  fetch: this.config.fetch,
2063
- generateId: (_b = this.config.generateId) != null ? _b : import_provider_utils14.generateId
2948
+ generateId: (_b = this.config.generateId) != null ? _b : defaultGenerateId
2064
2949
  });
2065
2950
  const result = await languageModel.doGenerate({
2066
2951
  prompt: languageModelPrompt,
@@ -2080,8 +2965,8 @@ var GoogleGenerativeAIImageModel = class {
2080
2965
  const currentDate = (_f = (_e = (_d = this.config._internal) == null ? void 0 : _d.currentDate) == null ? void 0 : _e.call(_d)) != null ? _f : /* @__PURE__ */ new Date();
2081
2966
  const images = [];
2082
2967
  for (const part of result.content) {
2083
- if (part.type === "file" && part.mediaType.startsWith("image/")) {
2084
- images.push((0, import_provider_utils14.convertToBase64)(part.data));
2968
+ if (part.type === "file" && part.mediaType.startsWith("image/") && part.data.type === "data") {
2969
+ images.push(convertToBase642(part.data.data));
2085
2970
  }
2086
2971
  }
2087
2972
  return {
@@ -2108,27 +2993,215 @@ var GoogleGenerativeAIImageModel = class {
2108
2993
  function isGeminiModel(modelId) {
2109
2994
  return modelId.startsWith("gemini-");
2110
2995
  }
2111
- var googleImageResponseSchema = (0, import_provider_utils14.lazySchema)(
2112
- () => (0, import_provider_utils14.zodSchema)(
2113
- import_v413.z.object({
2114
- predictions: import_v413.z.array(import_v413.z.object({ bytesBase64Encoded: import_v413.z.string() })).default([])
2996
+ var googleImageResponseSchema = lazySchema13(
2997
+ () => zodSchema13(
2998
+ z14.object({
2999
+ predictions: z14.array(z14.object({ bytesBase64Encoded: z14.string() })).default([])
2115
3000
  })
2116
3001
  )
2117
3002
  );
2118
- var googleImageModelOptionsSchema = (0, import_provider_utils14.lazySchema)(
2119
- () => (0, import_provider_utils14.zodSchema)(
2120
- import_v413.z.object({
2121
- personGeneration: import_v413.z.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2122
- aspectRatio: import_v413.z.enum(["1:1", "3:4", "4:3", "9:16", "16:9"]).nullish()
3003
+
3004
+ // src/google-files.ts
3005
+ import {
3006
+ AISDKError
3007
+ } from "@ai-sdk/provider";
3008
+ import {
3009
+ combineHeaders as combineHeaders4,
3010
+ convertInlineFileDataToUint8Array,
3011
+ createJsonResponseHandler as createJsonResponseHandler4,
3012
+ delay,
3013
+ lazySchema as lazySchema14,
3014
+ parseProviderOptions as parseProviderOptions4,
3015
+ zodSchema as zodSchema14,
3016
+ getFromApi
3017
+ } from "@ai-sdk/provider-utils";
3018
+ import { z as z15 } from "zod/v4";
3019
+ var GoogleFiles = class {
3020
+ constructor(config) {
3021
+ this.config = config;
3022
+ this.specificationVersion = "v4";
3023
+ }
3024
+ get provider() {
3025
+ return this.config.provider;
3026
+ }
3027
+ async uploadFile(options) {
3028
+ var _a, _b, _c, _d;
3029
+ const googleOptions = await parseProviderOptions4({
3030
+ provider: "google",
3031
+ providerOptions: options.providerOptions,
3032
+ schema: googleFilesUploadOptionsSchema
3033
+ });
3034
+ const resolvedHeaders = this.config.headers();
3035
+ const fetchFn = (_a = this.config.fetch) != null ? _a : globalThis.fetch;
3036
+ const warnings = [];
3037
+ if (options.filename != null) {
3038
+ warnings.push({ type: "unsupported", feature: "filename" });
3039
+ }
3040
+ const fileBytes = convertInlineFileDataToUint8Array(options.data);
3041
+ const mediaType = options.mediaType;
3042
+ const displayName = googleOptions == null ? void 0 : googleOptions.displayName;
3043
+ const baseOrigin = this.config.baseURL.replace(/\/v1beta$/, "");
3044
+ const initResponse = await fetchFn(`${baseOrigin}/upload/v1beta/files`, {
3045
+ method: "POST",
3046
+ headers: {
3047
+ ...resolvedHeaders,
3048
+ "X-Goog-Upload-Protocol": "resumable",
3049
+ "X-Goog-Upload-Command": "start",
3050
+ "X-Goog-Upload-Header-Content-Length": String(fileBytes.length),
3051
+ "X-Goog-Upload-Header-Content-Type": mediaType,
3052
+ "Content-Type": "application/json"
3053
+ },
3054
+ body: JSON.stringify({
3055
+ file: {
3056
+ ...displayName != null ? { display_name: displayName } : {}
3057
+ }
3058
+ })
3059
+ });
3060
+ if (!initResponse.ok) {
3061
+ const errorBody = await initResponse.text();
3062
+ throw new AISDKError({
3063
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3064
+ message: `Failed to initiate resumable upload: ${initResponse.status} ${errorBody}`
3065
+ });
3066
+ }
3067
+ const uploadUrl = initResponse.headers.get("x-goog-upload-url");
3068
+ if (!uploadUrl) {
3069
+ throw new AISDKError({
3070
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3071
+ message: "No upload URL returned from initiation request"
3072
+ });
3073
+ }
3074
+ const uploadResponse = await fetchFn(uploadUrl, {
3075
+ method: "POST",
3076
+ headers: {
3077
+ "Content-Length": String(fileBytes.length),
3078
+ "X-Goog-Upload-Offset": "0",
3079
+ "X-Goog-Upload-Command": "upload, finalize"
3080
+ },
3081
+ body: fileBytes
3082
+ });
3083
+ if (!uploadResponse.ok) {
3084
+ const errorBody = await uploadResponse.text();
3085
+ throw new AISDKError({
3086
+ name: "GOOGLE_FILES_UPLOAD_ERROR",
3087
+ message: `Failed to upload file data: ${uploadResponse.status} ${errorBody}`
3088
+ });
3089
+ }
3090
+ const uploadResult = await uploadResponse.json();
3091
+ let file = uploadResult.file;
3092
+ const pollIntervalMs = (_b = googleOptions == null ? void 0 : googleOptions.pollIntervalMs) != null ? _b : 2e3;
3093
+ const pollTimeoutMs = (_c = googleOptions == null ? void 0 : googleOptions.pollTimeoutMs) != null ? _c : 3e5;
3094
+ const startTime = Date.now();
3095
+ while (file.state === "PROCESSING") {
3096
+ if (Date.now() - startTime > pollTimeoutMs) {
3097
+ throw new AISDKError({
3098
+ name: "GOOGLE_FILES_UPLOAD_TIMEOUT",
3099
+ message: `File processing timed out after ${pollTimeoutMs}ms`
3100
+ });
3101
+ }
3102
+ await delay(pollIntervalMs);
3103
+ const { value: fileStatus } = await getFromApi({
3104
+ url: `${this.config.baseURL}/${file.name}`,
3105
+ headers: combineHeaders4(resolvedHeaders),
3106
+ successfulResponseHandler: createJsonResponseHandler4(
3107
+ googleFileResponseSchema
3108
+ ),
3109
+ failedResponseHandler: googleFailedResponseHandler,
3110
+ fetch: this.config.fetch
3111
+ });
3112
+ file = fileStatus;
3113
+ }
3114
+ if (file.state === "FAILED") {
3115
+ throw new AISDKError({
3116
+ name: "GOOGLE_FILES_UPLOAD_FAILED",
3117
+ message: `File processing failed for ${file.name}`
3118
+ });
3119
+ }
3120
+ return {
3121
+ warnings,
3122
+ providerReference: { google: file.uri },
3123
+ mediaType: (_d = file.mimeType) != null ? _d : options.mediaType,
3124
+ providerMetadata: {
3125
+ google: {
3126
+ name: file.name,
3127
+ displayName: file.displayName,
3128
+ mimeType: file.mimeType,
3129
+ sizeBytes: file.sizeBytes,
3130
+ state: file.state,
3131
+ uri: file.uri,
3132
+ ...file.createTime != null ? { createTime: file.createTime } : {},
3133
+ ...file.updateTime != null ? { updateTime: file.updateTime } : {},
3134
+ ...file.expirationTime != null ? { expirationTime: file.expirationTime } : {},
3135
+ ...file.sha256Hash != null ? { sha256Hash: file.sha256Hash } : {}
3136
+ }
3137
+ }
3138
+ };
3139
+ }
3140
+ };
3141
+ var googleFileResponseSchema = lazySchema14(
3142
+ () => zodSchema14(
3143
+ z15.object({
3144
+ name: z15.string(),
3145
+ displayName: z15.string().nullish(),
3146
+ mimeType: z15.string(),
3147
+ sizeBytes: z15.string().nullish(),
3148
+ createTime: z15.string().nullish(),
3149
+ updateTime: z15.string().nullish(),
3150
+ expirationTime: z15.string().nullish(),
3151
+ sha256Hash: z15.string().nullish(),
3152
+ uri: z15.string(),
3153
+ state: z15.string()
2123
3154
  })
2124
3155
  )
2125
3156
  );
3157
+ var googleFilesUploadOptionsSchema = lazySchema14(
3158
+ () => zodSchema14(
3159
+ z15.object({
3160
+ displayName: z15.string().nullish(),
3161
+ pollIntervalMs: z15.number().positive().nullish(),
3162
+ pollTimeoutMs: z15.number().positive().nullish()
3163
+ }).passthrough()
3164
+ )
3165
+ );
3166
+
3167
+ // src/google-video-model.ts
3168
+ import {
3169
+ AISDKError as AISDKError2
3170
+ } from "@ai-sdk/provider";
3171
+ import {
3172
+ combineHeaders as combineHeaders5,
3173
+ convertUint8ArrayToBase64,
3174
+ createJsonResponseHandler as createJsonResponseHandler5,
3175
+ delay as delay2,
3176
+ getFromApi as getFromApi2,
3177
+ parseProviderOptions as parseProviderOptions5,
3178
+ postJsonToApi as postJsonToApi4,
3179
+ resolve as resolve4
3180
+ } from "@ai-sdk/provider-utils";
3181
+ import { z as z17 } from "zod/v4";
3182
+
3183
+ // src/google-video-model-options.ts
3184
+ import { lazySchema as lazySchema15, zodSchema as zodSchema15 } from "@ai-sdk/provider-utils";
3185
+ import { z as z16 } from "zod/v4";
3186
+ var googleVideoModelOptionsSchema = lazySchema15(
3187
+ () => zodSchema15(
3188
+ z16.object({
3189
+ pollIntervalMs: z16.number().positive().nullish(),
3190
+ pollTimeoutMs: z16.number().positive().nullish(),
3191
+ personGeneration: z16.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
3192
+ negativePrompt: z16.string().nullish(),
3193
+ referenceImages: z16.array(
3194
+ z16.object({
3195
+ bytesBase64Encoded: z16.string().nullish(),
3196
+ gcsUri: z16.string().nullish()
3197
+ })
3198
+ ).nullish()
3199
+ }).passthrough()
3200
+ )
3201
+ );
2126
3202
 
2127
- // src/google-generative-ai-video-model.ts
2128
- var import_provider4 = require("@ai-sdk/provider");
2129
- var import_provider_utils15 = require("@ai-sdk/provider-utils");
2130
- var import_v414 = require("zod/v4");
2131
- var GoogleGenerativeAIVideoModel = class {
3203
+ // src/google-video-model.ts
3204
+ var GoogleVideoModel = class {
2132
3205
  constructor(modelId, config) {
2133
3206
  this.modelId = modelId;
2134
3207
  this.config = config;
@@ -2144,7 +3217,7 @@ var GoogleGenerativeAIVideoModel = class {
2144
3217
  var _a, _b, _c, _d, _e, _f, _g, _h;
2145
3218
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
2146
3219
  const warnings = [];
2147
- const googleOptions = await (0, import_provider_utils15.parseProviderOptions)({
3220
+ const googleOptions = await parseProviderOptions5({
2148
3221
  provider: "google",
2149
3222
  providerOptions: options.providerOptions,
2150
3223
  schema: googleVideoModelOptionsSchema
@@ -2162,7 +3235,7 @@ var GoogleGenerativeAIVideoModel = class {
2162
3235
  details: "Google Generative AI video models require base64-encoded images. URL will be ignored."
2163
3236
  });
2164
3237
  } else {
2165
- const base64Data = typeof options.image.data === "string" ? options.image.data : (0, import_provider_utils15.convertUint8ArrayToBase64)(options.image.data);
3238
+ const base64Data = typeof options.image.data === "string" ? options.image.data : convertUint8ArrayToBase64(options.image.data);
2166
3239
  instance.image = {
2167
3240
  inlineData: {
2168
3241
  mimeType: options.image.mediaType || "image/png",
@@ -2228,17 +3301,17 @@ var GoogleGenerativeAIVideoModel = class {
2228
3301
  }
2229
3302
  }
2230
3303
  }
2231
- const { value: operation } = await (0, import_provider_utils15.postJsonToApi)({
3304
+ const { value: operation } = await postJsonToApi4({
2232
3305
  url: `${this.config.baseURL}/models/${this.modelId}:predictLongRunning`,
2233
- headers: (0, import_provider_utils15.combineHeaders)(
2234
- await (0, import_provider_utils15.resolve)(this.config.headers),
3306
+ headers: combineHeaders5(
3307
+ await resolve4(this.config.headers),
2235
3308
  options.headers
2236
3309
  ),
2237
3310
  body: {
2238
3311
  instances,
2239
3312
  parameters
2240
3313
  },
2241
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
3314
+ successfulResponseHandler: createJsonResponseHandler5(
2242
3315
  googleOperationSchema
2243
3316
  ),
2244
3317
  failedResponseHandler: googleFailedResponseHandler,
@@ -2247,7 +3320,7 @@ var GoogleGenerativeAIVideoModel = class {
2247
3320
  });
2248
3321
  const operationName = operation.name;
2249
3322
  if (!operationName) {
2250
- throw new import_provider4.AISDKError({
3323
+ throw new AISDKError2({
2251
3324
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2252
3325
  message: "No operation name returned from API"
2253
3326
  });
@@ -2259,25 +3332,25 @@ var GoogleGenerativeAIVideoModel = class {
2259
3332
  let responseHeaders;
2260
3333
  while (!finalOperation.done) {
2261
3334
  if (Date.now() - startTime > pollTimeoutMs) {
2262
- throw new import_provider4.AISDKError({
3335
+ throw new AISDKError2({
2263
3336
  name: "GOOGLE_VIDEO_GENERATION_TIMEOUT",
2264
3337
  message: `Video generation timed out after ${pollTimeoutMs}ms`
2265
3338
  });
2266
3339
  }
2267
- await (0, import_provider_utils15.delay)(pollIntervalMs);
3340
+ await delay2(pollIntervalMs);
2268
3341
  if ((_f = options.abortSignal) == null ? void 0 : _f.aborted) {
2269
- throw new import_provider4.AISDKError({
3342
+ throw new AISDKError2({
2270
3343
  name: "GOOGLE_VIDEO_GENERATION_ABORTED",
2271
3344
  message: "Video generation request was aborted"
2272
3345
  });
2273
3346
  }
2274
- const { value: statusOperation, responseHeaders: pollHeaders } = await (0, import_provider_utils15.getFromApi)({
3347
+ const { value: statusOperation, responseHeaders: pollHeaders } = await getFromApi2({
2275
3348
  url: `${this.config.baseURL}/${operationName}`,
2276
- headers: (0, import_provider_utils15.combineHeaders)(
2277
- await (0, import_provider_utils15.resolve)(this.config.headers),
3349
+ headers: combineHeaders5(
3350
+ await resolve4(this.config.headers),
2278
3351
  options.headers
2279
3352
  ),
2280
- successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
3353
+ successfulResponseHandler: createJsonResponseHandler5(
2281
3354
  googleOperationSchema
2282
3355
  ),
2283
3356
  failedResponseHandler: googleFailedResponseHandler,
@@ -2288,21 +3361,21 @@ var GoogleGenerativeAIVideoModel = class {
2288
3361
  responseHeaders = pollHeaders;
2289
3362
  }
2290
3363
  if (finalOperation.error) {
2291
- throw new import_provider4.AISDKError({
3364
+ throw new AISDKError2({
2292
3365
  name: "GOOGLE_VIDEO_GENERATION_FAILED",
2293
3366
  message: `Video generation failed: ${finalOperation.error.message}`
2294
3367
  });
2295
3368
  }
2296
3369
  const response = finalOperation.response;
2297
3370
  if (!((_g = response == null ? void 0 : response.generateVideoResponse) == null ? void 0 : _g.generatedSamples) || response.generateVideoResponse.generatedSamples.length === 0) {
2298
- throw new import_provider4.AISDKError({
3371
+ throw new AISDKError2({
2299
3372
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2300
3373
  message: `No videos in response. Response: ${JSON.stringify(finalOperation)}`
2301
3374
  });
2302
3375
  }
2303
3376
  const videos = [];
2304
3377
  const videoMetadata = [];
2305
- const resolvedHeaders = await (0, import_provider_utils15.resolve)(this.config.headers);
3378
+ const resolvedHeaders = await resolve4(this.config.headers);
2306
3379
  const apiKey = resolvedHeaders == null ? void 0 : resolvedHeaders["x-goog-api-key"];
2307
3380
  for (const generatedSample of response.generateVideoResponse.generatedSamples) {
2308
3381
  if ((_h = generatedSample.video) == null ? void 0 : _h.uri) {
@@ -2318,7 +3391,7 @@ var GoogleGenerativeAIVideoModel = class {
2318
3391
  }
2319
3392
  }
2320
3393
  if (videos.length === 0) {
2321
- throw new import_provider4.AISDKError({
3394
+ throw new AISDKError2({
2322
3395
  name: "GOOGLE_VIDEO_GENERATION_ERROR",
2323
3396
  message: "No valid videos in response"
2324
3397
  });
@@ -2339,51 +3412,35 @@ var GoogleGenerativeAIVideoModel = class {
2339
3412
  };
2340
3413
  }
2341
3414
  };
2342
- var googleOperationSchema = import_v414.z.object({
2343
- name: import_v414.z.string().nullish(),
2344
- done: import_v414.z.boolean().nullish(),
2345
- error: import_v414.z.object({
2346
- code: import_v414.z.number().nullish(),
2347
- message: import_v414.z.string(),
2348
- status: import_v414.z.string().nullish()
3415
+ var googleOperationSchema = z17.object({
3416
+ name: z17.string().nullish(),
3417
+ done: z17.boolean().nullish(),
3418
+ error: z17.object({
3419
+ code: z17.number().nullish(),
3420
+ message: z17.string(),
3421
+ status: z17.string().nullish()
2349
3422
  }).nullish(),
2350
- response: import_v414.z.object({
2351
- generateVideoResponse: import_v414.z.object({
2352
- generatedSamples: import_v414.z.array(
2353
- import_v414.z.object({
2354
- video: import_v414.z.object({
2355
- uri: import_v414.z.string().nullish()
3423
+ response: z17.object({
3424
+ generateVideoResponse: z17.object({
3425
+ generatedSamples: z17.array(
3426
+ z17.object({
3427
+ video: z17.object({
3428
+ uri: z17.string().nullish()
2356
3429
  }).nullish()
2357
3430
  })
2358
3431
  ).nullish()
2359
3432
  }).nullish()
2360
3433
  }).nullish()
2361
3434
  });
2362
- var googleVideoModelOptionsSchema = (0, import_provider_utils15.lazySchema)(
2363
- () => (0, import_provider_utils15.zodSchema)(
2364
- import_v414.z.object({
2365
- pollIntervalMs: import_v414.z.number().positive().nullish(),
2366
- pollTimeoutMs: import_v414.z.number().positive().nullish(),
2367
- personGeneration: import_v414.z.enum(["dont_allow", "allow_adult", "allow_all"]).nullish(),
2368
- negativePrompt: import_v414.z.string().nullish(),
2369
- referenceImages: import_v414.z.array(
2370
- import_v414.z.object({
2371
- bytesBase64Encoded: import_v414.z.string().nullish(),
2372
- gcsUri: import_v414.z.string().nullish()
2373
- })
2374
- ).nullish()
2375
- }).passthrough()
2376
- )
2377
- );
2378
3435
 
2379
3436
  // src/google-provider.ts
2380
- function createGoogleGenerativeAI(options = {}) {
3437
+ function createGoogle(options = {}) {
2381
3438
  var _a, _b;
2382
- const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://generativelanguage.googleapis.com/v1beta";
3439
+ const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://generativelanguage.googleapis.com/v1beta";
2383
3440
  const providerName = (_b = options.name) != null ? _b : "google.generative-ai";
2384
- const getHeaders = () => (0, import_provider_utils16.withUserAgentSuffix)(
3441
+ const getHeaders = () => withUserAgentSuffix(
2385
3442
  {
2386
- "x-goog-api-key": (0, import_provider_utils16.loadApiKey)({
3443
+ "x-goog-api-key": loadApiKey({
2387
3444
  apiKey: options.apiKey,
2388
3445
  environmentVariableName: "GOOGLE_GENERATIVE_AI_API_KEY",
2389
3446
  description: "Google Generative AI"
@@ -2394,11 +3451,11 @@ function createGoogleGenerativeAI(options = {}) {
2394
3451
  );
2395
3452
  const createChatModel = (modelId) => {
2396
3453
  var _a2;
2397
- return new GoogleGenerativeAILanguageModel(modelId, {
3454
+ return new GoogleLanguageModel(modelId, {
2398
3455
  provider: providerName,
2399
3456
  baseURL,
2400
3457
  headers: getHeaders,
2401
- generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils16.generateId,
3458
+ generateId: (_a2 = options.generateId) != null ? _a2 : generateId2,
2402
3459
  supportedUrls: () => ({
2403
3460
  "*": [
2404
3461
  // Google Generative Language "files" endpoint
@@ -2414,13 +3471,19 @@ function createGoogleGenerativeAI(options = {}) {
2414
3471
  fetch: options.fetch
2415
3472
  });
2416
3473
  };
2417
- const createEmbeddingModel = (modelId) => new GoogleGenerativeAIEmbeddingModel(modelId, {
3474
+ const createEmbeddingModel = (modelId) => new GoogleEmbeddingModel(modelId, {
3475
+ provider: providerName,
3476
+ baseURL,
3477
+ headers: getHeaders,
3478
+ fetch: options.fetch
3479
+ });
3480
+ const createImageModel = (modelId, settings = {}) => new GoogleImageModel(modelId, settings, {
2418
3481
  provider: providerName,
2419
3482
  baseURL,
2420
3483
  headers: getHeaders,
2421
3484
  fetch: options.fetch
2422
3485
  });
2423
- const createImageModel = (modelId, settings = {}) => new GoogleGenerativeAIImageModel(modelId, settings, {
3486
+ const createFiles = () => new GoogleFiles({
2424
3487
  provider: providerName,
2425
3488
  baseURL,
2426
3489
  headers: getHeaders,
@@ -2428,12 +3491,12 @@ function createGoogleGenerativeAI(options = {}) {
2428
3491
  });
2429
3492
  const createVideoModel = (modelId) => {
2430
3493
  var _a2;
2431
- return new GoogleGenerativeAIVideoModel(modelId, {
3494
+ return new GoogleVideoModel(modelId, {
2432
3495
  provider: providerName,
2433
3496
  baseURL,
2434
3497
  headers: getHeaders,
2435
3498
  fetch: options.fetch,
2436
- generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils16.generateId
3499
+ generateId: (_a2 = options.generateId) != null ? _a2 : generateId2
2437
3500
  });
2438
3501
  };
2439
3502
  const provider = function(modelId) {
@@ -2456,14 +3519,15 @@ function createGoogleGenerativeAI(options = {}) {
2456
3519
  provider.imageModel = createImageModel;
2457
3520
  provider.video = createVideoModel;
2458
3521
  provider.videoModel = createVideoModel;
3522
+ provider.files = createFiles;
2459
3523
  provider.tools = googleTools;
2460
3524
  return provider;
2461
3525
  }
2462
- var google = createGoogleGenerativeAI();
2463
- // Annotate the CommonJS export names for ESM import in node:
2464
- 0 && (module.exports = {
3526
+ var google = createGoogle();
3527
+ export {
2465
3528
  VERSION,
2466
- createGoogleGenerativeAI,
3529
+ createGoogle,
3530
+ createGoogle as createGoogleGenerativeAI,
2467
3531
  google
2468
- });
3532
+ };
2469
3533
  //# sourceMappingURL=index.js.map