@ai-sdk/google 3.0.61 → 3.0.63

This diff shows the differences between package versions as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
package/dist/index.mjs CHANGED
@@ -7,7 +7,7 @@ import {
7
7
  } from "@ai-sdk/provider-utils";
8
8
 
9
9
  // src/version.ts
10
- var VERSION = true ? "3.0.61" : "0.0.0-test";
10
+ var VERSION = true ? "3.0.63" : "0.0.0-test";
11
11
 
12
12
  // src/google-generative-ai-embedding-model.ts
13
13
  import {
@@ -828,9 +828,10 @@ var googleLanguageModelOptions = lazySchema4(
828
828
  /**
829
829
  * Optional. When set to true, function call arguments will be streamed
830
830
  * incrementally via partialArgs in streaming responses. Only supported
831
- * on the Vertex AI API (not the Gemini API).
831
+ * on the Vertex AI API (not the Gemini API) and only for Gemini 3+
832
+ * models.
832
833
  *
833
- * @default true
834
+ * @default false
834
835
  *
835
836
  * https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc
836
837
  */
@@ -842,6 +843,11 @@ var googleLanguageModelOptions = lazySchema4(
842
843
  })
843
844
  )
844
845
  );
846
+ var VertexServiceTierMap = {
847
+ standard: "SERVICE_TIER_STANDARD",
848
+ flex: "SERVICE_TIER_FLEX",
849
+ priority: "SERVICE_TIER_PRIORITY"
850
+ };
845
851
 
846
852
  // src/google-prepare-tools.ts
847
853
  import {
@@ -1374,7 +1380,7 @@ var GoogleGenerativeAILanguageModel = class {
1374
1380
  tools,
1375
1381
  toolChoice,
1376
1382
  providerOptions
1377
- }) {
1383
+ }, { isStreaming = false } = {}) {
1378
1384
  var _a, _b;
1379
1385
  const warnings = [];
1380
1386
  const providerOptionsName = this.config.provider.includes("vertex") ? "vertex" : "google";
@@ -1405,6 +1411,10 @@ var GoogleGenerativeAILanguageModel = class {
1405
1411
  message: `'streamFunctionCallArguments' is only supported on the Vertex AI API and will be ignored with the current Google provider (${this.config.provider}). See https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc`
1406
1412
  });
1407
1413
  }
1414
+ let sanitizedServiceTier = googleOptions == null ? void 0 : googleOptions.serviceTier;
1415
+ if ((googleOptions == null ? void 0 : googleOptions.serviceTier) && isVertexProvider) {
1416
+ sanitizedServiceTier = VertexServiceTierMap[googleOptions.serviceTier];
1417
+ }
1408
1418
  const isGemmaModel = this.modelId.toLowerCase().startsWith("gemma-");
1409
1419
  const supportsFunctionResponseParts = this.modelId.startsWith("gemini-3");
1410
1420
  const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(
@@ -1424,7 +1434,7 @@ var GoogleGenerativeAILanguageModel = class {
1424
1434
  toolChoice,
1425
1435
  modelId: this.modelId
1426
1436
  });
1427
- const streamFunctionCallArguments = isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : true : void 0;
1437
+ const streamFunctionCallArguments = isStreaming && isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : false : void 0;
1428
1438
  const toolConfig = googleToolConfig || streamFunctionCallArguments || (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1429
1439
  ...googleToolConfig,
1430
1440
  ...streamFunctionCallArguments && {
@@ -1475,7 +1485,7 @@ var GoogleGenerativeAILanguageModel = class {
1475
1485
  toolConfig,
1476
1486
  cachedContent: googleOptions == null ? void 0 : googleOptions.cachedContent,
1477
1487
  labels: googleOptions == null ? void 0 : googleOptions.labels,
1478
- serviceTier: googleOptions == null ? void 0 : googleOptions.serviceTier
1488
+ serviceTier: sanitizedServiceTier
1479
1489
  },
1480
1490
  warnings: [...warnings, ...toolWarnings],
1481
1491
  providerOptionsName
@@ -1663,7 +1673,10 @@ var GoogleGenerativeAILanguageModel = class {
1663
1673
  };
1664
1674
  }
1665
1675
  async doStream(options) {
1666
- const { args, warnings, providerOptionsName } = await this.getArgs(options);
1676
+ const { args, warnings, providerOptionsName } = await this.getArgs(
1677
+ options,
1678
+ { isStreaming: true }
1679
+ );
1667
1680
  const headers = combineHeaders2(
1668
1681
  await resolve2(this.config.headers),
1669
1682
  options.headers