@ai-sdk/google 3.0.61 → 3.0.62

This diff shows the changes between publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -617,9 +617,10 @@ var googleLanguageModelOptions = lazySchema2(
617
617
  /**
618
618
  * Optional. When set to true, function call arguments will be streamed
619
619
  * incrementally via partialArgs in streaming responses. Only supported
620
- * on the Vertex AI API (not the Gemini API).
620
+ * on the Vertex AI API (not the Gemini API) and only for Gemini 3+
621
+ * models.
621
622
  *
622
- * @default true
623
+ * @default false
623
624
  *
624
625
  * https://docs.cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#streaming-fc
625
626
  */
@@ -1163,7 +1164,7 @@ var GoogleGenerativeAILanguageModel = class {
1163
1164
  tools,
1164
1165
  toolChoice,
1165
1166
  providerOptions
1166
- }) {
1167
+ }, { isStreaming = false } = {}) {
1167
1168
  var _a, _b;
1168
1169
  const warnings = [];
1169
1170
  const providerOptionsName = this.config.provider.includes("vertex") ? "vertex" : "google";
@@ -1213,7 +1214,7 @@ var GoogleGenerativeAILanguageModel = class {
1213
1214
  toolChoice,
1214
1215
  modelId: this.modelId
1215
1216
  });
1216
- const streamFunctionCallArguments = isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : true : void 0;
1217
+ const streamFunctionCallArguments = isStreaming && isVertexProvider ? (_a = googleOptions == null ? void 0 : googleOptions.streamFunctionCallArguments) != null ? _a : false : void 0;
1217
1218
  const toolConfig = googleToolConfig || streamFunctionCallArguments || (googleOptions == null ? void 0 : googleOptions.retrievalConfig) ? {
1218
1219
  ...googleToolConfig,
1219
1220
  ...streamFunctionCallArguments && {
@@ -1452,7 +1453,10 @@ var GoogleGenerativeAILanguageModel = class {
1452
1453
  };
1453
1454
  }
1454
1455
  async doStream(options) {
1455
- const { args, warnings, providerOptionsName } = await this.getArgs(options);
1456
+ const { args, warnings, providerOptionsName } = await this.getArgs(
1457
+ options,
1458
+ { isStreaming: true }
1459
+ );
1456
1460
  const headers = combineHeaders(
1457
1461
  await resolve(this.config.headers),
1458
1462
  options.headers