@ai-sdk/google 2.0.0-alpha.9 → 2.0.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -248,9 +248,7 @@ function isEmptyObjectSchema(jsonSchema) {
  import {
    UnsupportedFunctionalityError
  } from "@ai-sdk/provider";
- import {
-   convertToBase64
- } from "@ai-sdk/provider-utils";
+ import { convertToBase64 } from "@ai-sdk/provider-utils";
  function convertToGoogleGenerativeAIMessages(prompt) {
    const systemInstructionParts = [];
    const contents = [];
@@ -328,7 +326,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
            return {
              functionCall: {
                name: part.toolName,
-               args: part.args
+               args: part.input
              }
            };
          }
@@ -346,7 +344,7 @@ function convertToGoogleGenerativeAIMessages(prompt) {
          name: part.toolName,
          response: {
            name: part.toolName,
-           content: part.result
+           content: part.output.value
          }
        }
      }))
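These two hunks track the renamed fields on LanguageModelV2 prompt parts in the beta: tool-call parts carry their arguments as input (previously args), and tool-result parts wrap their payload in an output object whose value is what gets forwarded to Gemini. A minimal sketch of the message parts this converter now expects; the tool name, values, and the output type discriminator are illustrative assumptions, not taken from this diff:

// Hypothetical prompt parts in the shape convertToGoogleGenerativeAIMessages consumes after this change.
const assistantToolCallPart = {
  type: "tool-call",
  toolCallId: "call_1",
  toolName: "getWeather",          // hypothetical tool
  input: { city: "Berlin" }        // previously `args`
};

const toolResultPart = {
  type: "tool-result",
  toolCallId: "call_1",
  toolName: "getWeather",
  output: { type: "json", value: { temperature: 21 } } // previously `result`; the `type` wrapper is assumed
};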
@@ -382,7 +380,8 @@ var dynamicRetrievalConfig = z4.object({
  var googleGenerativeAIProviderOptions = z4.object({
    responseModalities: z4.array(z4.enum(["TEXT", "IMAGE"])).optional(),
    thinkingConfig: z4.object({
-     thinkingBudget: z4.number().optional()
+     thinkingBudget: z4.number().optional(),
+     includeThoughts: z4.boolean().optional()
    }).optional(),
    /**
     Optional.
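The provider options schema gains thinkingConfig.includeThoughts alongside the existing thinkingBudget. A hedged sketch of how a caller might opt in through providerOptions.google; the model id and the surrounding generateText call are assumptions, not part of this diff:

import { generateText } from "ai";
import { google } from "@ai-sdk/google";

// Sketch only: allow up to 2048 thinking tokens and ask Gemini to return its
// thoughts; `includeThoughts` is the option added in this diff.
const result = await generateText({
  model: google("gemini-2.5-flash"), // hypothetical model id
  prompt: "Explain the Monty Hall problem.",
  providerOptions: {
    google: {
      thinkingConfig: {
        thinkingBudget: 2048,
        includeThoughts: true
      }
    }
  }
});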
@@ -488,7 +487,7 @@ function prepareTools({
        functionDeclarations.push({
          name: tool.name,
          description: (_a = tool.description) != null ? _a : "",
-         parameters: convertJSONSchemaToOpenAPISchema(tool.parameters)
+         parameters: convertJSONSchemaToOpenAPISchema(tool.inputSchema)
        });
      }
    }
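prepareTools now reads the JSON schema from tool.inputSchema rather than tool.parameters, matching the renamed field on LanguageModelV2 function tools. A sketch of the tool object this code path expects; the name, description, and schema contents are illustrative:

// Illustrative function tool as passed to prepareTools after the rename.
const weatherTool = {
  type: "function",
  name: "getWeather",
  description: "Get the current weather for a city.",
  inputSchema: {                   // previously `parameters`
    type: "object",
    properties: { city: { type: "string" } },
    required: ["city"]
  }
};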
@@ -595,13 +594,19 @@ var GoogleGenerativeAILanguageModel = class {
      toolChoice,
      providerOptions
    }) {
-     var _a, _b;
+     var _a, _b, _c;
      const warnings = [];
      const googleOptions = await parseProviderOptions2({
        provider: "google",
        providerOptions,
        schema: googleGenerativeAIProviderOptions
      });
+     if (((_a = googleOptions == null ? void 0 : googleOptions.thinkingConfig) == null ? void 0 : _a.includeThoughts) === true && !this.config.provider.startsWith("google.vertex.")) {
+       warnings.push({
+         type: "other",
+         message: `The 'includeThoughts' option is only supported with the Google Vertex provider and might not be supported or could behave unexpectedly with the current Google provider (${this.config.provider}).`
+       });
+     }
      const { contents, systemInstruction } = convertToGoogleGenerativeAIMessages(prompt);
      const {
        tools: googleTools,
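Because includeThoughts is only reliably honored through Vertex AI, the model now emits a non-fatal "other" warning when the option is set on a provider whose id does not start with "google.vertex.". A sketch of surfacing that warning from a call result, assuming the AI SDK result object exposes provider warnings as warnings:

import { generateText } from "ai";
import { google } from "@ai-sdk/google";

// Sketch: with the plain (non-Vertex) Google provider, setting includeThoughts
// should produce the warning added in this hunk rather than throwing.
const { warnings } = await generateText({
  model: google("gemini-2.5-flash"), // hypothetical model id
  prompt: "Hello",
  providerOptions: {
    google: { thinkingConfig: { includeThoughts: true } }
  }
});

for (const warning of warnings ?? []) {
  if (warning.type === "other") console.warn(warning.message);
}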
@@ -610,7 +615,7 @@ var GoogleGenerativeAILanguageModel = class {
    } = prepareTools({
      tools,
      toolChoice,
-     useSearchGrounding: (_a = googleOptions == null ? void 0 : googleOptions.useSearchGrounding) != null ? _a : false,
+     useSearchGrounding: (_b = googleOptions == null ? void 0 : googleOptions.useSearchGrounding) != null ? _b : false,
      dynamicRetrievalConfig: googleOptions == null ? void 0 : googleOptions.dynamicRetrievalConfig,
      modelId: this.modelId
    });
@@ -631,7 +636,7 @@ var GoogleGenerativeAILanguageModel = class {
        responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
        // so this is needed as an escape hatch:
        // TODO convert into provider option
-       ((_b = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _b : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
+       ((_c = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _c : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
        ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
          audioTimestamp: googleOptions.audioTimestamp
        },
@@ -675,16 +680,20 @@ var GoogleGenerativeAILanguageModel = class {
      const candidate = response.candidates[0];
      const content = [];
      const parts = candidate.content == null || typeof candidate.content !== "object" || !("parts" in candidate.content) ? [] : (_a = candidate.content.parts) != null ? _a : [];
+     const usageMetadata = response.usageMetadata;
      for (const part of parts) {
-       if ("text" in part && part.text.length > 0) {
-         content.push({ type: "text", text: part.text });
+       if ("text" in part && part.text != null && part.text.length > 0) {
+         if (part.thought === true) {
+           content.push({ type: "reasoning", text: part.text });
+         } else {
+           content.push({ type: "text", text: part.text });
+         }
        } else if ("functionCall" in part) {
          content.push({
            type: "tool-call",
-           toolCallType: "function",
            toolCallId: this.config.generateId(),
            toolName: part.functionCall.name,
-           args: JSON.stringify(part.functionCall.args)
+           input: JSON.stringify(part.functionCall.args)
          });
        } else if ("inlineData" in part) {
          content.push({
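With thought summaries enabled, non-streaming responses now map parts flagged thought: true to { type: "reasoning", text } content entries, and tool calls carry their arguments under the renamed input field. A sketch of splitting such a content array back into visible text, reasoning, and tool calls; the helper itself is illustrative, the part shapes come from this hunk:

// Illustrative: separate the mixed content array produced by doGenerate above.
function splitContent(content) {
  return {
    text: content.filter((p) => p.type === "text").map((p) => p.text).join(""),
    reasoning: content.filter((p) => p.type === "reasoning").map((p) => p.text).join(""),
    toolCalls: content.filter((p) => p.type === "tool-call") // { toolCallId, toolName, input }
  };
}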
@@ -701,7 +710,6 @@ var GoogleGenerativeAILanguageModel = class {
      for (const source of sources) {
        content.push(source);
      }
-     const usageMetadata = response.usageMetadata;
      return {
        content,
        finishReason: mapGoogleGenerativeAIFinishReason({
@@ -757,6 +765,9 @@ var GoogleGenerativeAILanguageModel = class {
      let providerMetadata = void 0;
      const generateId2 = this.config.generateId;
      let hasToolCalls = false;
+     let currentTextBlockId = null;
+     let currentReasoningBlockId = null;
+     let blockCounter = 0;
      return {
        stream: response.pipeThrough(
          new TransformStream({
@@ -764,7 +775,10 @@ var GoogleGenerativeAILanguageModel = class {
            controller.enqueue({ type: "stream-start", warnings });
          },
          transform(chunk, controller) {
-           var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+           var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+           if (options.includeRawChunks) {
+             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+           }
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
784
798
  }
785
799
  const content = candidate.content;
786
800
  if (content != null) {
787
- const deltaText = getTextFromParts(content.parts);
788
- if (deltaText != null) {
789
- controller.enqueue(deltaText);
801
+ const parts = (_g = content.parts) != null ? _g : [];
802
+ for (const part of parts) {
803
+ if ("text" in part && part.text != null && part.text.length > 0) {
804
+ if (part.thought === true) {
805
+ if (currentTextBlockId !== null) {
806
+ controller.enqueue({
807
+ type: "text-end",
808
+ id: currentTextBlockId
809
+ });
810
+ currentTextBlockId = null;
811
+ }
812
+ if (currentReasoningBlockId === null) {
813
+ currentReasoningBlockId = String(blockCounter++);
814
+ controller.enqueue({
815
+ type: "reasoning-start",
816
+ id: currentReasoningBlockId
817
+ });
818
+ }
819
+ controller.enqueue({
820
+ type: "reasoning-delta",
821
+ id: currentReasoningBlockId,
822
+ delta: part.text
823
+ });
824
+ } else {
825
+ if (currentReasoningBlockId !== null) {
826
+ controller.enqueue({
827
+ type: "reasoning-end",
828
+ id: currentReasoningBlockId
829
+ });
830
+ currentReasoningBlockId = null;
831
+ }
832
+ if (currentTextBlockId === null) {
833
+ currentTextBlockId = String(blockCounter++);
834
+ controller.enqueue({
835
+ type: "text-start",
836
+ id: currentTextBlockId
837
+ });
838
+ }
839
+ controller.enqueue({
840
+ type: "text-delta",
841
+ id: currentTextBlockId,
842
+ delta: part.text
843
+ });
844
+ }
845
+ }
790
846
  }
791
847
  const inlineDataParts = getInlineDataParts(content.parts);
792
848
  if (inlineDataParts != null) {
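The streaming path replaces the old getTextFromParts aggregation with a small state machine: thought parts open or continue a reasoning block, plain text parts open or continue a text block, and switching between the two closes the previous block, so consumers always see balanced start/delta/end triples keyed by a stable id (flush, further down, closes any block still open). A sketch that reassembles those blocks from the emitted stream parts; the part shapes are exactly the ones enqueued above, the helper itself is illustrative:

// Illustrative consumer: rebuild ordered text/reasoning blocks from the
// text-start/-delta/-end and reasoning-start/-delta/-end parts above.
function collectBlocks(parts) {
  const blocks = new Map(); // id -> { kind, text }
  for (const part of parts) {
    if (part.type === "text-start") blocks.set(part.id, { kind: "text", text: "" });
    if (part.type === "reasoning-start") blocks.set(part.id, { kind: "reasoning", text: "" });
    if (part.type === "text-delta" || part.type === "reasoning-delta") {
      blocks.get(part.id).text += part.delta; // start is always emitted before the first delta
    }
    // text-end / reasoning-end carry no payload; the block is already complete.
  }
  return [...blocks.values()];
}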
@@ -805,18 +861,24 @@ var GoogleGenerativeAILanguageModel = class {
            if (toolCallDeltas != null) {
              for (const toolCall of toolCallDeltas) {
                controller.enqueue({
-                 type: "tool-call-delta",
-                 toolCallType: "function",
-                 toolCallId: toolCall.toolCallId,
-                 toolName: toolCall.toolName,
-                 argsTextDelta: toolCall.args
+                 type: "tool-input-start",
+                 id: toolCall.toolCallId,
+                 toolName: toolCall.toolName
+               });
+               controller.enqueue({
+                 type: "tool-input-delta",
+                 id: toolCall.toolCallId,
+                 delta: toolCall.args
+               });
+               controller.enqueue({
+                 type: "tool-input-end",
+                 id: toolCall.toolCallId
                });
                controller.enqueue({
                  type: "tool-call",
-                 toolCallType: "function",
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
-                 args: toolCall.args
+                 input: toolCall.args
                });
                hasToolCalls = true;
              }
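Tool calls are now announced as a tool-input-start / tool-input-delta / tool-input-end sequence keyed by the tool call id, followed by a final tool-call part whose arguments live under input instead of args and without the old toolCallType discriminator. A sketch that accumulates streamed tool inputs from these parts; the helper is illustrative, the part shapes come from this hunk:

// Illustrative accumulator for the tool input stream parts emitted above.
function collectToolInputs(parts) {
  const inputs = new Map(); // tool call id -> { toolName, inputText }
  for (const part of parts) {
    if (part.type === "tool-input-start") {
      inputs.set(part.id, { toolName: part.toolName, inputText: "" });
    } else if (part.type === "tool-input-delta") {
      inputs.get(part.id).inputText += part.delta;
    } else if (part.type === "tool-call") {
      // the final part carries the complete JSON arguments for the call
      inputs.get(part.toolCallId).inputText = part.input;
    }
    // tool-input-end carries no payload; nothing to do here.
  }
  return inputs;
}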
@@ -827,22 +889,34 @@ var GoogleGenerativeAILanguageModel = class {
              finishReason: candidate.finishReason,
              hasToolCalls
            });
-           const sources = (_g = extractSources({
+           const sources = (_h = extractSources({
              groundingMetadata: candidate.groundingMetadata,
              generateId: generateId2
-           })) != null ? _g : [];
+           })) != null ? _h : [];
            for (const source of sources) {
              controller.enqueue(source);
            }
            providerMetadata = {
              google: {
-               groundingMetadata: (_h = candidate.groundingMetadata) != null ? _h : null,
-               safetyRatings: (_i = candidate.safetyRatings) != null ? _i : null
+               groundingMetadata: (_i = candidate.groundingMetadata) != null ? _i : null,
+               safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null
              }
            };
          }
        },
        flush(controller) {
+         if (currentTextBlockId !== null) {
+           controller.enqueue({
+             type: "text-end",
+             id: currentTextBlockId
+           });
+         }
+         if (currentReasoningBlockId !== null) {
+           controller.enqueue({
+             type: "reasoning-end",
+             id: currentReasoningBlockId
+           });
+         }
          controller.enqueue({
            type: "finish",
            finishReason,
@@ -866,19 +940,11 @@ function getToolCallsFromParts({
    );
    return functionCallParts == null || functionCallParts.length === 0 ? void 0 : functionCallParts.map((part) => ({
      type: "tool-call",
-     toolCallType: "function",
      toolCallId: generateId2(),
      toolName: part.functionCall.name,
      args: JSON.stringify(part.functionCall.args)
    }));
  }
- function getTextFromParts(parts) {
-   const textParts = parts == null ? void 0 : parts.filter((part) => "text" in part);
-   return textParts == null || textParts.length === 0 ? void 0 : {
-     type: "text",
-     text: textParts.map((part) => part.text).join("")
-   };
- }
  function getInlineDataParts(parts) {
    return parts == null ? void 0 : parts.filter(
      (part) => "inlineData" in part
@@ -900,12 +966,9 @@ function extractSources({
    }));
  }
  var contentSchema = z5.object({
-   role: z5.string(),
    parts: z5.array(
      z5.union([
-       z5.object({
-         text: z5.string()
-       }),
+       // note: order matters since text can be fully empty
        z5.object({
          functionCall: z5.object({
            name: z5.string(),
@@ -917,6 +980,10 @@ var contentSchema = z5.object({
            mimeType: z5.string(),
            data: z5.string()
          })
+       }),
+       z5.object({
+         text: z5.string().nullish(),
+         thought: z5.boolean().nullish()
        })
      ])
    ).nullish()
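The response schema drops the required role field, makes the text part's fields nullish, adds the thought flag, and moves the text variant to the end of the union. The ordering matters because z.union resolves to the first variant that parses: with text and thought both nullish, the text variant accepts any object, so it would swallow functionCall and inlineData parts if it came first. A small standalone zod sketch of that pitfall, not the package's own schema:

import { z } from "zod";

const textPart = z.object({ text: z.string().nullish(), thought: z.boolean().nullish() });
const functionCallPart = z.object({
  functionCall: z.object({ name: z.string(), args: z.unknown() })
});

// Text-first union: the permissive text variant matches first and strips the call.
const textFirst = z.union([textPart, functionCallPart]);
console.log(textFirst.parse({ functionCall: { name: "getWeather", args: {} } })); // {}

// functionCall-first union (as in the new contentSchema): the call is preserved.
const callFirst = z.union([functionCallPart, textPart]);
console.log(callFirst.parse({ functionCall: { name: "getWeather", args: {} } }));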