@langchain/google-genai 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -507,8 +507,8 @@ class ChatGoogleGenerativeAI extends chat_models_1.BaseChatModel {
             throw new Error("`maxOutputTokens` must be a positive integer");
         }
         this.temperature = fields?.temperature ?? this.temperature;
-        if (this.temperature && (this.temperature < 0 || this.temperature > 1)) {
-            throw new Error("`temperature` must be in the range of [0.0,1.0]");
+        if (this.temperature && (this.temperature < 0 || this.temperature > 2)) {
+            throw new Error("`temperature` must be in the range of [0.0,2.0]");
         }
         this.topP = fields?.topP ?? this.topP;
         if (this.topP && this.topP < 0) {
@@ -48,7 +48,7 @@ export interface GoogleGenerativeAIChatInput extends BaseChatModelParams, Pick<G
     /**
      * Controls the randomness of the output.
      *
-     * Values can range from [0.0,1.0], inclusive. A value closer to 1.0
+     * Values can range from [0.0,2.0], inclusive. A value closer to 2.0
      * will produce responses that are more varied and creative, while
      * a value closer to 0.0 will typically result in less surprising
      * responses from the model.
@@ -504,8 +504,8 @@ export class ChatGoogleGenerativeAI extends BaseChatModel {
             throw new Error("`maxOutputTokens` must be a positive integer");
         }
         this.temperature = fields?.temperature ?? this.temperature;
-        if (this.temperature && (this.temperature < 0 || this.temperature > 1)) {
-            throw new Error("`temperature` must be in the range of [0.0,1.0]");
+        if (this.temperature && (this.temperature < 0 || this.temperature > 2)) {
+            throw new Error("`temperature` must be in the range of [0.0,2.0]");
         }
         this.topP = fields?.topP ?? this.topP;
         if (this.topP && this.topP < 0) {
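Taken together, the three hunks above relax the constructor's temperature validation from [0.0,1.0] to [0.0,2.0] in both the CJS and ESM builds and update the matching doc comment, in line with the wider temperature range accepted by recent Gemini models. A minimal usage sketch, assuming a placeholder model name and an API key in the environment (neither appears in this diff):

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";

// With 0.1.9 the constructor accepts temperatures above 1.0 (up to 2.0);
// 0.1.7 threw "`temperature` must be in the range of [0.0,1.0]" for the same value.
const model = new ChatGoogleGenerativeAI({
  model: "gemini-1.5-flash",          // placeholder model name
  apiKey: process.env.GOOGLE_API_KEY, // assumes the key is set in the environment
  temperature: 1.5,                   // now passes validation; values above 2 still throw
});

const response = await model.invoke("Write a short, playful haiku about diffs.");
console.log(response.content);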
@@ -53,6 +53,14 @@ function messageContentMedia(content) {
             },
         };
     }
+    if ("mimeType" in content && "fileUri" in content) {
+        return {
+            fileData: {
+                mimeType: content.mimeType,
+                fileUri: content.fileUri,
+            },
+        };
+    }
     throw new Error("Invalid media content");
 }
 function convertMessageContentToParts(message, isMultimodalModel) {
@@ -48,6 +48,14 @@ function messageContentMedia(content) {
             },
         };
     }
+    if ("mimeType" in content && "fileUri" in content) {
+        return {
+            fileData: {
+                mimeType: content.mimeType,
+                fileUri: content.fileUri,
+            },
+        };
+    }
     throw new Error("Invalid media content");
 }
 export function convertMessageContentToParts(message, isMultimodalModel) {
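The two hunks above add a second branch to messageContentMedia in both builds: a media part carrying mimeType and fileUri is now mapped to a Gemini fileData part instead of falling through to the "Invalid media content" error, so content can reference an already-uploaded file by URI rather than inlining base64 data. A sketch of how such a part might be supplied, assuming the "media" content-part type is what routes into messageContentMedia, and using a placeholder file URI and model name:

import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-pro" }); // placeholder model name

// The mimeType/fileUri pair below is forwarded as fileData by the new branch;
// in 0.1.7 this part would have thrown "Invalid media content".
const message = new HumanMessage({
  content: [
    { type: "text", text: "Summarize the attached video." },
    {
      type: "media", // assumed part type handled by messageContentMedia
      mimeType: "video/mp4",
      fileUri: "https://generativelanguage.googleapis.com/v1beta/files/your-file-id", // placeholder URI
    },
  ],
});

const response = await model.invoke([message]);
console.log(response.content);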
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/google-genai",
-  "version": "0.1.7",
+  "version": "0.1.9",
   "description": "Google Generative AI integration for LangChain.js",
   "type": "module",
   "engines": {