@librechat/agents 2.4.37 → 2.4.38

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
@@ -0,0 +1,73 @@
+ 'use strict';
+
+ var googleGenai = require('@langchain/google-genai');
+ var env = require('@langchain/core/utils/env');
+ var generativeAi = require('@google/generative-ai');
+
+ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
+ constructor(fields) {
+ super(fields);
+ this.model = fields.model.replace(/^models\//, '');
+ this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;
+ if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {
+ throw new Error('`maxOutputTokens` must be a positive integer');
+ }
+ this.temperature = fields.temperature ?? this.temperature;
+ if (this.temperature != null &&
+ (this.temperature < 0 || this.temperature > 2)) {
+ throw new Error('`temperature` must be in the range of [0.0,2.0]');
+ }
+ this.topP = fields.topP ?? this.topP;
+ if (this.topP != null && this.topP < 0) {
+ throw new Error('`topP` must be a positive integer');
+ }
+ if (this.topP != null && this.topP > 1) {
+ throw new Error('`topP` must be below 1.');
+ }
+ this.topK = fields.topK ?? this.topK;
+ if (this.topK != null && this.topK < 0) {
+ throw new Error('`topK` must be a positive integer');
+ }
+ this.stopSequences = fields.stopSequences ?? this.stopSequences;
+ this.apiKey = fields.apiKey ?? env.getEnvironmentVariable('GOOGLE_API_KEY');
+ if (this.apiKey == null || this.apiKey === '') {
+ throw new Error('Please set an API key for Google GenerativeAI ' +
+ 'in the environment variable GOOGLE_API_KEY ' +
+ 'or in the `apiKey` field of the ' +
+ 'ChatGoogleGenerativeAI constructor');
+ }
+ this.safetySettings = fields.safetySettings ?? this.safetySettings;
+ if (this.safetySettings && this.safetySettings.length > 0) {
+ const safetySettingsSet = new Set(this.safetySettings.map((s) => s.category));
+ if (safetySettingsSet.size !== this.safetySettings.length) {
+ throw new Error('The categories in `safetySettings` array must be unique');
+ }
+ }
+ this.streaming = fields.streaming ?? this.streaming;
+ this.json = fields.json;
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+ // @ts-ignore - Accessing private property from parent class
+ this.client = new generativeAi.GoogleGenerativeAI(this.apiKey).getGenerativeModel({
+ model: this.model,
+ safetySettings: this.safetySettings,
+ generationConfig: {
+ stopSequences: this.stopSequences,
+ maxOutputTokens: this.maxOutputTokens,
+ temperature: this.temperature,
+ topP: this.topP,
+ topK: this.topK,
+ ...(this.json != null
+ ? { responseMimeType: 'application/json' }
+ : {}),
+ },
+ }, {
+ apiVersion: fields.apiVersion,
+ baseUrl: fields.baseUrl,
+ customHeaders: fields.customHeaders,
+ });
+ this.streamUsage = fields.streamUsage ?? this.streamUsage;
+ }
+ }
+
+ exports.CustomChatGoogleGenerativeAI = CustomChatGoogleGenerativeAI;
+ //# sourceMappingURL=index.cjs.map
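The module above is the compiled (CJS) form of a new CustomChatGoogleGenerativeAI class: it re-runs the usual Gemini option validation and, unlike the stock ChatGoogleGenerativeAI, rebuilds the underlying @google/generative-ai client so that apiVersion, baseUrl, and a new customHeaders field are forwarded as request options. A minimal usage sketch, not part of the diff; the import path is the package's internal '@/llm/google' alias and the header value is illustrative:

  import { CustomChatGoogleGenerativeAI } from '@/llm/google';

  // The constructor strips a leading "models/" prefix, falls back to the
  // GOOGLE_API_KEY environment variable, and forwards customHeaders to
  // getGenerativeModel() as part of the request options.
  const chat = new CustomChatGoogleGenerativeAI({
    model: 'models/gemini-2.5-flash-preview-04-17',
    apiKey: process.env.GOOGLE_API_KEY,
    maxOutputTokens: 1024,
    temperature: 0.7,
    streaming: true,
    customHeaders: { 'x-example-trace': 'demo-123' }, // illustrative header
  });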
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["import { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';\nimport type { RequestOptions, SafetySetting } from '@google/generative-ai';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n constructor(\n fields: GoogleGenerativeAIChatInput & {\n customHeaders?: RequestOptions['customHeaders'];\n }\n ) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? { responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? 
this.streamUsage;\n }\n}\n"],"names":["ChatGoogleGenerativeAI","getEnvironmentVariable","GenerativeAI"],"mappings":";;;;;;AAMM,MAAO,4BAA6B,SAAQA,kCAAsB,CAAA;AACtE,IAAA,WAAA,CACE,MAEC,EAAA;QAED,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAIC,0BAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,gBAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;;AAIvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIC,+BAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAE5D;;;;"}
@@ -4,29 +4,29 @@ var ollama = require('@langchain/ollama');
  var mistralai = require('@langchain/mistralai');
  var aws = require('@langchain/aws');
  var googleVertexai = require('@langchain/google-vertexai');
- var googleGenai = require('@langchain/google-genai');
  var web = require('@langchain/community/chat_models/bedrock/web');
- var index$1 = require('./anthropic/index.cjs');
- var index = require('./openrouter/index.cjs');
- var index$2 = require('./openai/index.cjs');
+ var index = require('./google/index.cjs');
+ var index$2 = require('./anthropic/index.cjs');
+ var index$1 = require('./openrouter/index.cjs');
+ var index$3 = require('./openai/index.cjs');
  var _enum = require('../common/enum.cjs');

  // src/llm/providers.ts
  const llmProviders = {
- [_enum.Providers.XAI]: index$2.ChatXAI,
- [_enum.Providers.OPENAI]: index$2.ChatOpenAI,
+ [_enum.Providers.XAI]: index$3.ChatXAI,
+ [_enum.Providers.OPENAI]: index$3.ChatOpenAI,
  [_enum.Providers.OLLAMA]: ollama.ChatOllama,
- [_enum.Providers.AZURE]: index$2.AzureChatOpenAI,
+ [_enum.Providers.AZURE]: index$3.AzureChatOpenAI,
  [_enum.Providers.VERTEXAI]: googleVertexai.ChatVertexAI,
- [_enum.Providers.DEEPSEEK]: index$2.ChatDeepSeek,
+ [_enum.Providers.DEEPSEEK]: index$3.ChatDeepSeek,
  [_enum.Providers.MISTRALAI]: mistralai.ChatMistralAI,
  [_enum.Providers.MISTRAL]: mistralai.ChatMistralAI,
- [_enum.Providers.ANTHROPIC]: index$1.CustomAnthropic,
- [_enum.Providers.OPENROUTER]: index.ChatOpenRouter,
+ [_enum.Providers.ANTHROPIC]: index$2.CustomAnthropic,
+ [_enum.Providers.OPENROUTER]: index$1.ChatOpenRouter,
  [_enum.Providers.BEDROCK_LEGACY]: web.BedrockChat,
  [_enum.Providers.BEDROCK]: aws.ChatBedrockConverse,
  // [Providers.ANTHROPIC]: ChatAnthropic,
- [_enum.Providers.GOOGLE]: googleGenai.ChatGoogleGenerativeAI,
+ [_enum.Providers.GOOGLE]: index.CustomChatGoogleGenerativeAI,
  };
  const manualToolStreamProviders = new Set([
  _enum.Providers.ANTHROPIC,
@@ -1 +1 @@
- {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type {\n ChatModelConstructorMap,\n ProviderOptionsMap,\n ChatModelMap,\n} from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic';\nimport { ChatOpenRouter } from '@/llm/openrouter';\nimport {\n ChatXAI,\n ChatOpenAI,\n ChatDeepSeek,\n AzureChatOpenAI,\n} from '@/llm/openai';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.MISTRAL]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([\n Providers.ANTHROPIC,\n Providers.BEDROCK,\n Providers.OLLAMA,\n]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};\n"],"names":["Providers","ChatXAI","ChatOpenAI","ChatOllama","AzureChatOpenAI","ChatVertexAI","ChatDeepSeek","ChatMistralAI","CustomAnthropic","ChatOpenRouter","BedrockChat","ChatBedrockConverse","ChatGoogleGenerativeAI"],"mappings":";;;;;;;;;;;;;AAAA;AAuBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,GAAG,GAAGC,eAAO;AACxB,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,kBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,MAAM,GAAGG,iBAAU;AAC9B,IAAA,CAACH,eAAS,CAAC,KAAK,GAAGI,uBAAe;AAClC,IAAA,CAACJ,eAAS,CAAC,QAAQ,GAAGK,2BAAY;AAClC,IAAA,CAACL,eAAS,CAAC,QAAQ,GAAGM,oBAAY;AAClC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,uBAAa;AACpC,IAAA,CAACP,eAAS,CAAC,OAAO,GAAGO,uBAAa;AAClC,IAAA,CAACP,eAAS,CAAC,SAAS,GAAGQ,uBAAe;AACtC,IAAA,CAACR,eAAS,CAAC,UAAU,GAAGS,oBAAc;AACtC,IAAA,CAACT,eAAS,CAAC,cAAc,GAAGU,eAAW;AACvC,IAAA,CAACV,eAAS,CAAC,OAAO,GAAGW,uBAAmB;;AAExC,IAAA,CAACX,eAAS,CAAC,MAAM,GAAGY,kCAAsB;;AAG/B,MAAA,yBAAyB,GAAG,IAAI,GAAG,CAAqB;AACnE,IAAAZ,eAAS,CAAC,SAAS;AACnB,IAAAA,eAAS,CAAC,OAAO;AACjB,IAAAA,eAAS,CAAC,MAAM;AACjB,CAAA;AAEY,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;;;"}
+ {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type {\n ChatModelConstructorMap,\n ProviderOptionsMap,\n ChatModelMap,\n} from '@/types';\nimport { CustomChatGoogleGenerativeAI } from '@/llm/google';\nimport { CustomAnthropic } from '@/llm/anthropic';\nimport { ChatOpenRouter } from '@/llm/openrouter';\nimport {\n ChatXAI,\n ChatOpenAI,\n ChatDeepSeek,\n AzureChatOpenAI,\n} from '@/llm/openai';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.MISTRAL]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([\n Providers.ANTHROPIC,\n Providers.BEDROCK,\n Providers.OLLAMA,\n]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};\n"],"names":["Providers","ChatXAI","ChatOpenAI","ChatOllama","AzureChatOpenAI","ChatVertexAI","ChatDeepSeek","ChatMistralAI","CustomAnthropic","ChatOpenRouter","BedrockChat","ChatBedrockConverse","CustomChatGoogleGenerativeAI"],"mappings":";;;;;;;;;;;;;AAAA;AAuBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,GAAG,GAAGC,eAAO;AACxB,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,kBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,MAAM,GAAGG,iBAAU;AAC9B,IAAA,CAACH,eAAS,CAAC,KAAK,GAAGI,uBAAe;AAClC,IAAA,CAACJ,eAAS,CAAC,QAAQ,GAAGK,2BAAY;AAClC,IAAA,CAACL,eAAS,CAAC,QAAQ,GAAGM,oBAAY;AAClC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,uBAAa;AACpC,IAAA,CAACP,eAAS,CAAC,OAAO,GAAGO,uBAAa;AAClC,IAAA,CAACP,eAAS,CAAC,SAAS,GAAGQ,uBAAe;AACtC,IAAA,CAACR,eAAS,CAAC,UAAU,GAAGS,sBAAc;AACtC,IAAA,CAACT,eAAS,CAAC,cAAc,GAAGU,eAAW;AACvC,IAAA,CAACV,eAAS,CAAC,OAAO,GAAGW,uBAAmB;;AAExC,IAAA,CAACX,eAAS,CAAC,MAAM,GAAGY,kCAA4B;;AAGrC,MAAA,yBAAyB,GAAG,IAAI,GAAG,CAAqB;AACnE,IAAAZ,eAAS,CAAC,SAAS;AACnB,IAAAA,eAAS,CAAC,OAAO;AACjB,IAAAA,eAAS,CAAC,MAAM;AACjB,CAAA;AAEY,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;;;"}
@@ -0,0 +1,71 @@
+ import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
+ import { getEnvironmentVariable } from '@langchain/core/utils/env';
+ import { GoogleGenerativeAI } from '@google/generative-ai';
+
+ class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
+ constructor(fields) {
+ super(fields);
+ this.model = fields.model.replace(/^models\//, '');
+ this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;
+ if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {
+ throw new Error('`maxOutputTokens` must be a positive integer');
+ }
+ this.temperature = fields.temperature ?? this.temperature;
+ if (this.temperature != null &&
+ (this.temperature < 0 || this.temperature > 2)) {
+ throw new Error('`temperature` must be in the range of [0.0,2.0]');
+ }
+ this.topP = fields.topP ?? this.topP;
+ if (this.topP != null && this.topP < 0) {
+ throw new Error('`topP` must be a positive integer');
+ }
+ if (this.topP != null && this.topP > 1) {
+ throw new Error('`topP` must be below 1.');
+ }
+ this.topK = fields.topK ?? this.topK;
+ if (this.topK != null && this.topK < 0) {
+ throw new Error('`topK` must be a positive integer');
+ }
+ this.stopSequences = fields.stopSequences ?? this.stopSequences;
+ this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');
+ if (this.apiKey == null || this.apiKey === '') {
+ throw new Error('Please set an API key for Google GenerativeAI ' +
+ 'in the environment variable GOOGLE_API_KEY ' +
+ 'or in the `apiKey` field of the ' +
+ 'ChatGoogleGenerativeAI constructor');
+ }
+ this.safetySettings = fields.safetySettings ?? this.safetySettings;
+ if (this.safetySettings && this.safetySettings.length > 0) {
+ const safetySettingsSet = new Set(this.safetySettings.map((s) => s.category));
+ if (safetySettingsSet.size !== this.safetySettings.length) {
+ throw new Error('The categories in `safetySettings` array must be unique');
+ }
+ }
+ this.streaming = fields.streaming ?? this.streaming;
+ this.json = fields.json;
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+ // @ts-ignore - Accessing private property from parent class
+ this.client = new GoogleGenerativeAI(this.apiKey).getGenerativeModel({
+ model: this.model,
+ safetySettings: this.safetySettings,
+ generationConfig: {
+ stopSequences: this.stopSequences,
+ maxOutputTokens: this.maxOutputTokens,
+ temperature: this.temperature,
+ topP: this.topP,
+ topK: this.topK,
+ ...(this.json != null
+ ? { responseMimeType: 'application/json' }
+ : {}),
+ },
+ }, {
+ apiVersion: fields.apiVersion,
+ baseUrl: fields.baseUrl,
+ customHeaders: fields.customHeaders,
+ });
+ this.streamUsage = fields.streamUsage ?? this.streamUsage;
+ }
+ }
+
+ export { CustomChatGoogleGenerativeAI };
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.mjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["import { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';\nimport type { RequestOptions, SafetySetting } from '@google/generative-ai';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n constructor(\n fields: GoogleGenerativeAIChatInput & {\n customHeaders?: RequestOptions['customHeaders'];\n }\n ) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? { responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? 
this.streamUsage;\n }\n}\n"],"names":["GenerativeAI"],"mappings":";;;;AAMM,MAAO,4BAA6B,SAAQ,sBAAsB,CAAA;AACtE,IAAA,WAAA,CACE,MAEC,EAAA;QAED,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,sBAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,gBAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;;AAIvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIA,kBAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAE5D;;;;"}
@@ -2,8 +2,8 @@ import { ChatOllama } from '@langchain/ollama';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatBedrockConverse } from '@langchain/aws';
  import { ChatVertexAI } from '@langchain/google-vertexai';
- import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
+ import { CustomChatGoogleGenerativeAI } from './google/index.mjs';
  import { CustomAnthropic } from './anthropic/index.mjs';
  import { ChatOpenRouter } from './openrouter/index.mjs';
  import { ChatDeepSeek, AzureChatOpenAI, ChatOpenAI, ChatXAI } from './openai/index.mjs';
@@ -24,7 +24,7 @@ const llmProviders = {
  [Providers.BEDROCK_LEGACY]: BedrockChat,
  [Providers.BEDROCK]: ChatBedrockConverse,
  // [Providers.ANTHROPIC]: ChatAnthropic,
- [Providers.GOOGLE]: ChatGoogleGenerativeAI,
+ [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,
  };
  const manualToolStreamProviders = new Set([
  Providers.ANTHROPIC,
@@ -1 +1 @@
- {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type {\n ChatModelConstructorMap,\n ProviderOptionsMap,\n ChatModelMap,\n} from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic';\nimport { ChatOpenRouter } from '@/llm/openrouter';\nimport {\n ChatXAI,\n ChatOpenAI,\n ChatDeepSeek,\n AzureChatOpenAI,\n} from '@/llm/openai';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.MISTRAL]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([\n Providers.ANTHROPIC,\n Providers.BEDROCK,\n Providers.OLLAMA,\n]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};\n"],"names":[],"mappings":";;;;;;;;;;;AAAA;AAuBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,GAAG,GAAG,OAAO;AACxB,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,KAAK,GAAG,eAAe;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,aAAa;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;AACtC,IAAA,CAAC,SAAS,CAAC,UAAU,GAAG,cAAc;AACtC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,sBAAsB;;AAG/B,MAAA,yBAAyB,GAAG,IAAI,GAAG,CAAqB;AACnE,IAAA,SAAS,CAAC,SAAS;AACnB,IAAA,SAAS,CAAC,OAAO;AACjB,IAAA,SAAS,CAAC,MAAM;AACjB,CAAA;AAEY,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;"}
+ {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type {\n ChatModelConstructorMap,\n ProviderOptionsMap,\n ChatModelMap,\n} from '@/types';\nimport { CustomChatGoogleGenerativeAI } from '@/llm/google';\nimport { CustomAnthropic } from '@/llm/anthropic';\nimport { ChatOpenRouter } from '@/llm/openrouter';\nimport {\n ChatXAI,\n ChatOpenAI,\n ChatDeepSeek,\n AzureChatOpenAI,\n} from '@/llm/openai';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.MISTRAL]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([\n Providers.ANTHROPIC,\n Providers.BEDROCK,\n Providers.OLLAMA,\n]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};\n"],"names":[],"mappings":";;;;;;;;;;;AAAA;AAuBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,GAAG,GAAG,OAAO;AACxB,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,KAAK,GAAG,eAAe;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,aAAa;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;AACtC,IAAA,CAAC,SAAS,CAAC,UAAU,GAAG,cAAc;AACtC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,4BAA4B;;AAGrC,MAAA,yBAAyB,GAAG,IAAI,GAAG,CAAqB;AACnE,IAAA,SAAS,CAAC,SAAS;AACnB,IAAA,SAAS,CAAC,OAAO;AACjB,IAAA,SAAS,CAAC,MAAM;AACjB,CAAA;AAEY,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;"}
@@ -0,0 +1,8 @@
+ import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
+ import type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';
+ import type { RequestOptions } from '@google/generative-ai';
+ export declare class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
+ constructor(fields: GoogleGenerativeAIChatInput & {
+ customHeaders?: RequestOptions['customHeaders'];
+ });
+ }
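The compiled output earlier in the diff also shows the constructor honoring a json flag: when it is set, the wrapped client is created with generationConfig.responseMimeType set to 'application/json'. A hedged sketch of that mode, not part of the diff; the prompt and import path are illustrative, and the json field is assumed to come from the upstream GoogleGenerativeAIChatInput:

  import { CustomChatGoogleGenerativeAI } from '@/llm/google';

  // With `json: true`, the underlying Gemini client is configured with
  // responseMimeType 'application/json', so responses should be raw JSON text.
  const jsonModel = new CustomChatGoogleGenerativeAI({
    model: 'gemini-2.5-flash-preview-04-17',
    apiKey: process.env.GOOGLE_API_KEY,
    json: true,
  });

  const res = await jsonModel.invoke(
    'Return {"city": string, "population": number} for Tokyo.'
  );
  console.log(res.content); // expected to be a JSON string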
@@ -3,7 +3,6 @@ import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatBedrockConverse } from '@langchain/aws';
  import { ChatVertexAI } from '@langchain/google-vertexai';
- import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import type { BindToolsInput, BaseChatModelParams } from '@langchain/core/language_models/chat_models';
  import type { OpenAIChatInput, ChatOpenAIFields, AzureOpenAIInput, ClientOptions as OAIClientOptions } from '@langchain/openai';
@@ -14,6 +13,7 @@ import type { ChatDeepSeekCallOptions } from '@langchain/deepseek';
  import type { ChatOpenRouterCallOptions } from '@/llm/openrouter';
  import type { ChatBedrockConverseInput } from '@langchain/aws';
  import type { ChatMistralAIInput } from '@langchain/mistralai';
+ import type { RequestOptions } from '@google/generative-ai';
  import type { StructuredTool } from '@langchain/core/tools';
  import type { AnthropicInput } from '@langchain/anthropic';
  import type { Runnable } from '@langchain/core/runnables';
@@ -21,6 +21,7 @@ import type { ChatOllamaInput } from '@langchain/ollama';
  import type { OpenAI as OpenAIClient } from 'openai';
  import type { ChatXAIInput } from '@langchain/xai';
  import { ChatXAI, ChatOpenAI, ChatDeepSeek, AzureChatOpenAI } from '@/llm/openai';
+ import { CustomChatGoogleGenerativeAI } from '@/llm/google';
  import { ChatOpenRouter } from '@/llm/openrouter';
  import { Providers } from '@/common';
  export type AzureClientOptions = Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & {
@@ -48,7 +49,9 @@ export type BedrockAnthropicInput = ChatBedrockConverseInput & {
  additionalModelRequestFields?: ChatBedrockConverseInput['additionalModelRequestFields'] & AnthropicReasoning;
  };
  export type BedrockConverseClientOptions = ChatBedrockConverseInput;
- export type GoogleClientOptions = GoogleGenerativeAIChatInput;
+ export type GoogleClientOptions = GoogleGenerativeAIChatInput & {
+ customHeaders?: RequestOptions['customHeaders'];
+ };
  export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
  export type XAIClientOptions = ChatXAIInput;
  export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions | XAIClientOptions;
@@ -83,7 +86,7 @@ export type ChatModelMap = {
  [Providers.OPENROUTER]: ChatOpenRouter;
  [Providers.BEDROCK_LEGACY]: BedrockChat;
  [Providers.BEDROCK]: ChatBedrockConverse;
- [Providers.GOOGLE]: ChatGoogleGenerativeAI;
+ [Providers.GOOGLE]: CustomChatGoogleGenerativeAI;
  };
  export type ChatModelConstructorMap = {
  [P in Providers]: new (config: ProviderOptionsMap[P]) => ChatModelMap[P];
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@librechat/agents",
- "version": "2.4.37",
+ "version": "2.4.38",
  "main": "./dist/cjs/main.cjs",
  "module": "./dist/esm/main.mjs",
  "types": "./dist/types/index.d.ts",
@@ -47,7 +47,7 @@
  "image": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/image.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
  "code_exec_files": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_files.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
  "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
- "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'xai' --name 'Jo' --location 'New York, NY'",
+ "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'google' --name 'Jo' --location 'New York, NY'",
  "caching": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/caching.ts --name 'Jo' --location 'New York, NY'",
  "thinking": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/thinking.ts --name 'Jo' --location 'New York, NY'",
  "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
@@ -0,0 +1,97 @@
+ import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
+ import { getEnvironmentVariable } from '@langchain/core/utils/env';
+ import { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';
+ import type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';
+ import type { RequestOptions, SafetySetting } from '@google/generative-ai';
+
+ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
+ constructor(
+ fields: GoogleGenerativeAIChatInput & {
+ customHeaders?: RequestOptions['customHeaders'];
+ }
+ ) {
+ super(fields);
+
+ this.model = fields.model.replace(/^models\//, '');
+
+ this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;
+
+ if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {
+ throw new Error('`maxOutputTokens` must be a positive integer');
+ }
+
+ this.temperature = fields.temperature ?? this.temperature;
+ if (
+ this.temperature != null &&
+ (this.temperature < 0 || this.temperature > 2)
+ ) {
+ throw new Error('`temperature` must be in the range of [0.0,2.0]');
+ }
+
+ this.topP = fields.topP ?? this.topP;
+ if (this.topP != null && this.topP < 0) {
+ throw new Error('`topP` must be a positive integer');
+ }
+
+ if (this.topP != null && this.topP > 1) {
+ throw new Error('`topP` must be below 1.');
+ }
+
+ this.topK = fields.topK ?? this.topK;
+ if (this.topK != null && this.topK < 0) {
+ throw new Error('`topK` must be a positive integer');
+ }
+
+ this.stopSequences = fields.stopSequences ?? this.stopSequences;
+
+ this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');
+ if (this.apiKey == null || this.apiKey === '') {
+ throw new Error(
+ 'Please set an API key for Google GenerativeAI ' +
+ 'in the environment variable GOOGLE_API_KEY ' +
+ 'or in the `apiKey` field of the ' +
+ 'ChatGoogleGenerativeAI constructor'
+ );
+ }
+
+ this.safetySettings = fields.safetySettings ?? this.safetySettings;
+ if (this.safetySettings && this.safetySettings.length > 0) {
+ const safetySettingsSet = new Set(
+ this.safetySettings.map((s) => s.category)
+ );
+ if (safetySettingsSet.size !== this.safetySettings.length) {
+ throw new Error(
+ 'The categories in `safetySettings` array must be unique'
+ );
+ }
+ }
+
+ this.streaming = fields.streaming ?? this.streaming;
+ this.json = fields.json;
+
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+ // @ts-ignore - Accessing private property from parent class
+ this.client = new GenerativeAI(this.apiKey).getGenerativeModel(
+ {
+ model: this.model,
+ safetySettings: this.safetySettings as SafetySetting[],
+ generationConfig: {
+ stopSequences: this.stopSequences,
+ maxOutputTokens: this.maxOutputTokens,
+ temperature: this.temperature,
+ topP: this.topP,
+ topK: this.topK,
+ ...(this.json != null
+ ? { responseMimeType: 'application/json' }
+ : {}),
+ },
+ },
+ {
+ apiVersion: fields.apiVersion,
+ baseUrl: fields.baseUrl,
+ customHeaders: fields.customHeaders,
+ }
+ );
+ this.streamUsage = fields.streamUsage ?? this.streamUsage;
+ }
+ }
@@ -4,13 +4,13 @@ import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatBedrockConverse } from '@langchain/aws';
  // import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatVertexAI } from '@langchain/google-vertexai';
- import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import type {
  ChatModelConstructorMap,
  ProviderOptionsMap,
  ChatModelMap,
  } from '@/types';
+ import { CustomChatGoogleGenerativeAI } from '@/llm/google';
  import { CustomAnthropic } from '@/llm/anthropic';
  import { ChatOpenRouter } from '@/llm/openrouter';
  import {
@@ -35,7 +35,7 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
  [Providers.BEDROCK_LEGACY]: BedrockChat,
  [Providers.BEDROCK]: ChatBedrockConverse,
  // [Providers.ANTHROPIC]: ChatAnthropic,
- [Providers.GOOGLE]: ChatGoogleGenerativeAI,
+ [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,
  };

  export const manualToolStreamProviders = new Set<Providers | string>([
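With the provider map updated, resolving the Google provider now yields the custom wrapper rather than the stock LangChain class, so header-bearing Google configs flow end to end. A sketch using the package's own internal modules, assuming its '@/' path-alias convention also covers the providers module; the header value is illustrative:

  import { getChatModelClass } from '@/llm/providers';
  import { Providers } from '@/common';

  // Resolves to CustomChatGoogleGenerativeAI after this release
  // (previously ChatGoogleGenerativeAI from @langchain/google-genai).
  const GoogleChatModel = getChatModelClass(Providers.GOOGLE);

  const chat = new GoogleChatModel({
    model: 'gemini-2.5-flash-preview-04-17',
    apiKey: process.env.GOOGLE_API_KEY,
    customHeaders: { 'x-request-tag': 'agents-demo' }, // assumed to be accepted via the widened GoogleClientOptions
  });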
package/src/types/llm.ts CHANGED
@@ -4,7 +4,6 @@ import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatBedrockConverse } from '@langchain/aws';
  import { ChatVertexAI } from '@langchain/google-vertexai';
- import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import type {
  BindToolsInput,
@@ -23,6 +22,7 @@ import type { ChatDeepSeekCallOptions } from '@langchain/deepseek';
  import type { ChatOpenRouterCallOptions } from '@/llm/openrouter';
  import type { ChatBedrockConverseInput } from '@langchain/aws';
  import type { ChatMistralAIInput } from '@langchain/mistralai';
+ import type { RequestOptions } from '@google/generative-ai';
  import type { StructuredTool } from '@langchain/core/tools';
  import type { AnthropicInput } from '@langchain/anthropic';
  import type { Runnable } from '@langchain/core/runnables';
@@ -35,6 +35,7 @@ import {
  ChatDeepSeek,
  AzureChatOpenAI,
  } from '@/llm/openai';
+ import { CustomChatGoogleGenerativeAI } from '@/llm/google';
  import { ChatOpenRouter } from '@/llm/openrouter';
  import { Providers } from '@/common';

@@ -67,7 +68,9 @@ export type BedrockAnthropicInput = ChatBedrockConverseInput & {
  AnthropicReasoning;
  };
  export type BedrockConverseClientOptions = ChatBedrockConverseInput;
- export type GoogleClientOptions = GoogleGenerativeAIChatInput;
+ export type GoogleClientOptions = GoogleGenerativeAIChatInput & {
+ customHeaders?: RequestOptions['customHeaders'];
+ };
  export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
  export type XAIClientOptions = ChatXAIInput;

@@ -117,7 +120,7 @@ export type ChatModelMap = {
  [Providers.OPENROUTER]: ChatOpenRouter;
  [Providers.BEDROCK_LEGACY]: BedrockChat;
  [Providers.BEDROCK]: ChatBedrockConverse;
- [Providers.GOOGLE]: ChatGoogleGenerativeAI;
+ [Providers.GOOGLE]: CustomChatGoogleGenerativeAI;
  };

  export type ChatModelConstructorMap = {
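Because GoogleClientOptions is widened to include customHeaders, header-bearing Google configs now type-check wherever ClientOptions is accepted. A small type-level sketch, assuming the '@/types' barrel re-exports this declaration; the config values are illustrative:

  import type { GoogleClientOptions } from '@/types';

  // Compiles only because GoogleClientOptions now carries the optional
  // customHeaders field typed as RequestOptions['customHeaders'].
  const googleConfig: GoogleClientOptions = {
    model: 'gemini-2.5-flash-preview-04-17',
    streaming: true,
    streamUsage: true,
    customHeaders: { 'x-goog-user-project': 'example-project' }, // illustrative header
  };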
@@ -100,7 +100,7 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
  } as t.VertexAIClientOptions & t.LLMConfig,
  [Providers.GOOGLE]: {
  provider: Providers.GOOGLE,
- model: 'gemini-2.5-pro-exp-03-25',
+ model: 'gemini-2.5-flash-preview-04-17',
  streaming: true,
  streamUsage: true,
  },
@@ -108,6 +108,7 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
  provider: Providers.BEDROCK,
  // model: 'anthropic.claude-3-sonnet-20240229-v1:0',
  // model: 'us.anthropic.claude-3-5-sonnet-20241022-v2:0',
+ // model: 'us.amazon.nova-pro-v1:0',
  model: 'us.anthropic.claude-sonnet-4-20250514-v1:0',
  // additionalModelRequestFields: { thinking: { type: 'enabled', budget_tokens: 2000 } },
  region: process.env.BEDROCK_AWS_REGION,