@librechat/agents 3.0.25 → 3.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,16 @@ var common = require('./utils/common.cjs');
  /* eslint-disable @typescript-eslint/ban-ts-comment */
  class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
      thinkingConfig;
+     /**
+      * Override to add gemini-3 model support for multimodal and function calling thought signatures
+      */
+     get _isMultimodalModel() {
+         return (this.model.startsWith('gemini-1.5') ||
+             this.model.startsWith('gemini-2') ||
+             (this.model.startsWith('gemma-3-') &&
+                 !this.model.startsWith('gemma-3-1b')) ||
+             this.model.startsWith('gemini-3'));
+     }
      constructor(fields) {
          super(fields);
          this.model = fields.model.replace(/^models\//, '');
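
The new getter widens the parent class's multimodal check with plain prefix tests on `this.model`. A minimal sketch of the same logic as a standalone helper (the `isMultimodal` name is hypothetical, added only for illustration):

    // Hypothetical standalone mirror of the getter's prefix checks.
    const isMultimodal = (model: string): boolean =>
      model.startsWith('gemini-1.5') ||
      model.startsWith('gemini-2') ||
      (model.startsWith('gemma-3-') && !model.startsWith('gemma-3-1b')) ||
      model.startsWith('gemini-3');

    isMultimodal('gemini-3-pro-preview'); // true (newly covered by this release)
    isMultimodal('gemma-3-1b-it');        // false (the 1B Gemma variant is excluded)
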
@@ -75,6 +85,44 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
      static lc_name() {
          return 'LibreChatGoogleGenerativeAI';
      }
+     /**
+      * Helper function to convert Gemini API usage metadata to LangChain format
+      * Includes support for cached tokens and tier-based tracking for gemini-3-pro-preview
+      */
+     _convertToUsageMetadata(usageMetadata, model) {
+         if (!usageMetadata) {
+             return undefined;
+         }
+         const output = {
+             input_tokens: usageMetadata.promptTokenCount ?? 0,
+             output_tokens: (usageMetadata.candidatesTokenCount ?? 0) +
+                 (usageMetadata.thoughtsTokenCount ?? 0),
+             total_tokens: usageMetadata.totalTokenCount ?? 0,
+         };
+         if (usageMetadata.cachedContentTokenCount) {
+             output.input_token_details ??= {};
+             output.input_token_details.cache_read =
+                 usageMetadata.cachedContentTokenCount;
+         }
+         // gemini-3-pro-preview has bracket based tracking of tokens per request
+         if (model === 'gemini-3-pro-preview') {
+             const over200k = Math.max(0, (usageMetadata.promptTokenCount ?? 0) - 200000);
+             const cachedOver200k = Math.max(0, (usageMetadata.cachedContentTokenCount ?? 0) - 200000);
+             if (over200k) {
+                 output.input_token_details = {
+                     ...output.input_token_details,
+                     over_200k: over200k,
+                 };
+             }
+             if (cachedOver200k) {
+                 output.input_token_details = {
+                     ...output.input_token_details,
+                     cache_read_over_200k: cachedOver200k,
+                 };
+             }
+         }
+         return output;
+     }
      invocationParams(options) {
          const params = super.invocationParams(options);
          if (this.thinkingConfig) {
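
To make the tier logic concrete, here is a worked example of what `_convertToUsageMetadata` returns for a hypothetical gemini-3-pro-preview call whose prompt crosses the 200k boundary (all token counts invented for illustration):

    // Hypothetical Gemini API usage metadata for one request.
    const usageMetadata = {
      promptTokenCount: 250_000,
      candidatesTokenCount: 1_200,
      thoughtsTokenCount: 800,
      totalTokenCount: 252_000,
      cachedContentTokenCount: 30_000,
    };

    // _convertToUsageMetadata(usageMetadata, 'gemini-3-pro-preview') yields:
    // {
    //   input_tokens: 250000,
    //   output_tokens: 2000,          // candidates + thoughts
    //   total_tokens: 252000,
    //   input_token_details: {
    //     cache_read: 30000,
    //     over_200k: 50000,           // max(0, 250000 - 200000)
    //     // no cache_read_over_200k: max(0, 30000 - 200000) === 0
    //   },
    // }
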
@@ -88,8 +136,36 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
          }
          return params;
      }
+     async _generate(messages, options, runManager) {
+         const prompt = common.convertBaseMessagesToContent(messages, this._isMultimodalModel, this.useSystemInstruction, this.model);
+         let actualPrompt = prompt;
+         if (prompt?.[0].role === 'system') {
+             const [systemInstruction] = prompt;
+             /** @ts-ignore */
+             this.client.systemInstruction = systemInstruction;
+             actualPrompt = prompt.slice(1);
+         }
+         const parameters = this.invocationParams(options);
+         const request = {
+             ...parameters,
+             contents: actualPrompt,
+         };
+         const res = await this.caller.callWithOptions({ signal: options.signal }, async () =>
+             /** @ts-ignore */
+             this.client.generateContent(request));
+         const response = res.response;
+         const usageMetadata = this._convertToUsageMetadata(
+             /** @ts-ignore */
+             response.usageMetadata, this.model);
+         /** @ts-ignore */
+         const generationResult = common.mapGenerateContentResultToChatResult(response, {
+             usageMetadata,
+         });
+         await runManager?.handleLLMNewToken(generationResult.generations[0].text || '', undefined, undefined, undefined, undefined, undefined);
+         return generationResult;
+     }
      async *_streamResponseChunks(messages$1, options, runManager) {
-         const prompt = common.convertBaseMessagesToContent(messages$1, this._isMultimodalModel, this.useSystemInstruction);
+         const prompt = common.convertBaseMessagesToContent(messages$1, this._isMultimodalModel, this.useSystemInstruction, this.model);
          let actualPrompt = prompt;
          if (prompt?.[0].role === 'system') {
              const [systemInstruction] = prompt;
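
With `_generate` overridden, non-streaming calls now share the same prompt conversion (including the new `model` argument) and the same usage mapping as the streaming path. A hedged usage sketch; the import path and option values are assumptions, not shown in this diff:

    import { CustomChatGoogleGenerativeAI } from '@librechat/agents'; // import path assumed

    const llm = new CustomChatGoogleGenerativeAI({
      model: 'gemini-3-pro-preview',
      apiKey: process.env.GOOGLE_API_KEY,
    });

    // invoke() routes through the new _generate override, so the returned
    // message carries usage_metadata built by _convertToUsageMetadata.
    const result = await llm.invoke([
      ['system', 'You are a terse assistant.'],
      ['human', 'Name one prime number.'],
    ]);
    console.log(result.usage_metadata);
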
@@ -112,14 +188,7 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
              if ('usageMetadata' in response &&
                  this.streamUsage !== false &&
                  options.streamUsage !== false) {
-                 const genAIUsageMetadata = response.usageMetadata;
-                 const output_tokens = (genAIUsageMetadata?.candidatesTokenCount ?? 0) +
-                     (genAIUsageMetadata?.thoughtsTokenCount ?? 0);
-                 lastUsageMetadata = {
-                     input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,
-                     output_tokens,
-                     total_tokens: genAIUsageMetadata?.totalTokenCount ?? 0,
-                 };
+                 lastUsageMetadata = this._convertToUsageMetadata(response.usageMetadata, this.model);
              }
              const chunk = common.convertResponseContentToChatGenerationChunk(response, {
                  usageMetadata: undefined});
[index.cjs.map: sourcemap regenerated to match the changes above; minified diff omitted]
@@ -8,6 +8,15 @@ var uuid = require('uuid');
  require('@langchain/core/utils/types');
  require('@langchain/core/utils/json_schema');
  
+ const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = '__gemini_function_call_thought_signatures__';
+ const DUMMY_SIGNATURE = 'ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==';
+ /**
+  * Executes a function immediately and returns its result.
+  * Functional utility similar to an Immediately Invoked Function Expression (IIFE).
+  * @param fn The function to execute.
+  * @returns The result of invoking fn.
+  */
+ const iife = (fn) => fn();
  function getMessageAuthor(message) {
      const type = message._getType();
      if (messages.ChatMessage.isInstance(message)) {
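
These module-level additions set up the thought-signature round trip: a stable `additional_kwargs` key for persisting per-tool-call signatures, a placeholder signature for gemini-3 replays, and `iife`, which just evaluates an inline block in expression position. A self-contained sketch of how `iife` reads:

    const iife = <T>(fn: () => T): T => fn();

    // Lets early returns compute a value without a mutable temporary:
    const label = iife(() => {
      if (Date.now() % 2 === 0) return 'even';
      return 'odd';
    });
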
@@ -284,7 +293,7 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
          }
      }
  }
- function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
+ function convertMessageContentToParts(message, isMultimodalModel, previousMessages, model) {
      if (messages.isToolMessage(message)) {
          const messageName = message.name ??
              inferToolNameFromPreviousMessages(message, previousMessages);
@@ -328,19 +337,33 @@ function convertMessageContentToParts(message, isMultimodalModel, previousMessag
              .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))
              .filter((p) => p !== undefined));
      }
-     if (messages.isAIMessage(message) && message.tool_calls?.length != null) {
-         functionCalls = message.tool_calls.map((tc) => {
+     const functionThoughtSignatures = message.additional_kwargs?.[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
+     if (messages.isAIMessage(message) && (message.tool_calls?.length ?? 0) > 0) {
+         functionCalls = (message.tool_calls ?? []).map((tc) => {
+             const thoughtSignature = iife(() => {
+                 if (tc.id != null && tc.id !== '') {
+                     const signature = functionThoughtSignatures?.[tc.id];
+                     if (signature != null && signature !== '') {
+                         return signature;
+                     }
+                 }
+                 if (model?.includes('gemini-3') === true) {
+                     return DUMMY_SIGNATURE;
+                 }
+                 return '';
+             });
              return {
                  functionCall: {
                      name: tc.name,
                      args: tc.args,
                  },
+                 ...(thoughtSignature ? { thoughtSignature } : {}),
              };
          });
      }
      return [...messageParts, ...functionCalls];
  }
- function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSystemMessageToHumanContent = false) {
+ function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSystemMessageToHumanContent = false, model) {
      return messages$1.reduce((acc, message, index) => {
          if (!messages.isBaseMessage(message)) {
              throw new Error('Unsupported message input');
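
When an AI message with tool calls is replayed, the signature lookup order is: the stored map keyed by tool-call id first, then the DUMMY_SIGNATURE fallback for gemini-3 models, otherwise no signature. A hypothetical standalone mirror of that resolution, extracted for readability:

    // Hypothetical mirror of the thoughtSignature resolution above.
    function resolveThoughtSignature(
      toolCallId: string | undefined,
      stored: Record<string, string> | undefined,
      model: string | undefined,
      dummySignature: string,
    ): string {
      if (toolCallId) {
        const signature = stored?.[toolCallId];
        if (signature) return signature;
      }
      // gemini-3 appears to require some signature on each replayed
      // functionCall part, hence the placeholder fallback.
      if (model?.includes('gemini-3')) return dummySignature;
      return '';
    }
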
@@ -356,7 +379,7 @@ function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSyst
              prevContent.role === role) {
              throw new Error('Google Generative AI requires alternate messages between authors');
          }
-         const parts = convertMessageContentToParts(message, isMultimodalModel, messages$1.slice(0, index));
+         const parts = convertMessageContentToParts(message, isMultimodalModel, messages$1.slice(0, index), model);
          if (acc.mergeWithPreviousContent) {
              const prevContent = acc.content?.[acc.content.length - 1];
              if (!prevContent) {
@@ -388,9 +411,20 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
      if (!response.candidates || response.candidates.length === 0) {
          return null;
      }
-     const functionCalls = response.functionCalls();
      const [candidate] = response.candidates;
      const { content: candidateContent, ...generationInfo } = candidate ?? {};
+     // Extract function calls directly from parts to preserve thoughtSignature
+     const functionCalls = candidateContent?.parts?.reduce((acc, p) => {
+         if ('functionCall' in p && p.functionCall) {
+             acc.push({
+                 ...p,
+                 id: 'id' in p.functionCall && typeof p.functionCall.id === 'string'
+                     ? p.functionCall.id
+                     : uuid.v4(),
+             });
+         }
+         return acc;
+     }, []) ?? [];
      let content;
      // Checks if some parts do not have text. If false, it means that the content is a string.
      const reasoningParts = [];
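
Reading function calls off `candidateContent.parts` instead of `response.functionCalls()` matters because the SDK accessor returns only the bare `{ name, args }` objects, dropping sibling part fields such as `thoughtSignature`. A sketch of the extraction on an invented candidate part:

    // Invented candidate part, shaped like a Gemini 3 function-call response.
    const part = {
      functionCall: { name: 'get_weather', args: { city: 'Oslo' } },
      thoughtSignature: 'Erq...base64...',
    };

    // The reduce above keeps the whole part and guarantees an id:
    // { functionCall: { name, args }, thoughtSignature: 'Erq...', id: '<provided id or uuid.v4()>' }
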
@@ -409,9 +443,11 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
          content = textParts.join('');
      }
      else if (candidateContent && Array.isArray(candidateContent.parts)) {
-         content = candidateContent.parts.map((p) => {
+         content = candidateContent.parts
+             .map((p) => {
              if ('text' in p && 'thought' in p && p.thought === true) {
                  reasoningParts.push(p.text ?? '');
+                 return undefined;
              }
              else if ('text' in p) {
                  return {
@@ -432,7 +468,8 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
                  };
              }
              return p;
-         });
+         })
+             .filter((p) => p !== undefined);
      }
      else {
          // no content returned - likely due to abnormal stop reason, e.g. malformed function call
@@ -447,17 +484,26 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
          text = block?.text ?? '';
      }
      const toolCallChunks = [];
-     if (functionCalls) {
+     if (functionCalls.length > 0) {
          toolCallChunks.push(...functionCalls.map((fc) => ({
-             ...fc,
-             args: JSON.stringify(fc.args),
-             // Un-commenting this causes LangChain to incorrectly merge tool calls together
-             // index: extra.index,
              type: 'tool_call_chunk',
-             id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuid.v4(),
+             id: fc?.id,
+             name: fc?.functionCall.name,
+             args: JSON.stringify(fc?.functionCall.args),
          })));
      }
-     const additional_kwargs = {};
+     // Extract thought signatures from function calls for Gemini 3+
+     const functionThoughtSignatures = functionCalls.reduce((acc, fc) => {
+         if (fc &&
+             'thoughtSignature' in fc &&
+             typeof fc.thoughtSignature === 'string') {
+             acc[fc.id] = fc.thoughtSignature;
+         }
+         return acc;
+     }, {});
+     const additional_kwargs = {
+         [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures,
+     };
      if (reasoningParts.length > 0) {
          additional_kwargs.reasoning = reasoningParts.join('');
      }
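
After this change, every chunk's `additional_kwargs` carries the signature map (possibly empty), keyed by tool-call id, alongside the existing `reasoning` field. An illustrative shape, with all values invented:

    const additional_kwargs = {
      __gemini_function_call_thought_signatures__: {
        call_abc123: 'Erq...base64...',
      },
      reasoning: 'joined thought text, when present',
    };
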
@@ -481,10 +527,134 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
          generationInfo,
      });
  }
+ /**
+  * Maps a Google GenerateContentResult to a LangChain ChatResult
+  */
+ function mapGenerateContentResultToChatResult(response, extra) {
+     if (!response.candidates ||
+         response.candidates.length === 0 ||
+         !response.candidates[0]) {
+         return {
+             generations: [],
+             llmOutput: {
+                 filters: response.promptFeedback,
+             },
+         };
+     }
+     const [candidate] = response.candidates;
+     const { content: candidateContent, ...generationInfo } = candidate ?? {};
+     // Extract function calls directly from parts to preserve thoughtSignature
+     const functionCalls = candidateContent?.parts.reduce((acc, p) => {
+         if ('functionCall' in p && p.functionCall) {
+             acc.push({
+                 ...p,
+                 id: 'id' in p.functionCall && typeof p.functionCall.id === 'string'
+                     ? p.functionCall.id
+                     : uuid.v4(),
+             });
+         }
+         return acc;
+     }, []) ?? [];
+     let content;
+     const reasoningParts = [];
+     if (Array.isArray(candidateContent?.parts) &&
+         candidateContent.parts.length === 1 &&
+         candidateContent.parts[0].text &&
+         !('thought' in candidateContent.parts[0] &&
+             candidateContent.parts[0].thought === true)) {
+         content = candidateContent.parts[0].text;
+     }
+     else if (Array.isArray(candidateContent?.parts) &&
+         candidateContent.parts.length > 0) {
+         content = candidateContent.parts
+             .map((p) => {
+             if ('text' in p && 'thought' in p && p.thought === true) {
+                 reasoningParts.push(p.text ?? '');
+                 return undefined;
+             }
+             else if ('text' in p) {
+                 return {
+                     type: 'text',
+                     text: p.text,
+                 };
+             }
+             else if ('executableCode' in p) {
+                 return {
+                     type: 'executableCode',
+                     executableCode: p.executableCode,
+                 };
+             }
+             else if ('codeExecutionResult' in p) {
+                 return {
+                     type: 'codeExecutionResult',
+                     codeExecutionResult: p.codeExecutionResult,
+                 };
+             }
+             return p;
+         })
+             .filter((p) => p !== undefined);
+     }
+     else {
+         content = [];
+     }
+     let text = '';
+     if (typeof content === 'string') {
+         text = content;
+     }
+     else if (Array.isArray(content) && content.length > 0) {
+         const block = content.find((b) => 'text' in b);
+         text = block?.text ?? text;
+     }
+     const additional_kwargs = {
+         ...generationInfo,
+     };
+     if (reasoningParts.length > 0) {
+         additional_kwargs.reasoning = reasoningParts.join('');
+     }
+     // Extract thought signatures from function calls for Gemini 3+
+     const functionThoughtSignatures = functionCalls.reduce((acc, fc) => {
+         if ('thoughtSignature' in fc && typeof fc.thoughtSignature === 'string') {
+             acc[fc.id] = fc.thoughtSignature;
+         }
+         return acc;
+     }, {});
+     const tool_calls = functionCalls.map((fc) => ({
+         type: 'tool_call',
+         id: fc.id,
+         name: fc.functionCall.name,
+         args: fc.functionCall.args,
+     }));
+     // Store thought signatures map for later retrieval
+     additional_kwargs[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY] =
+         functionThoughtSignatures;
+     const generation = {
+         text,
+         message: new messages.AIMessage({
+             content: content ?? '',
+             tool_calls,
+             additional_kwargs,
+             usage_metadata: extra?.usageMetadata,
+         }),
+         generationInfo,
+     };
+     return {
+         generations: [generation],
+         llmOutput: {
+             tokenUsage: {
+                 promptTokens: extra?.usageMetadata?.input_tokens,
+                 completionTokens: extra?.usageMetadata?.output_tokens,
+                 totalTokens: extra?.usageMetadata?.total_tokens,
+             },
+         },
+     };
+ }
  
+ exports._FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY;
  exports.convertAuthorToRole = convertAuthorToRole;
  exports.convertBaseMessagesToContent = convertBaseMessagesToContent;
  exports.convertMessageContentToParts = convertMessageContentToParts;
  exports.convertResponseContentToChatGenerationChunk = convertResponseContentToChatGenerationChunk;
  exports.getMessageAuthor = getMessageAuthor;
+ exports.iife = iife;
+ exports.mapGenerateContentResultToChatResult = mapGenerateContentResultToChatResult;
  //# sourceMappingURL=common.cjs.map
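
Taken together, the two files close the loop: response conversion stashes each signature in `additional_kwargs` under the exported map key, and `convertBaseMessagesToContent` reattaches it (or the gemini-3 placeholder) when the history is replayed. A self-contained simulation of that round trip, with all names and values local to the example:

    const MAP_KEY = '__gemini_function_call_thought_signatures__';

    // Step 1 (response side): the converted AIMessage stores id -> signature.
    const additional_kwargs: Record<string, Record<string, string>> = {
      [MAP_KEY]: { call_1: 'Erq...signature...' },
    };

    // Step 2 (request side): replaying the tool call looks the signature up by id.
    const stored = additional_kwargs[MAP_KEY];
    const replayedPart = {
      functionCall: { name: 'get_weather', args: { city: 'Oslo' } },
      ...(stored['call_1'] ? { thoughtSignature: stored['call_1'] } : {}),
    };
    console.log(replayedPart); // functionCall plus thoughtSignature, as Gemini 3 expects
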