@librechat/agents 3.0.25 → 3.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,16 @@ var common = require('./utils/common.cjs');
  /* eslint-disable @typescript-eslint/ban-ts-comment */
  class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
  thinkingConfig;
+ /**
+ * Override to add gemini-3 model support for multimodal and function calling thought signatures
+ */
+ get _isMultimodalModel() {
+ return (this.model.startsWith('gemini-1.5') ||
+ this.model.startsWith('gemini-2') ||
+ (this.model.startsWith('gemma-3-') &&
+ !this.model.startsWith('gemma-3-1b')) ||
+ this.model.startsWith('gemini-3'));
+ }
  constructor(fields) {
  super(fields);
  this.model = fields.model.replace(/^models\//, '');
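
For illustration, a standalone sketch of the classification the new getter performs (the sample model IDs are invented for the example):

// Hypothetical mirror of the _isMultimodalModel getter above
const isMultimodal = (model) =>
  model.startsWith('gemini-1.5') ||
  model.startsWith('gemini-2') ||
  (model.startsWith('gemma-3-') && !model.startsWith('gemma-3-1b')) ||
  model.startsWith('gemini-3');

console.log(isMultimodal('gemini-3-pro-preview')); // true (newly covered by this release)
console.log(isMultimodal('gemma-3-1b-it'));        // false (the text-only 1B variant stays excluded)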
@@ -75,6 +85,44 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
  static lc_name() {
  return 'LibreChatGoogleGenerativeAI';
  }
+ /**
+ * Helper function to convert Gemini API usage metadata to LangChain format
+ * Includes support for cached tokens and tier-based tracking for gemini-3-pro-preview
+ */
+ _convertToUsageMetadata(usageMetadata, model) {
+ if (!usageMetadata) {
+ return undefined;
+ }
+ const output = {
+ input_tokens: usageMetadata.promptTokenCount ?? 0,
+ output_tokens: (usageMetadata.candidatesTokenCount ?? 0) +
+ (usageMetadata.thoughtsTokenCount ?? 0),
+ total_tokens: usageMetadata.totalTokenCount ?? 0,
+ };
+ if (usageMetadata.cachedContentTokenCount) {
+ output.input_token_details ??= {};
+ output.input_token_details.cache_read =
+ usageMetadata.cachedContentTokenCount;
+ }
+ // gemini-3-pro-preview has bracket based tracking of tokens per request
+ if (model === 'gemini-3-pro-preview') {
+ const over200k = Math.max(0, (usageMetadata.promptTokenCount ?? 0) - 200000);
+ const cachedOver200k = Math.max(0, (usageMetadata.cachedContentTokenCount ?? 0) - 200000);
+ if (over200k) {
+ output.input_token_details = {
+ ...output.input_token_details,
+ over_200k: over200k,
+ };
+ }
+ if (cachedOver200k) {
+ output.input_token_details = {
+ ...output.input_token_details,
+ cache_read_over_200k: cachedOver200k,
+ };
+ }
+ }
+ return output;
+ }
  invocationParams(options) {
  const params = super.invocationParams(options);
  if (this.thinkingConfig) {
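
To see what the gemini-3-pro-preview branch of _convertToUsageMetadata produces, here is a hypothetical usage payload (all counts invented for the example):

// Invented raw usage for a 250k-token prompt, 210k of it served from cache
const usageMetadata = {
  promptTokenCount: 250000,
  cachedContentTokenCount: 210000,
  candidatesTokenCount: 1200,
  thoughtsTokenCount: 800,
  totalTokenCount: 252000,
};
// _convertToUsageMetadata(usageMetadata, 'gemini-3-pro-preview') would return:
// {
//   input_tokens: 250000,
//   output_tokens: 2000,            // candidates (1200) + thoughts (800)
//   total_tokens: 252000,
//   input_token_details: {
//     cache_read: 210000,
//     over_200k: 50000,             // 250000 - 200000
//     cache_read_over_200k: 10000,  // 210000 - 200000
//   },
// }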
@@ -88,8 +136,36 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
  }
  return params;
  }
+ async _generate(messages, options, runManager) {
+ const prompt = common.convertBaseMessagesToContent(messages, this._isMultimodalModel, this.useSystemInstruction, this.model);
+ let actualPrompt = prompt;
+ if (prompt?.[0].role === 'system') {
+ const [systemInstruction] = prompt;
+ /** @ts-ignore */
+ this.client.systemInstruction = systemInstruction;
+ actualPrompt = prompt.slice(1);
+ }
+ const parameters = this.invocationParams(options);
+ const request = {
+ ...parameters,
+ contents: actualPrompt,
+ };
+ const res = await this.caller.callWithOptions({ signal: options.signal }, async () =>
+ /** @ts-ignore */
+ this.client.generateContent(request));
+ const response = res.response;
+ const usageMetadata = this._convertToUsageMetadata(
+ /** @ts-ignore */
+ response.usageMetadata, this.model);
+ /** @ts-ignore */
+ const generationResult = common.mapGenerateContentResultToChatResult(response, {
+ usageMetadata,
+ });
+ await runManager?.handleLLMNewToken(generationResult.generations[0].text || '', undefined, undefined, undefined, undefined, undefined);
+ return generationResult;
+ }
  async *_streamResponseChunks(messages$1, options, runManager) {
- const prompt = common.convertBaseMessagesToContent(messages$1, this._isMultimodalModel, this.useSystemInstruction);
+ const prompt = common.convertBaseMessagesToContent(messages$1, this._isMultimodalModel, this.useSystemInstruction, this.model);
  let actualPrompt = prompt;
  if (prompt?.[0].role === 'system') {
  const [systemInstruction] = prompt;
@@ -112,14 +188,7 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
  if ('usageMetadata' in response &&
  this.streamUsage !== false &&
  options.streamUsage !== false) {
- const genAIUsageMetadata = response.usageMetadata;
- const output_tokens = (genAIUsageMetadata?.candidatesTokenCount ?? 0) +
- (genAIUsageMetadata?.thoughtsTokenCount ?? 0);
- lastUsageMetadata = {
- input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,
- output_tokens,
- total_tokens: genAIUsageMetadata?.totalTokenCount ?? 0,
- };
+ lastUsageMetadata = this._convertToUsageMetadata(response.usageMetadata, this.model);
  }
  const chunk = common.convertResponseContentToChatGenerationChunk(response, {
  usageMetadata: undefined});
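
A minimal usage sketch of the new non-streaming path, assuming a valid GOOGLE_API_KEY is set (the model id is illustrative):

// invoke() routes through the new _generate override when streaming is off
const llm = new CustomChatGoogleGenerativeAI({
  model: 'gemini-3-pro-preview',
  apiKey: process.env.GOOGLE_API_KEY,
});
const result = await llm.invoke('Say hello');
// Usage, including thought tokens and the cache/tier details above, arrives on the message
console.log(result.usage_metadata);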
@@ -1 +1 @@
[index.cjs.map: the one-line minified source map was regenerated to match the source changes above; its JSON contents are omitted here]
@@ -8,6 +8,15 @@ var uuid = require('uuid');
  require('@langchain/core/utils/types');
  require('@langchain/core/utils/json_schema');
 
+ const _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = '__gemini_function_call_thought_signatures__';
+ const DUMMY_SIGNATURE = 'ErYCCrMCAdHtim9kOoOkrPiCNVsmlpMIKd7ZMxgiFbVQOkgp7nlLcDMzVsZwIzvuT7nQROivoXA72ccC2lSDvR0Gh7dkWaGuj7ctv6t7ZceHnecx0QYa+ix8tYpRfjhyWozQ49lWiws6+YGjCt10KRTyWsZ2h6O7iHTYJwKIRwGUHRKy/qK/6kFxJm5ML00gLq4D8s5Z6DBpp2ZlR+uF4G8jJgeWQgyHWVdx2wGYElaceVAc66tZdPQRdOHpWtgYSI1YdaXgVI8KHY3/EfNc2YqqMIulvkDBAnuMhkAjV9xmBa54Tq+ih3Im4+r3DzqhGqYdsSkhS0kZMwte4Hjs65dZzCw9lANxIqYi1DJ639WNPYihp/DCJCos7o+/EeSPJaio5sgWDyUnMGkY1atsJZ+m7pj7DD5tvQ==';
+ /**
+ * Executes a function immediately and returns its result.
+ * Functional utility similar to an Immediately Invoked Function Expression (IIFE).
+ * @param fn The function to execute.
+ * @returns The result of invoking fn.
+ */
+ const iife = (fn) => fn();
  function getMessageAuthor(message) {
  const type = message._getType();
  if (messages.ChatMessage.isInstance(message)) {
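
The iife helper exists so a multi-branch computation can initialize a const in a single expression; a trivial illustration:

// Equivalent to a classic (() => { ... })() immediately invoked expression
const label = iife(() => {
  if (Math.random() > 0.5) return 'heads';
  return 'tails';
});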
@@ -235,19 +244,6 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
  },
  };
  }
- else if (content.type === 'document' ||
- content.type === 'audio' ||
- content.type === 'video') {
- if (!isMultimodalModel) {
- throw new Error(`This model does not support ${content.type}s`);
- }
- return {
- inlineData: {
- data: content.data,
- mimeType: content.mimeType,
- },
- };
- }
  else if (content.type === 'media') {
  return messageContentMedia(content);
  }
@@ -284,7 +280,7 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
  }
  }
  }
- function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
+ function convertMessageContentToParts(message, isMultimodalModel, previousMessages, model) {
  if (messages.isToolMessage(message)) {
  const messageName = message.name ??
  inferToolNameFromPreviousMessages(message, previousMessages);
@@ -328,19 +324,33 @@ function convertMessageContentToParts(message, isMultimodalModel, previousMessag
  .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))
  .filter((p) => p !== undefined));
  }
- if (messages.isAIMessage(message) && message.tool_calls?.length != null) {
- functionCalls = message.tool_calls.map((tc) => {
+ const functionThoughtSignatures = message.additional_kwargs?.[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
+ if (messages.isAIMessage(message) && (message.tool_calls?.length ?? 0) > 0) {
+ functionCalls = (message.tool_calls ?? []).map((tc) => {
+ const thoughtSignature = iife(() => {
+ if (tc.id != null && tc.id !== '') {
+ const signature = functionThoughtSignatures?.[tc.id];
+ if (signature != null && signature !== '') {
+ return signature;
+ }
+ }
+ if (model?.includes('gemini-3') === true) {
+ return DUMMY_SIGNATURE;
+ }
+ return '';
+ });
  return {
  functionCall: {
  name: tc.name,
  args: tc.args,
  },
+ ...(thoughtSignature ? { thoughtSignature } : {}),
  };
  });
  }
  return [...messageParts, ...functionCalls];
  }
- function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSystemMessageToHumanContent = false) {
+ function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSystemMessageToHumanContent = false, model) {
  return messages$1.reduce((acc, message, index) => {
  if (!messages.isBaseMessage(message)) {
  throw new Error('Unsupported message input');
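
The resolution order inside the iife above can be summarized with invented values:

// Hypothetical stored map, keyed by tool call id, round-tripped via additional_kwargs
const functionThoughtSignatures = { call_1: 'sig_abc' };
// tc.id 'call_1'                                   -> 'sig_abc' (stored signature wins)
// tc.id 'call_2' with model 'gemini-3-pro-preview' -> DUMMY_SIGNATURE (fallback for replayed history)
// tc.id 'call_2' with model 'gemini-2.5-pro'       -> '' (thoughtSignature omitted from the part)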
@@ -356,7 +366,7 @@ function convertBaseMessagesToContent(messages$1, isMultimodalModel, convertSyst
  prevContent.role === role) {
  throw new Error('Google Generative AI requires alternate messages between authors');
  }
- const parts = convertMessageContentToParts(message, isMultimodalModel, messages$1.slice(0, index));
+ const parts = convertMessageContentToParts(message, isMultimodalModel, messages$1.slice(0, index), model);
  if (acc.mergeWithPreviousContent) {
  const prevContent = acc.content?.[acc.content.length - 1];
  if (!prevContent) {
@@ -388,9 +398,20 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
  if (!response.candidates || response.candidates.length === 0) {
  return null;
  }
- const functionCalls = response.functionCalls();
  const [candidate] = response.candidates;
  const { content: candidateContent, ...generationInfo } = candidate ?? {};
+ // Extract function calls directly from parts to preserve thoughtSignature
+ const functionCalls = candidateContent?.parts?.reduce((acc, p) => {
+ if ('functionCall' in p && p.functionCall) {
+ acc.push({
+ ...p,
+ id: 'id' in p.functionCall && typeof p.functionCall.id === 'string'
+ ? p.functionCall.id
+ : uuid.v4(),
+ });
+ }
+ return acc;
+ }, []) ?? [];
  let content;
  // Checks if some parts do not have text. If false, it means that the content is a string.
  const reasoningParts = [];
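
Given a hypothetical candidate content, the reduce above yields (values invented):

// Invented response parts: one text part, one function call carrying a thoughtSignature
const parts = [
  { text: 'Calling a tool...' },
  { functionCall: { name: 'get_weather', args: { city: 'Paris' } }, thoughtSignature: 'sig_abc' },
];
// functionCalls = [{ functionCall: {...}, thoughtSignature: 'sig_abc', id: '<uuid.v4()>' }]
// (the API part has no id of its own here, so a fresh UUID is assigned)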
@@ -409,9 +430,11 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
  content = textParts.join('');
  }
  else if (candidateContent && Array.isArray(candidateContent.parts)) {
- content = candidateContent.parts.map((p) => {
+ content = candidateContent.parts
+ .map((p) => {
  if ('text' in p && 'thought' in p && p.thought === true) {
  reasoningParts.push(p.text ?? '');
+ return undefined;
  }
  else if ('text' in p) {
  return {
@@ -432,7 +455,8 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
  };
  }
  return p;
- });
+ })
+ .filter((p) => p !== undefined);
  }
  else {
  // no content returned - likely due to abnormal stop reason, e.g. malformed function call
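
The net effect of the .map/.filter change, with invented parts:

// Invented parts: one thought part plus one visible text part
const parts = [
  { text: 'internal reasoning', thought: true },
  { text: 'Final answer' },
];
// Previously the thought branch returned nothing, leaving undefined holes in content.
// Now thought parts map to undefined and are filtered out, so
// content = [{ type: 'text', text: 'Final answer' }]
// while 'internal reasoning' is collected into reasoningParts.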
@@ -447,17 +471,26 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
  text = block?.text ?? '';
  }
  const toolCallChunks = [];
- if (functionCalls) {
+ if (functionCalls.length > 0) {
  toolCallChunks.push(...functionCalls.map((fc) => ({
- ...fc,
- args: JSON.stringify(fc.args),
- // Un-commenting this causes LangChain to incorrectly merge tool calls together
- // index: extra.index,
  type: 'tool_call_chunk',
- id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuid.v4(),
+ id: fc?.id,
+ name: fc?.functionCall.name,
+ args: JSON.stringify(fc?.functionCall.args),
  })));
  }
- const additional_kwargs = {};
+ // Extract thought signatures from function calls for Gemini 3+
+ const functionThoughtSignatures = functionCalls.reduce((acc, fc) => {
+ if (fc &&
+ 'thoughtSignature' in fc &&
+ typeof fc.thoughtSignature === 'string') {
+ acc[fc.id] = fc.thoughtSignature;
+ }
+ return acc;
+ }, {});
+ const additional_kwargs = {
+ [_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY]: functionThoughtSignatures,
+ };
  if (reasoningParts.length > 0) {
  additional_kwargs.reasoning = reasoningParts.join('');
  }
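
Putting these pieces together, a chunk carrying one signed tool call would surface kwargs shaped roughly like this (values invented):

// Illustrative additional_kwargs on the resulting AIMessageChunk
const additional_kwargs = {
  __gemini_function_call_thought_signatures__: { call_1: 'sig_abc' },
  reasoning: 'internal reasoning', // only present when thought parts were seen
};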
@@ -481,10 +514,134 @@ function convertResponseContentToChatGenerationChunk(response, extra) {
  generationInfo,
  });
  }
+ /**
+ * Maps a Google GenerateContentResult to a LangChain ChatResult
+ */
+ function mapGenerateContentResultToChatResult(response, extra) {
+ if (!response.candidates ||
+ response.candidates.length === 0 ||
+ !response.candidates[0]) {
+ return {
+ generations: [],
+ llmOutput: {
+ filters: response.promptFeedback,
+ },
+ };
+ }
+ const [candidate] = response.candidates;
+ const { content: candidateContent, ...generationInfo } = candidate ?? {};
+ // Extract function calls directly from parts to preserve thoughtSignature
+ const functionCalls = candidateContent?.parts.reduce((acc, p) => {
+ if ('functionCall' in p && p.functionCall) {
+ acc.push({
+ ...p,
+ id: 'id' in p.functionCall && typeof p.functionCall.id === 'string'
+ ? p.functionCall.id
+ : uuid.v4(),
+ });
+ }
+ return acc;
+ }, []) ?? [];
+ let content;
+ const reasoningParts = [];
+ if (Array.isArray(candidateContent?.parts) &&
+ candidateContent.parts.length === 1 &&
+ candidateContent.parts[0].text &&
+ !('thought' in candidateContent.parts[0] &&
+ candidateContent.parts[0].thought === true)) {
+ content = candidateContent.parts[0].text;
+ }
+ else if (Array.isArray(candidateContent?.parts) &&
+ candidateContent.parts.length > 0) {
+ content = candidateContent.parts
+ .map((p) => {
+ if ('text' in p && 'thought' in p && p.thought === true) {
+ reasoningParts.push(p.text ?? '');
+ return undefined;
+ }
+ else if ('text' in p) {
+ return {
+ type: 'text',
+ text: p.text,
+ };
+ }
+ else if ('executableCode' in p) {
+ return {
+ type: 'executableCode',
+ executableCode: p.executableCode,
+ };
+ }
+ else if ('codeExecutionResult' in p) {
+ return {
+ type: 'codeExecutionResult',
+ codeExecutionResult: p.codeExecutionResult,
+ };
+ }
+ return p;
+ })
+ .filter((p) => p !== undefined);
+ }
+ else {
+ content = [];
+ }
+ let text = '';
+ if (typeof content === 'string') {
+ text = content;
+ }
+ else if (Array.isArray(content) && content.length > 0) {
+ const block = content.find((b) => 'text' in b);
+ text = block?.text ?? text;
+ }
+ const additional_kwargs = {
+ ...generationInfo,
+ };
+ if (reasoningParts.length > 0) {
+ additional_kwargs.reasoning = reasoningParts.join('');
+ }
+ // Extract thought signatures from function calls for Gemini 3+
+ const functionThoughtSignatures = functionCalls.reduce((acc, fc) => {
+ if ('thoughtSignature' in fc && typeof fc.thoughtSignature === 'string') {
+ acc[fc.id] = fc.thoughtSignature;
+ }
+ return acc;
+ }, {});
+ const tool_calls = functionCalls.map((fc) => ({
+ type: 'tool_call',
+ id: fc.id,
+ name: fc.functionCall.name,
+ args: fc.functionCall.args,
+ }));
+ // Store thought signatures map for later retrieval
+ additional_kwargs[_FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY] =
+ functionThoughtSignatures;
+ const generation = {
+ text,
+ message: new messages.AIMessage({
+ content: content ?? '',
+ tool_calls,
+ additional_kwargs,
+ usage_metadata: extra?.usageMetadata,
+ }),
+ generationInfo,
+ };
+ return {
+ generations: [generation],
+ llmOutput: {
+ tokenUsage: {
+ promptTokens: extra?.usageMetadata?.input_tokens,
+ completionTokens: extra?.usageMetadata?.output_tokens,
+ totalTokens: extra?.usageMetadata?.total_tokens,
+ },
+ },
+ };
+ }
 
+ exports._FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY = _FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY;
  exports.convertAuthorToRole = convertAuthorToRole;
  exports.convertBaseMessagesToContent = convertBaseMessagesToContent;
  exports.convertMessageContentToParts = convertMessageContentToParts;
  exports.convertResponseContentToChatGenerationChunk = convertResponseContentToChatGenerationChunk;
  exports.getMessageAuthor = getMessageAuthor;
+ exports.iife = iife;
+ exports.mapGenerateContentResultToChatResult = mapGenerateContentResultToChatResult;
  //# sourceMappingURL=common.cjs.map
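
A sketch of the round trip these exports enable, assuming response and usageMetadata are already in scope from a generateContent call:

const common = require('./utils/common.cjs');

// Map the raw result; thought signatures land in additional_kwargs keyed by tool call id
const chatResult = common.mapGenerateContentResultToChatResult(response, { usageMetadata });
const [generation] = chatResult.generations;
const signatures =
  generation.message.additional_kwargs[common._FUNCTION_CALL_THOUGHT_SIGNATURES_MAP_KEY];
// On the next turn, convertBaseMessagesToContent threads `model` down to
// convertMessageContentToParts, which re-attaches each signature to its functionCall
// part (or falls back to DUMMY_SIGNATURE for gemini-3 models when none was stored).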