@librechat/agents 2.4.70 → 2.4.72

@@ -24,6 +24,7 @@ import type { BaseMessage, UsageMetadata } from '@langchain/core/messages';
 import type { ChatXAIInput } from '@langchain/xai';
 import type * as t from '@langchain/openai';
 import {
+  isReasoningModel,
   _convertMessagesToOpenAIParams,
   _convertMessagesToOpenAIResponsesParams,
   _convertOpenAIResponsesDeltaToBaseMessageChunk,
@@ -192,6 +193,7 @@ export class CustomAzureOpenAIClient extends AzureOpenAIClient {
   }
 }
 
+/** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 export class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
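For context on the annotation just added: `@ts-expect-error` suppresses whatever type error TypeScript reports on the line that follows it, and itself becomes an error if that line type-checks cleanly. A minimal generic illustration (plain TypeScript, unrelated to the package's own types):

```ts
// @ts-expect-error – the next line is intentionally ill-typed; without the directive tsc reports TS2322
const wrong: number = 'not a number';

// If the directive sat above a line with no error, tsc would instead report
// "Unused '@ts-expect-error' directive", so the suppression stays honest.
const right: number = 42;
```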
@@ -224,6 +226,41 @@ export class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
     return requestOptions;
   }
 
+  /**
+   * Returns backwards compatible reasoning parameters from constructor params and call options
+   * @internal
+   */
+  getReasoningParams(
+    options?: this['ParsedCallOptions']
+  ): OpenAIClient.Reasoning | undefined {
+    if (!isReasoningModel(this.model)) {
+      return;
+    }
+
+    // apply options in reverse order of importance -- newer options supersede older options
+    let reasoning: OpenAIClient.Reasoning | undefined;
+    if (this.reasoning !== undefined) {
+      reasoning = {
+        ...reasoning,
+        ...this.reasoning,
+      };
+    }
+    if (options?.reasoning !== undefined) {
+      reasoning = {
+        ...reasoning,
+        ...options.reasoning,
+      };
+    }
+
+    return reasoning;
+  }
+
+  protected _getReasoningParams(
+    options?: this['ParsedCallOptions']
+  ): OpenAIClient.Reasoning | undefined {
+    return this.getReasoningParams(options);
+  }
+
   async *_streamResponseChunks(
     messages: BaseMessage[],
     options: this['ParsedCallOptions'],
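The precedence is easier to see outside the class. The sketch below mirrors the merge performed by `getReasoningParams` above; the local `Reasoning` type and the `effort`/`summary` fields are illustrative stand-ins for the OpenAI client's reasoning options, not the package's actual imports.

```ts
// Standalone sketch of the merge above: the call-option object is spread last,
// so any field set per call supersedes the constructor-level default.
type Reasoning = { effort?: 'low' | 'medium' | 'high'; summary?: 'auto' | 'concise' | 'detailed' };

function mergeReasoning(
  constructorReasoning?: Reasoning,
  callOptionReasoning?: Reasoning
): Reasoning | undefined {
  let reasoning: Reasoning | undefined;
  if (constructorReasoning !== undefined) {
    reasoning = { ...reasoning, ...constructorReasoning };
  }
  if (callOptionReasoning !== undefined) {
    reasoning = { ...reasoning, ...callOptionReasoning };
  }
  return reasoning;
}

// Constructor default of 'medium' effort, overridden to 'high' for a single call;
// the per-call object leaves `summary` unset, so it falls through from the constructor value.
console.log(mergeReasoning({ effort: 'medium', summary: 'auto' }, { effort: 'high' }));
// -> { effort: 'high', summary: 'auto' }

// Neither source set anything: the merge yields undefined.
console.log(mergeReasoning(undefined, undefined)); // -> undefined
```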
@@ -391,10 +428,46 @@ export class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
   }
 }
 
+/** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 export class AzureChatOpenAI extends OriginalAzureChatOpenAI {
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
   }
+  /**
+   * Returns backwards compatible reasoning parameters from constructor params and call options
+   * @internal
+   */
+  getReasoningParams(
+    options?: this['ParsedCallOptions']
+  ): OpenAIClient.Reasoning | undefined {
+    if (!isReasoningModel(this.model)) {
+      return;
+    }
+
+    // apply options in reverse order of importance -- newer options supersede older options
+    let reasoning: OpenAIClient.Reasoning | undefined;
+    if (this.reasoning !== undefined) {
+      reasoning = {
+        ...reasoning,
+        ...this.reasoning,
+      };
+    }
+    if (options?.reasoning !== undefined) {
+      reasoning = {
+        ...reasoning,
+        ...options.reasoning,
+      };
+    }
+
+    return reasoning;
+  }
+
+  protected _getReasoningParams(
+    options?: this['ParsedCallOptions']
+  ): OpenAIClient.Reasoning | undefined {
+    return this.getReasoningParams(options);
+  }
+
   protected _getClientOptions(
     options: OpenAICoreRequestOptions | undefined
   ): OpenAICoreRequestOptions {
@@ -648,7 +648,11 @@ export function _convertMessagesToOpenAIResponsesParams(
 }
 
 export function isReasoningModel(model?: string) {
-  return model != null && model && /^o\d/.test(model);
+  return (
+    model != null &&
+    model !== '' &&
+    (/^o\d/.test(model) || /^gpt-[5-9]/.test(model))
+  );
 }
 
 function _convertOpenAIResponsesMessageToBaseMessage(
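A quick, self-contained check of how the widened predicate behaves; the function body is copied verbatim from the hunk above, and the model ids below are illustrative rather than an exhaustive list:

```ts
// Same predicate body as the updated isReasoningModel above.
function isReasoningModel(model?: string) {
  return (
    model != null &&
    model !== '' &&
    (/^o\d/.test(model) || /^gpt-[5-9]/.test(model))
  );
}

console.log(isReasoningModel('o1-mini'));    // true  – matches /^o\d/
console.log(isReasoningModel('o3'));         // true  – matches /^o\d/
console.log(isReasoningModel('gpt-5-nano')); // true  – newly matched by /^gpt-[5-9]/
console.log(isReasoningModel('gpt-4o'));     // false – the gpt-4 family is not treated as reasoning models
console.log(isReasoningModel(''));           // false – empty string rejected explicitly
console.log(isReasoningModel(undefined));    // false
```

Replacing the bare truthiness check (`model &&`) with `model !== ''` also means the function now always returns a strict boolean instead of sometimes returning the empty string.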