@librechat/agents 2.4.79 → 2.4.81

This diff covers publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registry.
Files changed (37)
  1. package/dist/cjs/llm/anthropic/index.cjs +3 -0
  2. package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
  3. package/dist/cjs/llm/google/index.cjs +3 -0
  4. package/dist/cjs/llm/google/index.cjs.map +1 -1
  5. package/dist/cjs/llm/ollama/index.cjs +3 -0
  6. package/dist/cjs/llm/ollama/index.cjs.map +1 -1
  7. package/dist/cjs/llm/openai/index.cjs +14 -1
  8. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  9. package/dist/cjs/llm/openrouter/index.cjs +5 -1
  10. package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
  11. package/dist/cjs/llm/vertexai/index.cjs +1 -1
  12. package/dist/cjs/llm/vertexai/index.cjs.map +1 -1
  13. package/dist/esm/llm/anthropic/index.mjs +3 -0
  14. package/dist/esm/llm/anthropic/index.mjs.map +1 -1
  15. package/dist/esm/llm/google/index.mjs +3 -0
  16. package/dist/esm/llm/google/index.mjs.map +1 -1
  17. package/dist/esm/llm/ollama/index.mjs +3 -0
  18. package/dist/esm/llm/ollama/index.mjs.map +1 -1
  19. package/dist/esm/llm/openai/index.mjs +14 -1
  20. package/dist/esm/llm/openai/index.mjs.map +1 -1
  21. package/dist/esm/llm/openrouter/index.mjs +5 -1
  22. package/dist/esm/llm/openrouter/index.mjs.map +1 -1
  23. package/dist/esm/llm/vertexai/index.mjs +1 -1
  24. package/dist/esm/llm/vertexai/index.mjs.map +1 -1
  25. package/dist/types/llm/anthropic/index.d.ts +1 -0
  26. package/dist/types/llm/google/index.d.ts +1 -0
  27. package/dist/types/llm/ollama/index.d.ts +1 -0
  28. package/dist/types/llm/openai/index.d.ts +4 -0
  29. package/dist/types/llm/openrouter/index.d.ts +4 -2
  30. package/dist/types/llm/vertexai/index.d.ts +1 -1
  31. package/package.json +2 -2
  32. package/src/llm/anthropic/index.ts +4 -0
  33. package/src/llm/google/index.ts +4 -0
  34. package/src/llm/ollama/index.ts +3 -0
  35. package/src/llm/openai/index.ts +15 -1
  36. package/src/llm/openrouter/index.ts +15 -6
  37. package/src/llm/vertexai/index.ts +2 -2
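
The bulk of this release is a naming pass: every provider wrapper now overrides LangChain's static lc_name() to return a 'LibreChat'-prefixed identifier, and ChatVertexAI's existing override is renamed from 'ChatVertexAI' to 'LibreChatVertexAI' to match. In @langchain/core, lc_name() feeds the class's serialization id and, via Runnable.getName(), the run names surfaced to callbacks and tracing, so these wrappers now identify themselves distinctly from the upstream classes they extend. A minimal sketch of the pattern, assuming @langchain/core's usual name lookup order; this is an illustration, not code from the package:

import { ChatOpenAI as UpstreamChatOpenAI } from '@langchain/openai';

// Override the static identifier LangChain consults for serialization
// ids and run names.
class ChatOpenAI extends UpstreamChatOpenAI {
  static lc_name(): string {
    return 'LibreChatOpenAI';
  }
}

// Reads OPENAI_API_KEY from the environment.
const model = new ChatOpenAI({ model: 'gpt-4o-mini' });
// Runnable.getName() falls back to the static lc_name() when no
// instance name is set:
console.log(model.getName()); // 'LibreChatOpenAI'
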
package/dist/types/llm/anthropic/index.d.ts CHANGED
@@ -20,6 +20,7 @@ export declare class CustomAnthropic extends ChatAnthropicMessages {
   private tools_in_params?;
   private emitted_usage?;
   constructor(fields?: CustomAnthropicInput);
+  static lc_name(): 'LibreChatAnthropic';
   /**
    * Get the parameters used to invoke the model
    */
package/dist/types/llm/google/index.d.ts CHANGED
@@ -8,6 +8,7 @@ import type { GoogleClientOptions } from '@/types';
 export declare class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
   thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];
   constructor(fields: GoogleClientOptions);
+  static lc_name(): 'LibreChatGoogleGenerativeAI';
   invocationParams(options?: this['ParsedCallOptions']): Omit<GenerateContentRequest, 'contents'>;
   _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
package/dist/types/llm/ollama/index.d.ts CHANGED
@@ -3,5 +3,6 @@ import { ChatOllama as BaseChatOllama } from '@langchain/ollama';
 import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
 import type { BaseMessage } from '@langchain/core/messages';
 export declare class ChatOllama extends BaseChatOllama {
+  static lc_name(): 'LibreChatOllama';
   _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
 }
package/dist/types/llm/openai/index.d.ts CHANGED
@@ -41,6 +41,7 @@ export declare class CustomAzureOpenAIClient extends AzureOpenAIClient {
 /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 export declare class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
   get exposedClient(): CustomOpenAIClient;
+  static lc_name(): string;
   protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
   /**
    * Returns backwards compatible reasoning parameters from constructor params and call options
@@ -54,6 +55,7 @@ export declare class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptio
 /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 export declare class AzureChatOpenAI extends OriginalAzureChatOpenAI {
   get exposedClient(): CustomOpenAIClient;
+  static lc_name(): 'LibreChatAzureOpenAI';
   /**
    * Returns backwards compatible reasoning parameters from constructor params and call options
    * @internal
@@ -65,6 +67,7 @@ export declare class AzureChatOpenAI extends OriginalAzureChatOpenAI {
 }
 export declare class ChatDeepSeek extends OriginalChatDeepSeek {
   get exposedClient(): CustomOpenAIClient;
+  static lc_name(): 'LibreChatDeepSeek';
   protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
 }
 /** xAI-specific usage metadata type */
@@ -92,6 +95,7 @@ export declare class ChatXAI extends OriginalChatXAI {
     baseURL?: string;
   };
 });
+  static lc_name(): 'LibreChatXAI';
   get exposedClient(): CustomOpenAIClient;
   protected _getClientOptions(options?: OpenAICoreRequestOptions): OpenAICoreRequestOptions;
   _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
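
One detail in the declarations above: ChatOpenAI widens its override to static lc_name(): string, while every other class returns a literal type. The widening is what allows the subclasses declared in this file (AzureChatOpenAI, ChatDeepSeek, ChatXAI) and ChatOpenRouter below to narrow the method to their own literals, since TypeScript requires an override's return type to be assignable to the base signature. A standalone sketch of the constraint, with illustrative names:

class Base {
  // Widened to `string`; a literal return type here would force every
  // subclass to return that same literal.
  static lc_name(): string {
    return 'LibreChatOpenAI';
  }
}

class Derived extends Base {
  // OK: the literal type 'LibreChatDeepSeek' is assignable to `string`.
  static lc_name(): 'LibreChatDeepSeek' {
    return 'LibreChatDeepSeek';
  }
}
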
package/dist/types/llm/openrouter/index.d.ts CHANGED
@@ -1,10 +1,12 @@
-import type { ChatOpenAICallOptions, OpenAIClient } from '@langchain/openai';
-import type { AIMessageChunk, HumanMessageChunk, SystemMessageChunk, FunctionMessageChunk, ToolMessageChunk, ChatMessageChunk } from '@langchain/core/messages';
 import { ChatOpenAI } from '@/llm/openai';
+import type { FunctionMessageChunk, SystemMessageChunk, HumanMessageChunk, ToolMessageChunk, ChatMessageChunk, AIMessageChunk } from '@langchain/core/messages';
+import type { ChatOpenAICallOptions, OpenAIChatInput, OpenAIClient } from '@langchain/openai';
 export interface ChatOpenRouterCallOptions extends ChatOpenAICallOptions {
   include_reasoning?: boolean;
+  modelKwargs?: OpenAIChatInput['modelKwargs'];
 }
 export declare class ChatOpenRouter extends ChatOpenAI {
   constructor(_fields: Partial<ChatOpenRouterCallOptions>);
+  static lc_name(): 'LibreChatOpenRouter';
   protected _convertOpenAIDeltaToBaseMessageChunk(delta: Record<string, any>, rawResponse: OpenAIClient.ChatCompletionChunk, defaultRole?: 'function' | 'user' | 'system' | 'developer' | 'assistant' | 'tool'): AIMessageChunk | HumanMessageChunk | SystemMessageChunk | FunctionMessageChunk | ToolMessageChunk | ChatMessageChunk;
 }
package/dist/types/llm/vertexai/index.d.ts CHANGED
@@ -286,7 +286,7 @@ import type { VertexAIClientOptions } from '@/types';
 export declare class ChatVertexAI extends ChatGoogle {
   lc_namespace: string[];
   dynamicThinkingBudget: boolean;
-  static lc_name(): 'ChatVertexAI';
+  static lc_name(): 'LibreChatVertexAI';
   constructor(fields?: VertexAIClientOptions);
   invocationParams(options?: this['ParsedCallOptions'] | undefined): GoogleAIModelRequestParams;
   buildConnection(fields: VertexAIClientOptions, client: GoogleAbstractedClient): void;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "2.4.79",
+  "version": "2.4.81",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -51,7 +51,7 @@
   "caching": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/caching.ts --name 'Jo' --location 'New York, NY'",
   "thinking": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/thinking.ts --name 'Jo' --location 'New York, NY'",
   "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
-  "tool-test": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/tools.ts --provider 'anthropicLITELLM' --name 'Jo' --location 'New York, NY'",
+  "tool-test": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/tools.ts --provider 'openrouter' --name 'Jo' --location 'New York, NY'",
   "search": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/search.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
   "ant_web_search": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/ant_web_search.ts --name 'Jo' --location 'New York, NY'",
   "abort": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/abort.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
package/src/llm/anthropic/index.ts CHANGED
@@ -141,6 +141,10 @@ export class CustomAnthropic extends ChatAnthropicMessages {
     this._lc_stream_delay = fields?._lc_stream_delay ?? 25;
   }
 
+  static lc_name(): 'LibreChatAnthropic' {
+    return 'LibreChatAnthropic';
+  }
+
   /**
    * Get the parameters used to invoke the model
    */
package/src/llm/google/index.ts CHANGED
@@ -107,6 +107,10 @@ export class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
     this.streamUsage = fields.streamUsage ?? this.streamUsage;
   }
 
+  static lc_name(): 'LibreChatGoogleGenerativeAI' {
+    return 'LibreChatGoogleGenerativeAI';
+  }
+
   invocationParams(
     options?: this['ParsedCallOptions']
   ): Omit<GenerateContentRequest, 'contents'> {
package/src/llm/ollama/index.ts CHANGED
@@ -13,6 +13,9 @@ import {
 } from './utils';
 
 export class ChatOllama extends BaseChatOllama {
+  static lc_name(): 'LibreChatOllama' {
+    return 'LibreChatOllama';
+  }
   async *_streamResponseChunks(
     messages: BaseMessage[],
     options: this['ParsedCallOptions'],
package/src/llm/openai/index.ts CHANGED
@@ -198,6 +198,9 @@ export class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
   }
+  static lc_name(): string {
+    return 'LibreChatOpenAI';
+  }
   protected _getClientOptions(
     options?: OpenAICoreRequestOptions
   ): OpenAICoreRequestOptions {
@@ -233,7 +236,8 @@ export class ChatOpenAI extends OriginalChatOpenAI<t.ChatOpenAICallOptions> {
   getReasoningParams(
     options?: this['ParsedCallOptions']
   ): OpenAIClient.Reasoning | undefined {
-    if (!isReasoningModel(this.model)) {
+    const lc_name = (this.constructor as typeof ChatOpenAI).lc_name();
+    if (lc_name === 'LibreChatOpenAI' && !isReasoningModel(this.model)) {
       return;
     }
 
@@ -439,6 +443,9 @@ export class AzureChatOpenAI extends OriginalAzureChatOpenAI {
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
   }
+  static lc_name(): 'LibreChatAzureOpenAI' {
+    return 'LibreChatAzureOpenAI';
+  }
   /**
    * Returns backwards compatible reasoning parameters from constructor params and call options
    * @internal
@@ -580,6 +587,9 @@ export class ChatDeepSeek extends OriginalChatDeepSeek {
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
   }
+  static lc_name(): 'LibreChatDeepSeek' {
+    return 'LibreChatDeepSeek';
+  }
   protected _getClientOptions(
     options?: OpenAICoreRequestOptions
   ): OpenAICoreRequestOptions {
@@ -648,6 +658,10 @@ export class ChatXAI extends OriginalChatXAI {
     }
   }
 
+  static lc_name(): 'LibreChatXAI' {
+    return 'LibreChatXAI';
+  }
+
   public get exposedClient(): CustomOpenAIClient {
     return this.client;
   }
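
The getReasoningParams hunk is the one behavioral change in this file: the early return for non-reasoning models now fires only when the class really is the base LibreChatOpenAI. Subclasses such as ChatDeepSeek, ChatXAI, and ChatOpenRouter inherit the method, and their model names (e.g. 'deepseek-reasoner') would fail an OpenAI-centric model-name check even when reasoning parameters are valid for them. A simplified sketch of the dispatch; isReasoningModel() and the returned payload are stand-ins for the package's internals:

// Stand-in for the package's model-name check (assumed behavior).
function isReasoningModel(model: string): boolean {
  return /^o\d/.test(model);
}

class ChatOpenAI {
  constructor(public model: string) {}
  static lc_name(): string {
    return 'LibreChatOpenAI';
  }
  getReasoningParams(): { effort: string } | undefined {
    const lc_name = (this.constructor as typeof ChatOpenAI).lc_name();
    // Only the base class gates on the OpenAI model-name check;
    // subclasses fall through and always build reasoning params.
    if (lc_name === 'LibreChatOpenAI' && !isReasoningModel(this.model)) {
      return undefined;
    }
    return { effort: 'medium' }; // illustrative payload
  }
}

class ChatDeepSeek extends ChatOpenAI {
  static lc_name(): 'LibreChatDeepSeek' {
    return 'LibreChatDeepSeek';
  }
}

console.log(new ChatOpenAI('gpt-4o-mini').getReasoningParams()); // undefined (gated)
console.log(new ChatDeepSeek('deepseek-reasoner').getReasoningParams()); // { effort: 'medium' }
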
package/src/llm/openrouter/index.ts CHANGED
@@ -1,27 +1,36 @@
-import type { ChatOpenAICallOptions, OpenAIClient } from '@langchain/openai';
+import { ChatOpenAI } from '@/llm/openai';
 import type {
-  AIMessageChunk,
-  HumanMessageChunk,
-  SystemMessageChunk,
   FunctionMessageChunk,
+  SystemMessageChunk,
+  HumanMessageChunk,
   ToolMessageChunk,
   ChatMessageChunk,
+  AIMessageChunk,
 } from '@langchain/core/messages';
-import { ChatOpenAI } from '@/llm/openai';
+import type {
+  ChatOpenAICallOptions,
+  OpenAIChatInput,
+  OpenAIClient,
+} from '@langchain/openai';
 
 export interface ChatOpenRouterCallOptions extends ChatOpenAICallOptions {
   include_reasoning?: boolean;
+  modelKwargs?: OpenAIChatInput['modelKwargs'];
 }
 export class ChatOpenRouter extends ChatOpenAI {
   constructor(_fields: Partial<ChatOpenRouterCallOptions>) {
-    const { include_reasoning, ...fields } = _fields;
+    const { include_reasoning, modelKwargs = {}, ...fields } = _fields;
     super({
       ...fields,
       modelKwargs: {
+        ...modelKwargs,
         include_reasoning,
       },
     });
   }
+  static lc_name(): 'LibreChatOpenRouter' {
+    return 'LibreChatOpenRouter';
+  }
   protected override _convertOpenAIDeltaToBaseMessageChunk(
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
    delta: Record<string, any>,
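
Besides adding lc_name(), the constructor change fixes a silent drop: in 2.4.79 any caller-supplied modelKwargs was replaced by the rebuilt { include_reasoning } object, whereas 2.4.81 spreads the caller's entries first, with include_reasoning still winning on a key conflict since it is spread last. A hypothetical usage sketch, assuming ChatOpenRouter is exported from the package root; the transforms field is an illustrative OpenRouter request option, and model/credential fields are omitted for brevity:

import { ChatOpenRouter } from '@librechat/agents';

const model = new ChatOpenRouter({
  include_reasoning: true,
  modelKwargs: {
    transforms: ['middle-out'], // extra request body field to pass through
  },
});

// 2.4.79: modelKwargs ended up as { include_reasoning: true } (transforms was lost).
// 2.4.81: modelKwargs is { transforms: ['middle-out'], include_reasoning: true }.
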
package/src/llm/vertexai/index.ts CHANGED
@@ -313,8 +313,8 @@ export class ChatVertexAI extends ChatGoogle {
   lc_namespace = ['langchain', 'chat_models', 'vertexai'];
   dynamicThinkingBudget = false;
 
-  static lc_name(): 'ChatVertexAI' {
-    return 'ChatVertexAI';
+  static lc_name(): 'LibreChatVertexAI' {
+    return 'LibreChatVertexAI';
   }
 
   constructor(fields?: VertexAIClientOptions) {