@librechat/agents 1.9.97 → 1.9.99
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/cjs/common/enum.cjs +1 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +4 -3
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/llm/providers.cjs +2 -1
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/main.cjs +3 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/run.cjs +3 -2
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +1 -0
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +1 -1
- package/dist/cjs/utils/llm.cjs +14 -0
- package/dist/cjs/utils/llm.cjs.map +1 -0
- package/dist/esm/common/enum.mjs +1 -0
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +4 -3
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/llm/providers.mjs +2 -1
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/main.mjs +2 -1
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/run.mjs +4 -3
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +1 -1
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +1 -1
- package/dist/esm/utils/llm.mjs +12 -0
- package/dist/esm/utils/llm.mjs.map +1 -0
- package/dist/types/common/enum.d.ts +2 -1
- package/dist/types/stream.d.ts +1 -0
- package/dist/types/types/llm.d.ts +18 -8
- package/dist/types/utils/index.d.ts +1 -0
- package/dist/types/utils/llm.d.ts +2 -0
- package/package.json +1 -1
- package/src/common/enum.ts +1 -0
- package/src/graphs/Graph.ts +4 -4
- package/src/llm/providers.ts +2 -1
- package/src/run.ts +3 -2
- package/src/stream.ts +1 -1
- package/src/types/llm.ts +19 -8
- package/src/utils/index.ts +1 -0
- package/src/utils/llm.ts +9 -0
- package/src/utils/llmConfig.ts +11 -0
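
Taken together, these changes add Azure OpenAI as a first-class provider: a new Providers.AZURE enum member (src/common/enum.ts), an AzureChatOpenAI entry in the provider constructor and chat model maps, a dedicated AzureClientOptions type, an isOpenAILike utility shared by Graph.ts and run.ts, a default Azure entry in llmConfig.ts, and a newly exported getMessageId in stream.ts.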
package/dist/types/types/llm.d.ts CHANGED

@@ -1,24 +1,32 @@
-import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatBedrockConverse } from '@langchain/aws';
 import { ChatVertexAI } from '@langchain/google-vertexai';
-import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
-import
-import
-import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
+import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
+import { ChatOpenAI, AzureChatOpenAI, ClientOptions as OAIClientOptions } from '@langchain/openai';
+import type { BindToolsInput, BaseChatModelParams } from '@langchain/core/language_models/chat_models';
+import type { ChatOpenAIFields, OpenAIChatInput, AzureOpenAIInput } from '@langchain/openai';
 import type { BedrockChatFields } from '@langchain/community/chat_models/bedrock/web';
-import type { ChatOpenAIFields } from '@langchain/openai';
-import type { OpenAI as OpenAIClient } from 'openai';
 import type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';
 import type { ChatVertexAIInput } from '@langchain/google-vertexai';
 import type { ChatBedrockConverseInput } from '@langchain/aws';
 import type { ChatMistralAIInput } from '@langchain/mistralai';
+import type { StructuredTool } from '@langchain/core/tools';
 import type { AnthropicInput } from '@langchain/anthropic';
+import type { Runnable } from '@langchain/core/runnables';
 import type { ChatOllamaInput } from '@langchain/ollama';
+import type { OpenAI as OpenAIClient } from 'openai';
 import { Providers } from '@/common';
+export type AzureClientOptions = (Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & {
+    openAIApiKey?: string;
+    openAIApiVersion?: string;
+    openAIBasePath?: string;
+    deploymentName?: string;
+} & BaseChatModelParams & {
+    configuration?: OAIClientOptions;
+});
 export type ChatOpenAIToolType = BindToolsInput | OpenAIClient.ChatCompletionTool;
 export type CommonToolType = StructuredTool | ChatOpenAIToolType;
 export type OpenAIClientOptions = ChatOpenAIFields;
@@ -29,12 +37,13 @@ export type VertexAIClientOptions = ChatVertexAIInput;
 export type BedrockClientOptions = BedrockChatFields;
 export type BedrockConverseClientOptions = ChatBedrockConverseInput;
 export type GoogleClientOptions = GoogleGenerativeAIChatInput;
-export type ClientOptions = OpenAIClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions;
+export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions;
 export type LLMConfig = {
     provider: Providers;
 } & ClientOptions;
 export type ProviderOptionsMap = {
     [Providers.OPENAI]: OpenAIClientOptions;
+    [Providers.AZURE]: AzureClientOptions;
     [Providers.OLLAMA]: OllamaClientOptions;
     [Providers.ANTHROPIC]: AnthropicClientOptions;
     [Providers.MISTRALAI]: MistralAIClientOptions;
@@ -46,6 +55,7 @@ export type ProviderOptionsMap = {
 export type ChatModelMap = {
     [Providers.OPENAI]: ChatOpenAI;
     [Providers.OLLAMA]: ChatOllama;
+    [Providers.AZURE]: AzureChatOpenAI;
     [Providers.ANTHROPIC]: ChatAnthropic;
     [Providers.MISTRALAI]: ChatMistralAI;
     [Providers.VERTEXAI]: ChatVertexAI;

package/package.json CHANGED
package/src/common/enum.ts CHANGED
package/src/graphs/Graph.ts CHANGED

@@ -1,9 +1,9 @@
 // src/graphs/Graph.ts
 import { nanoid } from 'nanoid';
-import { ChatOpenAI } from '@langchain/openai';
 import { concat } from '@langchain/core/utils/stream';
 import { ToolNode } from '@langchain/langgraph/prebuilt';
 import { START, END, StateGraph } from '@langchain/langgraph';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 import { Runnable, RunnableConfig } from '@langchain/core/runnables';
 import { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';
 import { AIMessageChunk, ToolMessage, SystemMessage } from '@langchain/core/messages';
@@ -18,7 +18,7 @@ import {
   formatOpenAIArtifactContent,
   formatAnthropicArtifactContent,
 } from '@/messages';
-import { resetIfNotEmpty, joinKeys, sleep } from '@/utils';
+import { resetIfNotEmpty, isOpenAILike, joinKeys, sleep } from '@/utils';
 import { HandlerRegistry } from '@/events';

 const { AGENT, TOOLS } = GraphNodeKeys;
@@ -262,7 +262,7 @@ export class StandardGraph extends Graph<
     const ChatModelClass = getChatModelClass(this.provider);
     const model = new ChatModelClass(this.clientOptions);

-    if (this.provider
+    if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {
       model.temperature = (this.clientOptions as t.OpenAIClientOptions).temperature as number;
       model.topP = (this.clientOptions as t.OpenAIClientOptions).topP as number;
       model.frequencyPenalty = (this.clientOptions as t.OpenAIClientOptions).frequencyPenalty as number;
@@ -323,7 +323,7 @@ export class StandardGraph extends Graph<
       formatAnthropicArtifactContent(finalMessages);
     } else if (
       isLatestToolMessage &&
-      provider
+      isOpenAILike(provider)
     ) {
       formatOpenAIArtifactContent(finalMessages);
     }

package/src/llm/providers.ts CHANGED

@@ -1,10 +1,10 @@
 // src/llm/providers.ts
-import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatBedrockConverse } from '@langchain/aws';
 // import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatVertexAI } from '@langchain/google-vertexai';
+import { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';
 import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
 import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';
@@ -13,6 +13,7 @@ import { CustomAnthropic } from '@/llm/anthropic/llm';

 export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.OPENAI]: ChatOpenAI,
+  [Providers.AZURE]: AzureChatOpenAI,
   [Providers.OLLAMA]: ChatOllama,
   [Providers.VERTEXAI]: ChatVertexAI,
   [Providers.BEDROCK_LEGACY]: BedrockChat,

package/src/run.ts CHANGED

@@ -1,6 +1,6 @@
 // src/run.ts
-import { ChatOpenAI } from '@langchain/openai';
 import { PromptTemplate } from '@langchain/core/prompts';
+import { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';
 import type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';
 import type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';
 import type { RunnableConfig } from '@langchain/core/runnables';
@@ -10,6 +10,7 @@ import { manualToolStreamProviders } from '@/llm/providers';
 import { createTitleRunnable } from '@/utils/title';
 import { StandardGraph } from '@/graphs/Graph';
 import { HandlerRegistry } from '@/events';
+import { isOpenAILike } from '@/utils/llm';

 export class Run<T extends t.BaseGraphState> {
   graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;
@@ -186,7 +187,7 @@ export class Run<T extends t.BaseGraphState> {
     if (!model) {
       return { language: '', title: '' };
     }
-    if (this.provider
+    if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {
       model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)?.temperature as number;
       model.topP = (clientOptions as t.OpenAIClientOptions | undefined)?.topP as number;
       model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.frequencyPenalty as number;

package/src/stream.ts CHANGED

@@ -15,7 +15,7 @@ function getNonEmptyValue(possibleValues: string[]): string | undefined {
   return undefined;
 }

-const getMessageId = (stepKey: string, graph: Graph<t.BaseGraphState>, returnExistingId = false): string | undefined => {
+export const getMessageId = (stepKey: string, graph: Graph<t.BaseGraphState>, returnExistingId = false): string | undefined => {
   const messageId = graph.messageIdsByStepKey.get(stepKey);
   if (messageId != null && messageId) {
     return returnExistingId ? messageId : undefined;
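
The only change here is visibility: getMessageId moves from module-private to exported, which matches the +1 line in dist/types/stream.d.ts. A usage sketch, with the import path assumed:

import { getMessageId } from '@librechat/agents'; // assumption: re-exported from the package entry point

declare const stepKey: string; // a graph step key
declare const graph: Parameters<typeof getMessageId>[1]; // the Graph instance tracking message ids

// With returnExistingId = true, returns the id already recorded for this step key, if any:
const existingId = getMessageId(stepKey, graph, true);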

package/src/types/llm.ts CHANGED

@@ -1,26 +1,35 @@
 // src/types/llm.ts
-import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatBedrockConverse } from '@langchain/aws';
 import { ChatVertexAI } from '@langchain/google-vertexai';
-import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
-import
-import
-import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
+import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
+import { ChatOpenAI, AzureChatOpenAI, ClientOptions as OAIClientOptions } from '@langchain/openai';
+import type { BindToolsInput, BaseChatModelParams } from '@langchain/core/language_models/chat_models';
+import type { ChatOpenAIFields, OpenAIChatInput, AzureOpenAIInput } from '@langchain/openai';
 import type { BedrockChatFields } from '@langchain/community/chat_models/bedrock/web';
-import type { ChatOpenAIFields } from '@langchain/openai';
-import type { OpenAI as OpenAIClient } from 'openai';
 import type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';
 import type { ChatVertexAIInput } from '@langchain/google-vertexai';
 import type { ChatBedrockConverseInput } from '@langchain/aws';
 import type { ChatMistralAIInput } from '@langchain/mistralai';
+import type { StructuredTool } from '@langchain/core/tools';
 import type { AnthropicInput } from '@langchain/anthropic';
+import type { Runnable } from '@langchain/core/runnables';
 import type { ChatOllamaInput } from '@langchain/ollama';
+import type { OpenAI as OpenAIClient } from 'openai';
 import { Providers } from '@/common';

+export type AzureClientOptions = (Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & {
+  openAIApiKey?: string;
+  openAIApiVersion?: string;
+  openAIBasePath?: string;
+  deploymentName?: string;
+} & BaseChatModelParams & {
+  configuration?: OAIClientOptions;
+});
+
 export type ChatOpenAIToolType = BindToolsInput | OpenAIClient.ChatCompletionTool;
 export type CommonToolType = StructuredTool | ChatOpenAIToolType;

@@ -33,7 +42,7 @@ export type BedrockClientOptions = BedrockChatFields;
 export type BedrockConverseClientOptions = ChatBedrockConverseInput;
 export type GoogleClientOptions = GoogleGenerativeAIChatInput;

-export type ClientOptions = OpenAIClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions;
+export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions;

 export type LLMConfig = {
   provider: Providers;
@@ -41,6 +50,7 @@ export type LLMConfig = {

 export type ProviderOptionsMap = {
   [Providers.OPENAI]: OpenAIClientOptions;
+  [Providers.AZURE]: AzureClientOptions;
   [Providers.OLLAMA]: OllamaClientOptions;
   [Providers.ANTHROPIC]: AnthropicClientOptions;
   [Providers.MISTRALAI]: MistralAIClientOptions;
@@ -53,6 +63,7 @@ export type ProviderOptionsMap = {
 export type ChatModelMap = {
   [Providers.OPENAI]: ChatOpenAI;
   [Providers.OLLAMA]: ChatOllama;
+  [Providers.AZURE]: AzureChatOpenAI;
   [Providers.ANTHROPIC]: ChatAnthropic;
   [Providers.MISTRALAI]: ChatMistralAI;
   [Providers.VERTEXAI]: ChatVertexAI;

package/src/utils/index.ts CHANGED
package/src/utils/llm.ts ADDED
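
src/utils/llm.ts is new in this release (+9 lines) but its body is not rendered in this diff. Based on its call sites in Graph.ts and run.ts, a plausible minimal reconstruction, offered as an assumption rather than the published source:

// src/utils/llm.ts (reconstruction, not the actual file contents)
import { Providers } from '@/common';

export function isOpenAILike(provider?: Providers): boolean {
  return provider === Providers.OPENAI || provider === Providers.AZURE;
}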
package/src/utils/llmConfig.ts CHANGED

@@ -11,6 +11,17 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
     streamUsage: true,
     // disableStreaming: true,
   },
+  [Providers.AZURE]: {
+    provider: Providers.AZURE,
+    temperature: 0.7,
+    streaming: true,
+    streamUsage: true,
+    azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
+    azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE,
+    azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT,
+    azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
+    model: process.env.AZURE_MODEL_NAME ?? 'gpt-4o',
+  },
   [Providers.OLLAMA]: {
     provider: Providers.OLLAMA,
     model: 'llama3.2',