@librechat/agents 2.4.322 → 3.0.0-rc10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/agents/AgentContext.cjs +218 -0
- package/dist/cjs/agents/AgentContext.cjs.map +1 -0
- package/dist/cjs/common/enum.cjs +15 -5
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/events.cjs +10 -6
- package/dist/cjs/events.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +309 -213
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/graphs/MultiAgentGraph.cjs +507 -0
- package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/index.cjs +54 -9
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +52 -6
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +22 -2
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
- package/dist/cjs/llm/google/index.cjs +144 -0
- package/dist/cjs/llm/google/index.cjs.map +1 -0
- package/dist/cjs/llm/google/utils/common.cjs +477 -0
- package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
- package/dist/cjs/llm/ollama/index.cjs +67 -0
- package/dist/cjs/llm/ollama/index.cjs.map +1 -0
- package/dist/cjs/llm/ollama/utils.cjs +158 -0
- package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
- package/dist/cjs/llm/openai/index.cjs +422 -3
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/utils/index.cjs +672 -0
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
- package/dist/cjs/llm/providers.cjs +15 -15
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/llm/text.cjs +14 -3
- package/dist/cjs/llm/text.cjs.map +1 -1
- package/dist/cjs/llm/vertexai/index.cjs +330 -0
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
- package/dist/cjs/main.cjs +11 -0
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/run.cjs +137 -85
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +86 -52
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +10 -4
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/handlers.cjs +119 -13
- package/dist/cjs/tools/handlers.cjs.map +1 -1
- package/dist/cjs/tools/search/anthropic.cjs +40 -0
- package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
- package/dist/cjs/tools/search/firecrawl.cjs +55 -9
- package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
- package/dist/cjs/tools/search/format.cjs +6 -6
- package/dist/cjs/tools/search/format.cjs.map +1 -1
- package/dist/cjs/tools/search/rerankers.cjs +7 -29
- package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
- package/dist/cjs/tools/search/search.cjs +86 -16
- package/dist/cjs/tools/search/search.cjs.map +1 -1
- package/dist/cjs/tools/search/tool.cjs +4 -2
- package/dist/cjs/tools/search/tool.cjs.map +1 -1
- package/dist/cjs/tools/search/utils.cjs +1 -1
- package/dist/cjs/tools/search/utils.cjs.map +1 -1
- package/dist/cjs/utils/events.cjs +31 -0
- package/dist/cjs/utils/events.cjs.map +1 -0
- package/dist/cjs/utils/title.cjs +57 -21
- package/dist/cjs/utils/title.cjs.map +1 -1
- package/dist/cjs/utils/tokens.cjs +54 -7
- package/dist/cjs/utils/tokens.cjs.map +1 -1
- package/dist/esm/agents/AgentContext.mjs +216 -0
- package/dist/esm/agents/AgentContext.mjs.map +1 -0
- package/dist/esm/common/enum.mjs +16 -6
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/events.mjs +10 -6
- package/dist/esm/events.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +311 -215
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/graphs/MultiAgentGraph.mjs +505 -0
- package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -0
- package/dist/esm/llm/anthropic/index.mjs +54 -9
- package/dist/esm/llm/anthropic/index.mjs.map +1 -1
- package/dist/esm/llm/anthropic/types.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +52 -6
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +22 -2
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
- package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
- package/dist/esm/llm/google/index.mjs +142 -0
- package/dist/esm/llm/google/index.mjs.map +1 -0
- package/dist/esm/llm/google/utils/common.mjs +471 -0
- package/dist/esm/llm/google/utils/common.mjs.map +1 -0
- package/dist/esm/llm/ollama/index.mjs +65 -0
- package/dist/esm/llm/ollama/index.mjs.map +1 -0
- package/dist/esm/llm/ollama/utils.mjs +155 -0
- package/dist/esm/llm/ollama/utils.mjs.map +1 -0
- package/dist/esm/llm/openai/index.mjs +421 -4
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/openai/utils/index.mjs +666 -0
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
- package/dist/esm/llm/providers.mjs +5 -5
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/llm/text.mjs +14 -3
- package/dist/esm/llm/text.mjs.map +1 -1
- package/dist/esm/llm/vertexai/index.mjs +328 -0
- package/dist/esm/llm/vertexai/index.mjs.map +1 -0
- package/dist/esm/main.mjs +6 -5
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/run.mjs +138 -87
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +88 -55
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +10 -4
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/handlers.mjs +119 -15
- package/dist/esm/tools/handlers.mjs.map +1 -1
- package/dist/esm/tools/search/anthropic.mjs +37 -0
- package/dist/esm/tools/search/anthropic.mjs.map +1 -0
- package/dist/esm/tools/search/firecrawl.mjs +55 -9
- package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
- package/dist/esm/tools/search/format.mjs +7 -7
- package/dist/esm/tools/search/format.mjs.map +1 -1
- package/dist/esm/tools/search/rerankers.mjs +7 -29
- package/dist/esm/tools/search/rerankers.mjs.map +1 -1
- package/dist/esm/tools/search/search.mjs +86 -16
- package/dist/esm/tools/search/search.mjs.map +1 -1
- package/dist/esm/tools/search/tool.mjs +4 -2
- package/dist/esm/tools/search/tool.mjs.map +1 -1
- package/dist/esm/tools/search/utils.mjs +1 -1
- package/dist/esm/tools/search/utils.mjs.map +1 -1
- package/dist/esm/utils/events.mjs +29 -0
- package/dist/esm/utils/events.mjs.map +1 -0
- package/dist/esm/utils/title.mjs +57 -22
- package/dist/esm/utils/title.mjs.map +1 -1
- package/dist/esm/utils/tokens.mjs +54 -8
- package/dist/esm/utils/tokens.mjs.map +1 -1
- package/dist/types/agents/AgentContext.d.ts +91 -0
- package/dist/types/common/enum.d.ts +17 -7
- package/dist/types/events.d.ts +5 -4
- package/dist/types/graphs/Graph.d.ts +64 -67
- package/dist/types/graphs/MultiAgentGraph.d.ts +47 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/llm/anthropic/index.d.ts +11 -0
- package/dist/types/llm/anthropic/types.d.ts +9 -3
- package/dist/types/llm/anthropic/utils/message_inputs.d.ts +1 -1
- package/dist/types/llm/anthropic/utils/output_parsers.d.ts +4 -4
- package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
- package/dist/types/llm/google/index.d.ts +13 -0
- package/dist/types/llm/google/types.d.ts +32 -0
- package/dist/types/llm/google/utils/common.d.ts +19 -0
- package/dist/types/llm/google/utils/tools.d.ts +10 -0
- package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
- package/dist/types/llm/ollama/index.d.ts +7 -0
- package/dist/types/llm/ollama/utils.d.ts +7 -0
- package/dist/types/llm/openai/index.d.ts +82 -3
- package/dist/types/llm/openai/types.d.ts +10 -0
- package/dist/types/llm/openai/utils/index.d.ts +20 -0
- package/dist/types/llm/text.d.ts +1 -1
- package/dist/types/llm/vertexai/index.d.ts +293 -0
- package/dist/types/messages/reducer.d.ts +9 -0
- package/dist/types/run.d.ts +19 -12
- package/dist/types/stream.d.ts +10 -3
- package/dist/types/tools/CodeExecutor.d.ts +2 -2
- package/dist/types/tools/ToolNode.d.ts +1 -1
- package/dist/types/tools/handlers.d.ts +17 -4
- package/dist/types/tools/search/anthropic.d.ts +16 -0
- package/dist/types/tools/search/firecrawl.d.ts +15 -0
- package/dist/types/tools/search/rerankers.d.ts +0 -1
- package/dist/types/tools/search/types.d.ts +30 -9
- package/dist/types/types/graph.d.ts +129 -15
- package/dist/types/types/llm.d.ts +25 -10
- package/dist/types/types/run.d.ts +50 -8
- package/dist/types/types/stream.d.ts +16 -2
- package/dist/types/types/tools.d.ts +1 -1
- package/dist/types/utils/events.d.ts +6 -0
- package/dist/types/utils/title.d.ts +2 -1
- package/dist/types/utils/tokens.d.ts +24 -0
- package/package.json +41 -17
- package/src/agents/AgentContext.ts +315 -0
- package/src/common/enum.ts +15 -5
- package/src/events.ts +24 -13
- package/src/graphs/Graph.ts +495 -313
- package/src/graphs/MultiAgentGraph.ts +598 -0
- package/src/graphs/index.ts +2 -1
- package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
- package/src/llm/anthropic/index.ts +78 -13
- package/src/llm/anthropic/llm.spec.ts +491 -115
- package/src/llm/anthropic/types.ts +39 -3
- package/src/llm/anthropic/utils/message_inputs.ts +67 -11
- package/src/llm/anthropic/utils/message_outputs.ts +21 -2
- package/src/llm/anthropic/utils/output_parsers.ts +25 -6
- package/src/llm/anthropic/utils/tools.ts +29 -0
- package/src/llm/google/index.ts +218 -0
- package/src/llm/google/types.ts +43 -0
- package/src/llm/google/utils/common.ts +646 -0
- package/src/llm/google/utils/tools.ts +160 -0
- package/src/llm/google/utils/zod_to_genai_parameters.ts +86 -0
- package/src/llm/ollama/index.ts +89 -0
- package/src/llm/ollama/utils.ts +193 -0
- package/src/llm/openai/index.ts +641 -14
- package/src/llm/openai/types.ts +24 -0
- package/src/llm/openai/utils/index.ts +912 -0
- package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
- package/src/llm/providers.ts +10 -9
- package/src/llm/text.ts +26 -7
- package/src/llm/vertexai/index.ts +360 -0
- package/src/messages/reducer.ts +80 -0
- package/src/run.ts +196 -116
- package/src/scripts/ant_web_search.ts +158 -0
- package/src/scripts/args.ts +12 -8
- package/src/scripts/cli4.ts +29 -21
- package/src/scripts/cli5.ts +29 -21
- package/src/scripts/code_exec.ts +54 -23
- package/src/scripts/code_exec_files.ts +48 -17
- package/src/scripts/code_exec_simple.ts +46 -27
- package/src/scripts/handoff-test.ts +135 -0
- package/src/scripts/image.ts +52 -20
- package/src/scripts/multi-agent-chain.ts +278 -0
- package/src/scripts/multi-agent-conditional.ts +220 -0
- package/src/scripts/multi-agent-document-review-chain.ts +197 -0
- package/src/scripts/multi-agent-hybrid-flow.ts +310 -0
- package/src/scripts/multi-agent-parallel.ts +341 -0
- package/src/scripts/multi-agent-sequence.ts +212 -0
- package/src/scripts/multi-agent-supervisor.ts +362 -0
- package/src/scripts/multi-agent-test.ts +186 -0
- package/src/scripts/search.ts +1 -9
- package/src/scripts/simple.ts +25 -10
- package/src/scripts/test-custom-prompt-key.ts +145 -0
- package/src/scripts/test-handoff-input.ts +170 -0
- package/src/scripts/test-multi-agent-list-handoff.ts +261 -0
- package/src/scripts/test-tools-before-handoff.ts +233 -0
- package/src/scripts/tools.ts +48 -18
- package/src/specs/anthropic.simple.test.ts +150 -34
- package/src/specs/azure.simple.test.ts +325 -0
- package/src/specs/openai.simple.test.ts +140 -33
- package/src/specs/openrouter.simple.test.ts +107 -0
- package/src/specs/prune.test.ts +4 -9
- package/src/specs/reasoning.test.ts +80 -44
- package/src/specs/token-memoization.test.ts +39 -0
- package/src/stream.test.ts +94 -0
- package/src/stream.ts +143 -61
- package/src/tools/ToolNode.ts +21 -7
- package/src/tools/handlers.ts +192 -18
- package/src/tools/search/anthropic.ts +51 -0
- package/src/tools/search/firecrawl.ts +69 -20
- package/src/tools/search/format.ts +6 -8
- package/src/tools/search/rerankers.ts +7 -40
- package/src/tools/search/search.ts +97 -16
- package/src/tools/search/tool.ts +5 -2
- package/src/tools/search/types.ts +30 -10
- package/src/tools/search/utils.ts +1 -1
- package/src/types/graph.ts +318 -103
- package/src/types/llm.ts +26 -12
- package/src/types/run.ts +56 -13
- package/src/types/stream.ts +22 -1
- package/src/types/tools.ts +16 -10
- package/src/utils/events.ts +32 -0
- package/src/utils/llmConfig.ts +19 -7
- package/src/utils/title.ts +104 -30
- package/src/utils/tokens.ts +69 -10
- package/dist/types/scripts/abort.d.ts +0 -1
- package/dist/types/scripts/args.d.ts +0 -6
- package/dist/types/scripts/caching.d.ts +0 -1
- package/dist/types/scripts/cli.d.ts +0 -1
- package/dist/types/scripts/cli2.d.ts +0 -1
- package/dist/types/scripts/cli3.d.ts +0 -1
- package/dist/types/scripts/cli4.d.ts +0 -1
- package/dist/types/scripts/cli5.d.ts +0 -1
- package/dist/types/scripts/code_exec.d.ts +0 -1
- package/dist/types/scripts/code_exec_files.d.ts +0 -1
- package/dist/types/scripts/code_exec_simple.d.ts +0 -1
- package/dist/types/scripts/content.d.ts +0 -1
- package/dist/types/scripts/empty_input.d.ts +0 -1
- package/dist/types/scripts/image.d.ts +0 -1
- package/dist/types/scripts/memory.d.ts +0 -1
- package/dist/types/scripts/search.d.ts +0 -1
- package/dist/types/scripts/simple.d.ts +0 -1
- package/dist/types/scripts/stream.d.ts +0 -1
- package/dist/types/scripts/thinking.d.ts +0 -1
- package/dist/types/scripts/tools.d.ts +0 -1
- package/dist/types/specs/spec.utils.d.ts +0 -1
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import { AIMessageChunk } from '@langchain/core/messages';
|
|
2
|
+
import { v4 } from 'uuid';
|
|
3
|
+
|
|
4
|
+
/**
 * Converts a single Ollama chat message into a LangChain `AIMessageChunk`.
 *
 * @param messages - The Ollama message to convert.
 * @param extra - Optional `responseMetadata` / `usageMetadata` to attach to the chunk.
 * @returns The equivalent LangChain message chunk.
 */
function convertOllamaMessagesToLangChain(messages, extra) {
    const additional_kwargs = {};
    // Surface Ollama "thinking" output as reasoning content when present.
    if ('thinking' in messages) {
        additional_kwargs.reasoning_content = messages.thinking;
    }
    const tool_call_chunks = messages.tool_calls?.map((tc) => ({
        name: tc.function.name,
        args: JSON.stringify(tc.function.arguments),
        type: 'tool_call_chunk',
        index: 0,
        id: v4(),
    }));
    return new AIMessageChunk({
        content: messages.content || '',
        tool_call_chunks,
        response_metadata: extra?.responseMetadata,
        usage_metadata: extra?.usageMetadata,
        additional_kwargs,
    });
}
|
|
23
|
+
/**
 * Pulls the raw base64 payload out of a `data:` URL.
 *
 * @param dataUrl - A string that may be a base64-encoded data URL.
 * @returns The base64 payload, or an empty string when the input does not match.
 */
function extractBase64FromDataUrl(dataUrl) {
    const parsed = /^data:.*?;base64,(.*)$/.exec(dataUrl);
    if (parsed === null) {
        return '';
    }
    return parsed[1];
}
|
|
27
|
+
/**
 * Converts a LangChain AI message into one or more Ollama messages.
 *
 * String content maps to a single assistant message. Structured content maps
 * each `text` part to its own assistant message; `tool_use` parts are only
 * accepted when the message also carries `tool_calls`, which are emitted as a
 * trailing assistant message with a `tool_calls` payload.
 *
 * @param messages - The AI message to convert (reads `.content` and `.tool_calls`).
 * @returns The equivalent Ollama messages.
 * @throws {Error} When `tool_use` content is present without tool calls.
 */
function convertAMessagesToOllama(messages) {
    if (typeof messages.content === 'string') {
        return [
            {
                role: 'assistant',
                content: messages.content,
            },
        ];
    }
    const textMessages = messages.content
        .filter((c) => c.type === 'text' && typeof c.text === 'string')
        .map((c) => ({
            role: 'assistant',
            content: c.text,
        }));
    // Scan for `tool_use` parts once instead of twice, and drop the dead
    // always-truthy check on the mapped array the original carried.
    const hasToolUse = messages.content.some((c) => c.type === 'tool_use');
    let toolCallMsgs;
    if (hasToolUse && messages.tool_calls?.length) {
        // `tool_use` content types are accepted if the message has tool calls
        toolCallMsgs = {
            role: 'assistant',
            tool_calls: messages.tool_calls.map((tc) => ({
                id: tc.id,
                type: 'function',
                function: {
                    name: tc.name,
                    arguments: tc.args,
                },
            })),
            content: '',
        };
    }
    else if (hasToolUse) {
        throw new Error('\'tool_use\' content type is not supported without tool calls.');
    }
    return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
}
|
|
67
|
+
/**
 * Converts a LangChain human/generic message into Ollama user messages.
 *
 * String content becomes a single user message. Structured content maps each
 * part: `text` parts carry their text; `image_url` parts (string or
 * `{ url }` object) carry the extracted base64 payload in `images`.
 *
 * @param message - The human message to convert (reads `.content`).
 * @returns The equivalent Ollama messages.
 * @throws {Error} For content parts that are not `text` or a usable `image_url`.
 */
function convertHumanGenericMessagesToOllama(message) {
    if (typeof message.content === 'string') {
        return [{ role: 'user', content: message.content }];
    }
    return message.content.map((c) => {
        switch (c.type) {
            case 'text':
                return { role: 'user', content: c.text };
            case 'image_url': {
                if (typeof c.image_url === 'string') {
                    return {
                        role: 'user',
                        content: '',
                        images: [extractBase64FromDataUrl(c.image_url)],
                    };
                }
                if (c.image_url.url && typeof c.image_url.url === 'string') {
                    return {
                        role: 'user',
                        content: '',
                        images: [extractBase64FromDataUrl(c.image_url.url)],
                    };
                }
                break;
            }
        }
        throw new Error(`Unsupported content type: ${c.type}`);
    });
}
|
|
102
|
+
/**
 * Converts a LangChain system message into Ollama system messages.
 *
 * String content becomes a single system message; all-text structured content
 * becomes one system message per text part.
 *
 * @param message - The system message to convert (reads `.content`).
 * @returns The equivalent Ollama messages.
 * @throws {Error} When any structured content part is not text.
 */
function convertSystemMessageToOllama(message) {
    const { content } = message;
    if (typeof content === 'string') {
        return [{ role: 'system', content }];
    }
    const allText = content.every((c) => c.type === 'text' && typeof c.text === 'string');
    if (!allText) {
        const types = content.map((c) => c.type).join(', ');
        throw new Error(`Unsupported content type(s): ${types}`);
    }
    return content.map((c) => ({ role: 'system', content: c.text }));
}
|
|
123
|
+
/**
 * Converts a LangChain tool message into a single Ollama tool message.
 *
 * @param message - The tool message to convert (reads `.content`).
 * @returns The equivalent Ollama messages.
 * @throws {Error} When the tool message content is not a string.
 */
function convertToolMessageToOllama(message) {
    const { content } = message;
    if (typeof content !== 'string') {
        throw new Error('Non string tool message content is not supported');
    }
    return [{ role: 'tool', content }];
}
|
|
134
|
+
/**
 * Converts a list of LangChain messages into Ollama messages, dispatching on
 * each message's type (`human`/`generic`, `ai`, `system`, `tool`).
 *
 * @param messages - The LangChain messages to convert.
 * @returns The flattened list of equivalent Ollama messages.
 * @throws {Error} For unsupported message types.
 */
function convertToOllamaMessages(messages) {
    return messages.flatMap((msg) => {
        switch (msg._getType()) {
            case 'human':
            case 'generic':
                return convertHumanGenericMessagesToOllama(msg);
            case 'ai':
                return convertAMessagesToOllama(msg);
            case 'system':
                return convertSystemMessageToOllama(msg);
            case 'tool':
                return convertToolMessageToOllama(msg);
            default:
                throw new Error(`Unsupported message type: ${msg._getType()}`);
        }
    });
}
|
|
153
|
+
|
|
154
|
+
export { convertOllamaMessagesToLangChain, convertToOllamaMessages };
|
|
155
|
+
//# sourceMappingURL=utils.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"utils.mjs","sources":["../../../../src/llm/ollama/utils.ts"],"sourcesContent":["import {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n HumanMessage,\n MessageContentText,\n SystemMessage,\n ToolMessage,\n UsageMetadata,\n} from '@langchain/core/messages';\nimport type {\n Message as OllamaMessage,\n ToolCall as OllamaToolCall,\n} from 'ollama';\nimport { v4 as uuidv4 } from 'uuid';\n\nexport function convertOllamaMessagesToLangChain(\n messages: OllamaMessage,\n extra?: {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n responseMetadata?: Record<string, any>;\n usageMetadata?: UsageMetadata;\n }\n): AIMessageChunk {\n const additional_kwargs: BaseMessage['additional_kwargs'] = {};\n if ('thinking' in messages) {\n additional_kwargs.reasoning_content = messages.thinking as string;\n }\n return new AIMessageChunk({\n content: messages.content || '',\n tool_call_chunks: messages.tool_calls?.map((tc) => ({\n name: tc.function.name,\n args: JSON.stringify(tc.function.arguments),\n type: 'tool_call_chunk',\n index: 0,\n id: uuidv4(),\n })),\n response_metadata: extra?.responseMetadata,\n usage_metadata: extra?.usageMetadata,\n additional_kwargs,\n });\n}\n\nfunction extractBase64FromDataUrl(dataUrl: string): string {\n const match = dataUrl.match(/^data:.*?;base64,(.*)$/);\n return match ? 
match[1] : '';\n}\n\nfunction convertAMessagesToOllama(messages: AIMessage): OllamaMessage[] {\n if (typeof messages.content === 'string') {\n return [\n {\n role: 'assistant',\n content: messages.content,\n },\n ];\n }\n\n const textFields = messages.content.filter(\n (c) => c.type === 'text' && typeof c.text === 'string'\n );\n const textMessages = (textFields as MessageContentText[]).map((c) => ({\n role: 'assistant',\n content: c.text,\n }));\n let toolCallMsgs: OllamaMessage | undefined;\n\n if (\n messages.content.find((c) => c.type === 'tool_use') &&\n messages.tool_calls?.length\n ) {\n // `tool_use` content types are accepted if the message has tool calls\n const toolCalls: OllamaToolCall[] | undefined = messages.tool_calls.map(\n (tc) => ({\n id: tc.id,\n type: 'function',\n function: {\n name: tc.name,\n arguments: tc.args,\n },\n })\n );\n\n if (toolCalls) {\n toolCallMsgs = {\n role: 'assistant',\n tool_calls: toolCalls,\n content: '',\n };\n }\n } else if (\n messages.content.find((c) => c.type === 'tool_use') &&\n !messages.tool_calls?.length\n ) {\n throw new Error(\n '\\'tool_use\\' content type is not supported without tool calls.'\n );\n }\n\n return [...textMessages, ...(toolCallMsgs ? 
[toolCallMsgs] : [])];\n}\n\nfunction convertHumanGenericMessagesToOllama(\n message: HumanMessage\n): OllamaMessage[] {\n if (typeof message.content === 'string') {\n return [\n {\n role: 'user',\n content: message.content,\n },\n ];\n }\n return message.content.map((c) => {\n if (c.type === 'text') {\n return {\n role: 'user',\n content: c.text,\n };\n } else if (c.type === 'image_url') {\n if (typeof c.image_url === 'string') {\n return {\n role: 'user',\n content: '',\n images: [extractBase64FromDataUrl(c.image_url)],\n };\n } else if (c.image_url.url && typeof c.image_url.url === 'string') {\n return {\n role: 'user',\n content: '',\n images: [extractBase64FromDataUrl(c.image_url.url)],\n };\n }\n }\n throw new Error(`Unsupported content type: ${c.type}`);\n });\n}\n\nfunction convertSystemMessageToOllama(message: SystemMessage): OllamaMessage[] {\n if (typeof message.content === 'string') {\n return [\n {\n role: 'system',\n content: message.content,\n },\n ];\n } else if (\n message.content.every(\n (c) => c.type === 'text' && typeof c.text === 'string'\n )\n ) {\n return (message.content as MessageContentText[]).map((c) => ({\n role: 'system',\n content: c.text,\n }));\n } else {\n throw new Error(\n `Unsupported content type(s): ${message.content\n .map((c) => c.type)\n .join(', ')}`\n );\n }\n}\n\nfunction convertToolMessageToOllama(message: ToolMessage): OllamaMessage[] {\n if (typeof message.content !== 'string') {\n throw new Error('Non string tool message content is not supported');\n }\n return [\n {\n role: 'tool',\n content: message.content,\n },\n ];\n}\n\nexport function convertToOllamaMessages(\n messages: BaseMessage[]\n): OllamaMessage[] {\n return messages.flatMap((msg) => {\n if (['human', 'generic'].includes(msg._getType())) {\n return convertHumanGenericMessagesToOllama(msg);\n } else if (msg._getType() === 'ai') {\n return convertAMessagesToOllama(msg);\n } else if (msg._getType() === 'system') {\n return 
convertSystemMessageToOllama(msg);\n } else if (msg._getType() === 'tool') {\n return convertToolMessageToOllama(msg as ToolMessage);\n } else {\n throw new Error(`Unsupported message type: ${msg._getType()}`);\n }\n });\n}\n"],"names":["uuidv4"],"mappings":";;;AAgBgB,SAAA,gCAAgC,CAC9C,QAAuB,EACvB,KAIC,EAAA;IAED,MAAM,iBAAiB,GAAqC,EAAE;AAC9D,IAAA,IAAI,UAAU,IAAI,QAAQ,EAAE;AAC1B,QAAA,iBAAiB,CAAC,iBAAiB,GAAG,QAAQ,CAAC,QAAkB;;IAEnE,OAAO,IAAI,cAAc,CAAC;AACxB,QAAA,OAAO,EAAE,QAAQ,CAAC,OAAO,IAAI,EAAE;AAC/B,QAAA,gBAAgB,EAAE,QAAQ,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC,EAAE,MAAM;AAClD,YAAA,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI;YACtB,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;AAC3C,YAAA,IAAI,EAAE,iBAAiB;AACvB,YAAA,KAAK,EAAE,CAAC;YACR,EAAE,EAAEA,EAAM,EAAE;AACb,SAAA,CAAC,CAAC;QACH,iBAAiB,EAAE,KAAK,EAAE,gBAAgB;QAC1C,cAAc,EAAE,KAAK,EAAE,aAAa;QACpC,iBAAiB;AAClB,KAAA,CAAC;AACJ;AAEA,SAAS,wBAAwB,CAAC,OAAe,EAAA;IAC/C,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,wBAAwB,CAAC;AACrD,IAAA,OAAO,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;AAC9B;AAEA,SAAS,wBAAwB,CAAC,QAAmB,EAAA;AACnD,IAAA,IAAI,OAAO,QAAQ,CAAC,OAAO,KAAK,QAAQ,EAAE;QACxC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,QAAQ,CAAC,OAAO;AAC1B,aAAA;SACF;;IAGH,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,MAAM,CACxC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CACvD;IACD,MAAM,YAAY,GAAI,UAAmC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACpE,QAAA,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,CAAC,CAAC,IAAI;AAChB,KAAA,CAAC,CAAC;AACH,IAAA,IAAI,YAAuC;AAE3C,IAAA,IACE,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC;AACnD,QAAA,QAAQ,CAAC,UAAU,EAAE,MAAM,EAC3B;;AAEA,QAAA,MAAM,SAAS,GAAiC,QAAQ,CAAC,UAAU,CAAC,GAAG,CACrE,CAAC,EAAE,MAAM;YACP,EAAE,EAAE,EAAE,CAAC,EAAE;AACT,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,QAAQ,EAAE;gBACR,IAAI,EAAE,EAAE,CAAC,IAAI;gBACb,SAAS,EAAE,EAAE,CAAC,IAAI;AACnB,aAAA;AACF,SAAA,CAAC,CACH;QAED,IAAI,SAAS,EAAE;AACb,YAAA,YAAY,GAAG;AACb,gBAAA,IAAI,EAAE,WAAW;AACjB,gBAAA,UAAU,EAAE,SAAS;AACrB,gBAAA,OAAO,EAAE,EAAE;aACZ;;;AAEE,SAA
A,IACL,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC;AACnD,QAAA,CAAC,QAAQ,CAAC,UAAU,EAAE,MAAM,EAC5B;AACA,QAAA,MAAM,IAAI,KAAK,CACb,gEAAgE,CACjE;;AAGH,IAAA,OAAO,CAAC,GAAG,YAAY,EAAE,IAAI,YAAY,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC;AACnE;AAEA,SAAS,mCAAmC,CAC1C,OAAqB,EAAA;AAErB,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;QACvC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;IAEH,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;AAC/B,QAAA,IAAI,CAAC,CAAC,IAAI,KAAK,MAAM,EAAE;YACrB,OAAO;AACL,gBAAA,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,CAAC,CAAC,IAAI;aAChB;;AACI,aAAA,IAAI,CAAC,CAAC,IAAI,KAAK,WAAW,EAAE;AACjC,YAAA,IAAI,OAAO,CAAC,CAAC,SAAS,KAAK,QAAQ,EAAE;gBACnC,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,OAAO,EAAE,EAAE;oBACX,MAAM,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;iBAChD;;AACI,iBAAA,IAAI,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,OAAO,CAAC,CAAC,SAAS,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACjE,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,OAAO,EAAE,EAAE;oBACX,MAAM,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;iBACpD;;;QAGL,MAAM,IAAI,KAAK,CAAC,CAAA,0BAAA,EAA6B,CAAC,CAAC,IAAI,CAAE,CAAA,CAAC;AACxD,KAAC,CAAC;AACJ;AAEA,SAAS,4BAA4B,CAAC,OAAsB,EAAA;AAC1D,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;QACvC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;SACI,IACL,OAAO,CAAC,OAAO,CAAC,KAAK,CACnB,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CACvD,EACD;QACA,OAAQ,OAAO,CAAC,OAAgC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAC3D,YAAA,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC,CAAC,IAAI;AAChB,SAAA,CAAC,CAAC;;SACE;AACL,QAAA,MAAM,IAAI,KAAK,CACb,CAAgC,6BAAA,EAAA,OAAO,CAAC;aACrC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACjB,aAAA,IAAI,CAAC,IAAI,CAAC,CAAA,CAAE,CAChB;;AAEL;AAEA,SAAS,0BAA0B,CAAC,OAAoB,EAAA;AACtD,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;AACvC,QAAA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC;;IAErE,OAAO;AACL,QAAA;AACE,YAAA,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA;KACF;AACH;AAE
M,SAAU,uBAAuB,CACrC,QAAuB,EAAA;AAEvB,IAAA,OAAO,QAAQ,CAAC,OAAO,CAAC,CAAC,GAAG,KAAI;AAC9B,QAAA,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,EAAE;AACjD,YAAA,OAAO,mCAAmC,CAAC,GAAG,CAAC;;AAC1C,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;AAClC,YAAA,OAAO,wBAAwB,CAAC,GAAG,CAAC;;AAC/B,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,QAAQ,EAAE;AACtC,YAAA,OAAO,4BAA4B,CAAC,GAAG,CAAC;;AACnC,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,MAAM,EAAE;AACpC,YAAA,OAAO,0BAA0B,CAAC,GAAkB,CAAC;;aAChD;YACL,MAAM,IAAI,KAAK,CAAC,CAA6B,0BAAA,EAAA,GAAG,CAAC,QAAQ,EAAE,CAAE,CAAA,CAAC;;AAElE,KAAC,CAAC;AACJ;;;;"}
|
|
@@ -1,8 +1,51 @@
|
|
|
1
1
|
import { AzureOpenAI } from 'openai';
|
|
2
|
+
import { AIMessageChunk } from '@langchain/core/messages';
|
|
2
3
|
import { ChatXAI as ChatXAI$1 } from '@langchain/xai';
|
|
4
|
+
import { ChatGenerationChunk } from '@langchain/core/outputs';
|
|
5
|
+
import '@langchain/core/utils/function_calling';
|
|
3
6
|
import { ChatDeepSeek as ChatDeepSeek$1 } from '@langchain/deepseek';
|
|
4
7
|
import { getEndpoint, AzureChatOpenAI as AzureChatOpenAI$1, ChatOpenAI as ChatOpenAI$1, OpenAIClient } from '@langchain/openai';
|
|
8
|
+
import { isReasoningModel, _convertMessagesToOpenAIResponsesParams, _convertOpenAIResponsesDeltaToBaseMessageChunk, _convertMessagesToOpenAIParams } from './utils/index.mjs';
|
|
9
|
+
import '../../common/enum.mjs';
|
|
10
|
+
import { sleep } from '../../utils/run.mjs';
|
|
11
|
+
import 'js-tiktoken/lite';
|
|
5
12
|
|
|
13
|
+
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
const iife = (fn) => {
    // Immediately invokes the given thunk and returns its result.
    return fn();
};
|
|
15
|
+
/**
 * Checks whether a value is a WHATWG `Headers` instance, using the brand tag
 * rather than `instanceof` so it works across realms.
 *
 * @param headers - The value to inspect.
 * @returns `true` only when `Headers` exists and the value is tagged as one.
 */
function isHeaders(headers) {
    if (typeof Headers === 'undefined' || headers === null || typeof headers !== 'object') {
        return false;
    }
    return Object.prototype.toString.call(headers) === '[object Headers]';
}
|
|
21
|
+
/**
 * Normalizes any supported header representation into a plain string-keyed
 * object: a `Headers` instance, an array of `[key, value]` pairs, a
 * NullableHeaders-like wrapper (`{ values: Headers }`), or a plain object
 * (from which only string-valued entries are kept). Anything else yields `{}`.
 *
 * @param headers - The header container to normalize.
 * @returns A plain object of header name/value pairs.
 */
function normalizeHeaders(headers) {
    const output = iife(() => {
        // If headers is a Headers instance
        if (isHeaders(headers)) {
            return headers;
        }
        // If headers is an array of [key, value] pairs
        else if (Array.isArray(headers)) {
            return new Headers(headers);
        }
        // If headers is a NullableHeaders-like object (has 'values' property that is a Headers)
        else if (typeof headers === 'object' &&
            headers !== null &&
            'values' in headers &&
            isHeaders(headers.values)) {
            return headers.values;
        }
        // If headers is a plain object: keep only string-valued entries.
        // (The original piped the filtered entries through an identity `.map`,
        // which did nothing; it has been removed.)
        else if (typeof headers === 'object' && headers !== null) {
            const entries = Object.entries(headers).filter(([, v]) => typeof v === 'string');
            return new Headers(entries);
        }
        return new Headers();
    });
    return Object.fromEntries(output.entries());
}
|
|
6
49
|
function createAbortHandler(controller) {
|
|
7
50
|
return function () {
|
|
8
51
|
controller.abort();
|
|
@@ -62,7 +105,13 @@ class CustomAzureOpenAIClient extends AzureOpenAI {
|
|
|
62
105
|
}));
|
|
63
106
|
}
|
|
64
107
|
}
|
|
108
|
+
/** @ts-expect-error We are intentionally overriding `getReasoningParams` */
|
|
65
109
|
class ChatOpenAI extends ChatOpenAI$1 {
|
|
110
|
+
_lc_stream_delay;
|
|
111
|
+
constructor(fields) {
|
|
112
|
+
super(fields);
|
|
113
|
+
this._lc_stream_delay = fields?._lc_stream_delay;
|
|
114
|
+
}
|
|
66
115
|
get exposedClient() {
|
|
67
116
|
return this.client;
|
|
68
117
|
}
|
|
@@ -89,11 +138,203 @@ class ChatOpenAI extends ChatOpenAI$1 {
|
|
|
89
138
|
};
|
|
90
139
|
return requestOptions;
|
|
91
140
|
}
|
|
141
|
+
/**
|
|
142
|
+
* Returns backwards compatible reasoning parameters from constructor params and call options
|
|
143
|
+
* @internal
|
|
144
|
+
*/
|
|
145
|
+
getReasoningParams(options) {
|
|
146
|
+
if (!isReasoningModel(this.model)) {
|
|
147
|
+
return;
|
|
148
|
+
}
|
|
149
|
+
// apply options in reverse order of importance -- newer options supersede older options
|
|
150
|
+
let reasoning;
|
|
151
|
+
if (this.reasoning !== undefined) {
|
|
152
|
+
reasoning = {
|
|
153
|
+
...reasoning,
|
|
154
|
+
...this.reasoning,
|
|
155
|
+
};
|
|
156
|
+
}
|
|
157
|
+
if (options?.reasoning !== undefined) {
|
|
158
|
+
reasoning = {
|
|
159
|
+
...reasoning,
|
|
160
|
+
...options.reasoning,
|
|
161
|
+
};
|
|
162
|
+
}
|
|
163
|
+
return reasoning;
|
|
164
|
+
}
|
|
165
|
+
// Internal alias kept so both the public override and the underscored
// base-class hook resolve to the same reasoning-param logic.
_getReasoningParams(options) {
    return this.getReasoningParams(options);
}
|
|
168
|
+
/**
 * Streams message chunks, preferring the OpenAI Responses API when
 * `_useResponseApi(options)` is true; otherwise delegates to the
 * Chat Completions path (`_streamResponseChunks2`).
 * @param messages - conversation messages to send (LangChain message objects — assumed; verify against caller)
 * @param options - call options (abort signal, promptIndex, reasoning, ...)
 * @param runManager - optional callback manager notified once per yielded chunk
 */
async *_streamResponseChunks(messages, options, runManager) {
    if (!this._useResponseApi(options)) {
        // Chat Completions fallback; `yield*` forwards every chunk it produces.
        return yield* this._streamResponseChunks2(messages, options, runManager);
    }
    const streamIterable = await this.responseApiWithRetry({
        ...this.invocationParams(options, { streaming: true }),
        input: _convertMessagesToOpenAIResponsesParams(messages, this.model, this.zdrEnabled),
        stream: true,
    }, options);
    for await (const data of streamIterable) {
        // Deltas that don't translate to a message chunk are dropped silently.
        const chunk = _convertOpenAIResponsesDeltaToBaseMessageChunk(data);
        if (chunk == null)
            continue;
        yield chunk;
        // Optional artificial pacing between streamed chunks.
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
    }
    return;
}
|
|
189
|
+
/**
 * Chat Completions streaming path. Converts each SSE delta to a
 * ChatGenerationChunk, forwards reasoning deltas into
 * `additional_kwargs.reasoning_content`, and emits one final empty-content
 * chunk carrying aggregated token-usage metadata when the API reported usage.
 * Throws `Error('AbortError')` if the caller's signal aborted mid-stream.
 */
async *_streamResponseChunks2(messages, options, runManager) {
    const messagesMapped = _convertMessagesToOpenAIParams(messages, this.model);
    const params = {
        ...this.invocationParams(options, {
            streaming: true,
        }),
        messages: messagesMapped,
        stream: true,
    };
    let defaultRole;
    const streamIterable = await this.completionWithRetry(params, options);
    let usage;
    for await (const data of streamIterable) {
        const choice = data.choices[0];
        // Usage typically arrives on a trailing chunk; remember the latest seen.
        if (data.usage) {
            usage = data.usage;
        }
        if (!choice) {
            continue;
        }
        const { delta } = choice;
        if (!delta) {
            continue;
        }
        const chunk = this._convertOpenAIDeltaToBaseMessageChunk(delta, data, defaultRole);
        // Normalize both provider spellings onto `reasoning_content`.
        if ('reasoning_content' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning_content;
        }
        else if ('reasoning' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning;
        }
        // Later deltas may omit `role`; reuse the last one observed.
        defaultRole = delta.role ?? defaultRole;
        const newTokenIndices = {
            prompt: options.promptIndex ?? 0,
            completion: choice.index ?? 0,
        };
        if (typeof chunk.content !== 'string') {
            // eslint-disable-next-line no-console
            console.log('[WARNING]: Received non-string content from OpenAI. This is currently not supported.');
            continue;
        }
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const generationInfo = { ...newTokenIndices };
        if (choice.finish_reason != null) {
            generationInfo.finish_reason = choice.finish_reason;
            // Only include system fingerprint in the last chunk for now
            // to avoid concatenation issues
            generationInfo.system_fingerprint = data.system_fingerprint;
            generationInfo.model_name = data.model;
            generationInfo.service_tier = data.service_tier;
        }
        if (this.logprobs == true) {
            generationInfo.logprobs = choice.logprobs;
        }
        const generationChunk = new ChatGenerationChunk({
            message: chunk,
            text: chunk.content,
            generationInfo,
        });
        yield generationChunk;
        // Optional artificial pacing between streamed chunks.
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
    }
    if (usage) {
        // Only include detail sub-objects when the API actually reported them.
        const inputTokenDetails = {
            ...(usage.prompt_tokens_details?.audio_tokens != null && {
                audio: usage.prompt_tokens_details.audio_tokens,
            }),
            ...(usage.prompt_tokens_details?.cached_tokens != null && {
                cache_read: usage.prompt_tokens_details.cached_tokens,
            }),
        };
        const outputTokenDetails = {
            ...(usage.completion_tokens_details?.audio_tokens != null && {
                audio: usage.completion_tokens_details.audio_tokens,
            }),
            ...(usage.completion_tokens_details?.reasoning_tokens != null && {
                reasoning: usage.completion_tokens_details.reasoning_tokens,
            }),
        };
        // Trailing empty chunk whose only purpose is to carry usage metadata.
        const generationChunk = new ChatGenerationChunk({
            message: new AIMessageChunk({
                content: '',
                response_metadata: {
                    usage: { ...usage },
                },
                usage_metadata: {
                    input_tokens: usage.prompt_tokens,
                    output_tokens: usage.completion_tokens,
                    total_tokens: usage.total_tokens,
                    ...(Object.keys(inputTokenDetails).length > 0 && {
                        input_token_details: inputTokenDetails,
                    }),
                    ...(Object.keys(outputTokenDetails).length > 0 && {
                        output_token_details: outputTokenDetails,
                    }),
                },
            }),
            text: '',
        });
        yield generationChunk;
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
    }
    if (options.signal?.aborted === true) {
        throw new Error('AbortError');
    }
}
|
|
92
300
|
}
|
|
301
|
+
/** @ts-expect-error We are intentionally overriding `getReasoningParams` */
|
|
93
302
|
class AzureChatOpenAI extends AzureChatOpenAI$1 {
|
|
303
|
+
_lc_stream_delay;
|
|
304
|
+
constructor(fields) {
|
|
305
|
+
super(fields);
|
|
306
|
+
this._lc_stream_delay = fields?._lc_stream_delay;
|
|
307
|
+
}
|
|
94
308
|
/** Read-only accessor exposing the underlying (normally protected) SDK client. */
get exposedClient() {
    return this.client;
}
|
|
311
|
+
/**
|
|
312
|
+
* Returns backwards compatible reasoning parameters from constructor params and call options
|
|
313
|
+
* @internal
|
|
314
|
+
*/
|
|
315
|
+
getReasoningParams(options) {
|
|
316
|
+
if (!isReasoningModel(this.model)) {
|
|
317
|
+
return;
|
|
318
|
+
}
|
|
319
|
+
// apply options in reverse order of importance -- newer options supersede older options
|
|
320
|
+
let reasoning;
|
|
321
|
+
if (this.reasoning !== undefined) {
|
|
322
|
+
reasoning = {
|
|
323
|
+
...reasoning,
|
|
324
|
+
...this.reasoning,
|
|
325
|
+
};
|
|
326
|
+
}
|
|
327
|
+
if (options?.reasoning !== undefined) {
|
|
328
|
+
reasoning = {
|
|
329
|
+
...reasoning,
|
|
330
|
+
...options.reasoning,
|
|
331
|
+
};
|
|
332
|
+
}
|
|
333
|
+
return reasoning;
|
|
334
|
+
}
|
|
335
|
+
// Internal alias kept so both the public override and the underscored
// base-class hook resolve to the same reasoning-param logic.
_getReasoningParams(options) {
    return this.getReasoningParams(options);
}
|
|
97
338
|
_getClientOptions(options) {
|
|
98
339
|
if (!this.client) {
|
|
99
340
|
const openAIEndpointConfig = {
|
|
@@ -117,11 +358,12 @@ class AzureChatOpenAI extends AzureChatOpenAI$1 {
|
|
|
117
358
|
if (params.baseURL == null) {
|
|
118
359
|
delete params.baseURL;
|
|
119
360
|
}
|
|
361
|
+
const defaultHeaders = normalizeHeaders(params.defaultHeaders);
|
|
120
362
|
params.defaultHeaders = {
|
|
121
363
|
...params.defaultHeaders,
|
|
122
|
-
'User-Agent':
|
|
123
|
-
? `${
|
|
124
|
-
: '
|
|
364
|
+
'User-Agent': defaultHeaders['User-Agent'] != null
|
|
365
|
+
? `${defaultHeaders['User-Agent']}: librechat-azure-openai-v2`
|
|
366
|
+
: 'librechat-azure-openai-v2',
|
|
125
367
|
};
|
|
126
368
|
this.client = new CustomAzureOpenAIClient({
|
|
127
369
|
apiVersion: this.azureOpenAIApiVersion,
|
|
@@ -145,6 +387,27 @@ class AzureChatOpenAI extends AzureChatOpenAI$1 {
|
|
|
145
387
|
}
|
|
146
388
|
return requestOptions;
|
|
147
389
|
}
|
|
390
|
+
/**
 * Streams message chunks via the OpenAI Responses API when enabled for this
 * call; otherwise defers to the superclass Chat Completions streaming.
 * @param messages - conversation messages to send (LangChain message objects — assumed; verify against caller)
 * @param options - call options (abort signal, reasoning, ...)
 * @param runManager - optional callback manager notified once per yielded chunk
 */
async *_streamResponseChunks(messages, options, runManager) {
    if (!this._useResponseApi(options)) {
        // Plain Chat Completions path lives in the parent class.
        return yield* super._streamResponseChunks(messages, options, runManager);
    }
    const streamIterable = await this.responseApiWithRetry({
        ...this.invocationParams(options, { streaming: true }),
        input: _convertMessagesToOpenAIResponsesParams(messages, this.model, this.zdrEnabled),
        stream: true,
    }, options);
    for await (const data of streamIterable) {
        // Deltas that don't translate to a message chunk are dropped silently.
        const chunk = _convertOpenAIResponsesDeltaToBaseMessageChunk(data);
        if (chunk == null)
            continue;
        yield chunk;
        // Optional artificial pacing between streamed chunks.
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
    }
    return;
}
|
|
148
411
|
}
|
|
149
412
|
class ChatDeepSeek extends ChatDeepSeek$1 {
|
|
150
413
|
get exposedClient() {
|
|
@@ -175,6 +438,21 @@ class ChatDeepSeek extends ChatDeepSeek$1 {
|
|
|
175
438
|
}
|
|
176
439
|
}
|
|
177
440
|
class ChatXAI extends ChatXAI$1 {
|
|
441
|
+
_lc_stream_delay;
|
|
442
|
+
constructor(fields) {
|
|
443
|
+
super(fields);
|
|
444
|
+
this._lc_stream_delay = fields?._lc_stream_delay;
|
|
445
|
+
const customBaseURL = fields?.configuration?.baseURL ?? fields?.clientConfig?.baseURL;
|
|
446
|
+
if (customBaseURL != null && customBaseURL) {
|
|
447
|
+
this.clientConfig = {
|
|
448
|
+
...this.clientConfig,
|
|
449
|
+
baseURL: customBaseURL,
|
|
450
|
+
};
|
|
451
|
+
// Reset the client to force recreation with new config
|
|
452
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
453
|
+
this.client = undefined;
|
|
454
|
+
}
|
|
455
|
+
}
|
|
178
456
|
/** Read-only accessor exposing the underlying (normally protected) SDK client. */
get exposedClient() {
    return this.client;
}
|
|
@@ -201,7 +479,146 @@ class ChatXAI extends ChatXAI$1 {
|
|
|
201
479
|
};
|
|
202
480
|
return requestOptions;
|
|
203
481
|
}
|
|
482
|
+
/**
 * xAI streaming path over the Chat Completions wire format. Converts each
 * SSE delta to a ChatGenerationChunk, normalizes partial usage_metadata,
 * forwards `reasoning_content` deltas, and emits one final empty-content
 * chunk carrying aggregated usage — including xAI-specific token details
 * (text/image prompt tokens, accepted/rejected prediction tokens,
 * num_sources_used). Throws `Error('AbortError')` if the caller's signal
 * aborted mid-stream.
 */
async *_streamResponseChunks(messages, options, runManager) {
    const messagesMapped = _convertMessagesToOpenAIParams(messages, this.model);
    const params = {
        ...this.invocationParams(options, {
            streaming: true,
        }),
        messages: messagesMapped,
        stream: true,
    };
    let defaultRole;
    const streamIterable = await this.completionWithRetry(params, options);
    let usage;
    for await (const data of streamIterable) {
        const choice = data.choices[0];
        // Usage typically arrives on a trailing chunk; remember the latest seen.
        if (data.usage) {
            usage = data.usage;
        }
        if (!choice) {
            continue;
        }
        const { delta } = choice;
        if (!delta) {
            continue;
        }
        const chunk = this._convertOpenAIDeltaToBaseMessageChunk(delta, data, defaultRole);
        // Per-chunk usage may be partial; coerce missing counters to 0 so
        // downstream aggregation never sees undefined.
        if (chunk.usage_metadata != null) {
            chunk.usage_metadata = {
                input_tokens: chunk.usage_metadata.input_tokens ?? 0,
                output_tokens: chunk.usage_metadata.output_tokens ?? 0,
                total_tokens: chunk.usage_metadata.total_tokens ?? 0,
            };
        }
        if ('reasoning_content' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning_content;
        }
        // Later deltas may omit `role`; reuse the last one observed.
        defaultRole = delta.role ?? defaultRole;
        const newTokenIndices = {
            prompt: options.promptIndex ?? 0,
            completion: choice.index ?? 0,
        };
        if (typeof chunk.content !== 'string') {
            // eslint-disable-next-line no-console
            console.log('[WARNING]: Received non-string content from OpenAI. This is currently not supported.');
            continue;
        }
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const generationInfo = { ...newTokenIndices };
        if (choice.finish_reason != null) {
            generationInfo.finish_reason = choice.finish_reason;
            // Only include system fingerprint in the last chunk for now
            // to avoid concatenation issues
            generationInfo.system_fingerprint = data.system_fingerprint;
            generationInfo.model_name = data.model;
            generationInfo.service_tier = data.service_tier;
        }
        if (this.logprobs == true) {
            generationInfo.logprobs = choice.logprobs;
        }
        const generationChunk = new ChatGenerationChunk({
            message: chunk,
            text: chunk.content,
            generationInfo,
        });
        yield generationChunk;
        // Optional artificial pacing between streamed chunks.
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
    }
    if (usage) {
        // Type assertion for xAI-specific usage structure
        const xaiUsage = usage;
        const inputTokenDetails = {
            // Standard OpenAI fields
            ...(usage.prompt_tokens_details?.audio_tokens != null && {
                audio: usage.prompt_tokens_details.audio_tokens,
            }),
            ...(usage.prompt_tokens_details?.cached_tokens != null && {
                cache_read: usage.prompt_tokens_details.cached_tokens,
            }),
            // Add xAI-specific prompt token details if they exist
            ...(xaiUsage.prompt_tokens_details?.text_tokens != null && {
                text: xaiUsage.prompt_tokens_details.text_tokens,
            }),
            ...(xaiUsage.prompt_tokens_details?.image_tokens != null && {
                image: xaiUsage.prompt_tokens_details.image_tokens,
            }),
        };
        const outputTokenDetails = {
            // Standard OpenAI fields
            ...(usage.completion_tokens_details?.audio_tokens != null && {
                audio: usage.completion_tokens_details.audio_tokens,
            }),
            ...(usage.completion_tokens_details?.reasoning_tokens != null && {
                reasoning: usage.completion_tokens_details.reasoning_tokens,
            }),
            // Add xAI-specific completion token details if they exist
            ...(xaiUsage.completion_tokens_details?.accepted_prediction_tokens !=
                null && {
                accepted_prediction: xaiUsage.completion_tokens_details.accepted_prediction_tokens,
            }),
            ...(xaiUsage.completion_tokens_details?.rejected_prediction_tokens !=
                null && {
                rejected_prediction: xaiUsage.completion_tokens_details.rejected_prediction_tokens,
            }),
        };
        // Trailing empty chunk whose only purpose is to carry usage metadata.
        const generationChunk = new ChatGenerationChunk({
            message: new AIMessageChunk({
                content: '',
                response_metadata: {
                    usage: { ...usage },
                    // Include xAI-specific metadata if it exists
                    ...(xaiUsage.num_sources_used != null && {
                        num_sources_used: xaiUsage.num_sources_used,
                    }),
                },
                usage_metadata: {
                    input_tokens: usage.prompt_tokens,
                    output_tokens: usage.completion_tokens,
                    total_tokens: usage.total_tokens,
                    ...(Object.keys(inputTokenDetails).length > 0 && {
                        input_token_details: inputTokenDetails,
                    }),
                    ...(Object.keys(outputTokenDetails).length > 0 && {
                        output_token_details: outputTokenDetails,
                    }),
                },
            }),
            text: '',
        });
        yield generationChunk;
        if (this._lc_stream_delay != null) {
            await sleep(this._lc_stream_delay);
        }
    }
    if (options.signal?.aborted === true) {
        throw new Error('AbortError');
    }
}
|
|
204
621
|
}
|
|
205
622
|
|
|
206
|
-
export { AzureChatOpenAI, ChatDeepSeek, ChatOpenAI, ChatXAI, CustomAzureOpenAIClient, CustomOpenAIClient };
|
|
623
|
+
export { AzureChatOpenAI, ChatDeepSeek, ChatOpenAI, ChatXAI, CustomAzureOpenAIClient, CustomOpenAIClient, isHeaders, normalizeHeaders };
|
|
207
624
|
//# sourceMappingURL=index.mjs.map
|