@librechat/agents 2.4.322 → 3.0.0-rc10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (279) hide show
  1. package/dist/cjs/agents/AgentContext.cjs +218 -0
  2. package/dist/cjs/agents/AgentContext.cjs.map +1 -0
  3. package/dist/cjs/common/enum.cjs +15 -5
  4. package/dist/cjs/common/enum.cjs.map +1 -1
  5. package/dist/cjs/events.cjs +10 -6
  6. package/dist/cjs/events.cjs.map +1 -1
  7. package/dist/cjs/graphs/Graph.cjs +309 -213
  8. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  9. package/dist/cjs/graphs/MultiAgentGraph.cjs +507 -0
  10. package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -0
  11. package/dist/cjs/llm/anthropic/index.cjs +54 -9
  12. package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
  13. package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
  14. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +52 -6
  15. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
  16. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +22 -2
  17. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -1
  18. package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
  19. package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
  20. package/dist/cjs/llm/google/index.cjs +144 -0
  21. package/dist/cjs/llm/google/index.cjs.map +1 -0
  22. package/dist/cjs/llm/google/utils/common.cjs +477 -0
  23. package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
  24. package/dist/cjs/llm/ollama/index.cjs +67 -0
  25. package/dist/cjs/llm/ollama/index.cjs.map +1 -0
  26. package/dist/cjs/llm/ollama/utils.cjs +158 -0
  27. package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
  28. package/dist/cjs/llm/openai/index.cjs +422 -3
  29. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  30. package/dist/cjs/llm/openai/utils/index.cjs +672 -0
  31. package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
  32. package/dist/cjs/llm/providers.cjs +15 -15
  33. package/dist/cjs/llm/providers.cjs.map +1 -1
  34. package/dist/cjs/llm/text.cjs +14 -3
  35. package/dist/cjs/llm/text.cjs.map +1 -1
  36. package/dist/cjs/llm/vertexai/index.cjs +330 -0
  37. package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
  38. package/dist/cjs/main.cjs +11 -0
  39. package/dist/cjs/main.cjs.map +1 -1
  40. package/dist/cjs/run.cjs +137 -85
  41. package/dist/cjs/run.cjs.map +1 -1
  42. package/dist/cjs/stream.cjs +86 -52
  43. package/dist/cjs/stream.cjs.map +1 -1
  44. package/dist/cjs/tools/ToolNode.cjs +10 -4
  45. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  46. package/dist/cjs/tools/handlers.cjs +119 -13
  47. package/dist/cjs/tools/handlers.cjs.map +1 -1
  48. package/dist/cjs/tools/search/anthropic.cjs +40 -0
  49. package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
  50. package/dist/cjs/tools/search/firecrawl.cjs +55 -9
  51. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  52. package/dist/cjs/tools/search/format.cjs +6 -6
  53. package/dist/cjs/tools/search/format.cjs.map +1 -1
  54. package/dist/cjs/tools/search/rerankers.cjs +7 -29
  55. package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
  56. package/dist/cjs/tools/search/search.cjs +86 -16
  57. package/dist/cjs/tools/search/search.cjs.map +1 -1
  58. package/dist/cjs/tools/search/tool.cjs +4 -2
  59. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  60. package/dist/cjs/tools/search/utils.cjs +1 -1
  61. package/dist/cjs/tools/search/utils.cjs.map +1 -1
  62. package/dist/cjs/utils/events.cjs +31 -0
  63. package/dist/cjs/utils/events.cjs.map +1 -0
  64. package/dist/cjs/utils/title.cjs +57 -21
  65. package/dist/cjs/utils/title.cjs.map +1 -1
  66. package/dist/cjs/utils/tokens.cjs +54 -7
  67. package/dist/cjs/utils/tokens.cjs.map +1 -1
  68. package/dist/esm/agents/AgentContext.mjs +216 -0
  69. package/dist/esm/agents/AgentContext.mjs.map +1 -0
  70. package/dist/esm/common/enum.mjs +16 -6
  71. package/dist/esm/common/enum.mjs.map +1 -1
  72. package/dist/esm/events.mjs +10 -6
  73. package/dist/esm/events.mjs.map +1 -1
  74. package/dist/esm/graphs/Graph.mjs +311 -215
  75. package/dist/esm/graphs/Graph.mjs.map +1 -1
  76. package/dist/esm/graphs/MultiAgentGraph.mjs +505 -0
  77. package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -0
  78. package/dist/esm/llm/anthropic/index.mjs +54 -9
  79. package/dist/esm/llm/anthropic/index.mjs.map +1 -1
  80. package/dist/esm/llm/anthropic/types.mjs.map +1 -1
  81. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +52 -6
  82. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
  83. package/dist/esm/llm/anthropic/utils/message_outputs.mjs +22 -2
  84. package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -1
  85. package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
  86. package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
  87. package/dist/esm/llm/google/index.mjs +142 -0
  88. package/dist/esm/llm/google/index.mjs.map +1 -0
  89. package/dist/esm/llm/google/utils/common.mjs +471 -0
  90. package/dist/esm/llm/google/utils/common.mjs.map +1 -0
  91. package/dist/esm/llm/ollama/index.mjs +65 -0
  92. package/dist/esm/llm/ollama/index.mjs.map +1 -0
  93. package/dist/esm/llm/ollama/utils.mjs +155 -0
  94. package/dist/esm/llm/ollama/utils.mjs.map +1 -0
  95. package/dist/esm/llm/openai/index.mjs +421 -4
  96. package/dist/esm/llm/openai/index.mjs.map +1 -1
  97. package/dist/esm/llm/openai/utils/index.mjs +666 -0
  98. package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
  99. package/dist/esm/llm/providers.mjs +5 -5
  100. package/dist/esm/llm/providers.mjs.map +1 -1
  101. package/dist/esm/llm/text.mjs +14 -3
  102. package/dist/esm/llm/text.mjs.map +1 -1
  103. package/dist/esm/llm/vertexai/index.mjs +328 -0
  104. package/dist/esm/llm/vertexai/index.mjs.map +1 -0
  105. package/dist/esm/main.mjs +6 -5
  106. package/dist/esm/main.mjs.map +1 -1
  107. package/dist/esm/run.mjs +138 -87
  108. package/dist/esm/run.mjs.map +1 -1
  109. package/dist/esm/stream.mjs +88 -55
  110. package/dist/esm/stream.mjs.map +1 -1
  111. package/dist/esm/tools/ToolNode.mjs +10 -4
  112. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  113. package/dist/esm/tools/handlers.mjs +119 -15
  114. package/dist/esm/tools/handlers.mjs.map +1 -1
  115. package/dist/esm/tools/search/anthropic.mjs +37 -0
  116. package/dist/esm/tools/search/anthropic.mjs.map +1 -0
  117. package/dist/esm/tools/search/firecrawl.mjs +55 -9
  118. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  119. package/dist/esm/tools/search/format.mjs +7 -7
  120. package/dist/esm/tools/search/format.mjs.map +1 -1
  121. package/dist/esm/tools/search/rerankers.mjs +7 -29
  122. package/dist/esm/tools/search/rerankers.mjs.map +1 -1
  123. package/dist/esm/tools/search/search.mjs +86 -16
  124. package/dist/esm/tools/search/search.mjs.map +1 -1
  125. package/dist/esm/tools/search/tool.mjs +4 -2
  126. package/dist/esm/tools/search/tool.mjs.map +1 -1
  127. package/dist/esm/tools/search/utils.mjs +1 -1
  128. package/dist/esm/tools/search/utils.mjs.map +1 -1
  129. package/dist/esm/utils/events.mjs +29 -0
  130. package/dist/esm/utils/events.mjs.map +1 -0
  131. package/dist/esm/utils/title.mjs +57 -22
  132. package/dist/esm/utils/title.mjs.map +1 -1
  133. package/dist/esm/utils/tokens.mjs +54 -8
  134. package/dist/esm/utils/tokens.mjs.map +1 -1
  135. package/dist/types/agents/AgentContext.d.ts +91 -0
  136. package/dist/types/common/enum.d.ts +17 -7
  137. package/dist/types/events.d.ts +5 -4
  138. package/dist/types/graphs/Graph.d.ts +64 -67
  139. package/dist/types/graphs/MultiAgentGraph.d.ts +47 -0
  140. package/dist/types/graphs/index.d.ts +1 -0
  141. package/dist/types/llm/anthropic/index.d.ts +11 -0
  142. package/dist/types/llm/anthropic/types.d.ts +9 -3
  143. package/dist/types/llm/anthropic/utils/message_inputs.d.ts +1 -1
  144. package/dist/types/llm/anthropic/utils/output_parsers.d.ts +4 -4
  145. package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
  146. package/dist/types/llm/google/index.d.ts +13 -0
  147. package/dist/types/llm/google/types.d.ts +32 -0
  148. package/dist/types/llm/google/utils/common.d.ts +19 -0
  149. package/dist/types/llm/google/utils/tools.d.ts +10 -0
  150. package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
  151. package/dist/types/llm/ollama/index.d.ts +7 -0
  152. package/dist/types/llm/ollama/utils.d.ts +7 -0
  153. package/dist/types/llm/openai/index.d.ts +82 -3
  154. package/dist/types/llm/openai/types.d.ts +10 -0
  155. package/dist/types/llm/openai/utils/index.d.ts +20 -0
  156. package/dist/types/llm/text.d.ts +1 -1
  157. package/dist/types/llm/vertexai/index.d.ts +293 -0
  158. package/dist/types/messages/reducer.d.ts +9 -0
  159. package/dist/types/run.d.ts +19 -12
  160. package/dist/types/stream.d.ts +10 -3
  161. package/dist/types/tools/CodeExecutor.d.ts +2 -2
  162. package/dist/types/tools/ToolNode.d.ts +1 -1
  163. package/dist/types/tools/handlers.d.ts +17 -4
  164. package/dist/types/tools/search/anthropic.d.ts +16 -0
  165. package/dist/types/tools/search/firecrawl.d.ts +15 -0
  166. package/dist/types/tools/search/rerankers.d.ts +0 -1
  167. package/dist/types/tools/search/types.d.ts +30 -9
  168. package/dist/types/types/graph.d.ts +129 -15
  169. package/dist/types/types/llm.d.ts +25 -10
  170. package/dist/types/types/run.d.ts +50 -8
  171. package/dist/types/types/stream.d.ts +16 -2
  172. package/dist/types/types/tools.d.ts +1 -1
  173. package/dist/types/utils/events.d.ts +6 -0
  174. package/dist/types/utils/title.d.ts +2 -1
  175. package/dist/types/utils/tokens.d.ts +24 -0
  176. package/package.json +41 -17
  177. package/src/agents/AgentContext.ts +315 -0
  178. package/src/common/enum.ts +15 -5
  179. package/src/events.ts +24 -13
  180. package/src/graphs/Graph.ts +495 -313
  181. package/src/graphs/MultiAgentGraph.ts +598 -0
  182. package/src/graphs/index.ts +2 -1
  183. package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
  184. package/src/llm/anthropic/index.ts +78 -13
  185. package/src/llm/anthropic/llm.spec.ts +491 -115
  186. package/src/llm/anthropic/types.ts +39 -3
  187. package/src/llm/anthropic/utils/message_inputs.ts +67 -11
  188. package/src/llm/anthropic/utils/message_outputs.ts +21 -2
  189. package/src/llm/anthropic/utils/output_parsers.ts +25 -6
  190. package/src/llm/anthropic/utils/tools.ts +29 -0
  191. package/src/llm/google/index.ts +218 -0
  192. package/src/llm/google/types.ts +43 -0
  193. package/src/llm/google/utils/common.ts +646 -0
  194. package/src/llm/google/utils/tools.ts +160 -0
  195. package/src/llm/google/utils/zod_to_genai_parameters.ts +86 -0
  196. package/src/llm/ollama/index.ts +89 -0
  197. package/src/llm/ollama/utils.ts +193 -0
  198. package/src/llm/openai/index.ts +641 -14
  199. package/src/llm/openai/types.ts +24 -0
  200. package/src/llm/openai/utils/index.ts +912 -0
  201. package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
  202. package/src/llm/providers.ts +10 -9
  203. package/src/llm/text.ts +26 -7
  204. package/src/llm/vertexai/index.ts +360 -0
  205. package/src/messages/reducer.ts +80 -0
  206. package/src/run.ts +196 -116
  207. package/src/scripts/ant_web_search.ts +158 -0
  208. package/src/scripts/args.ts +12 -8
  209. package/src/scripts/cli4.ts +29 -21
  210. package/src/scripts/cli5.ts +29 -21
  211. package/src/scripts/code_exec.ts +54 -23
  212. package/src/scripts/code_exec_files.ts +48 -17
  213. package/src/scripts/code_exec_simple.ts +46 -27
  214. package/src/scripts/handoff-test.ts +135 -0
  215. package/src/scripts/image.ts +52 -20
  216. package/src/scripts/multi-agent-chain.ts +278 -0
  217. package/src/scripts/multi-agent-conditional.ts +220 -0
  218. package/src/scripts/multi-agent-document-review-chain.ts +197 -0
  219. package/src/scripts/multi-agent-hybrid-flow.ts +310 -0
  220. package/src/scripts/multi-agent-parallel.ts +341 -0
  221. package/src/scripts/multi-agent-sequence.ts +212 -0
  222. package/src/scripts/multi-agent-supervisor.ts +362 -0
  223. package/src/scripts/multi-agent-test.ts +186 -0
  224. package/src/scripts/search.ts +1 -9
  225. package/src/scripts/simple.ts +25 -10
  226. package/src/scripts/test-custom-prompt-key.ts +145 -0
  227. package/src/scripts/test-handoff-input.ts +170 -0
  228. package/src/scripts/test-multi-agent-list-handoff.ts +261 -0
  229. package/src/scripts/test-tools-before-handoff.ts +233 -0
  230. package/src/scripts/tools.ts +48 -18
  231. package/src/specs/anthropic.simple.test.ts +150 -34
  232. package/src/specs/azure.simple.test.ts +325 -0
  233. package/src/specs/openai.simple.test.ts +140 -33
  234. package/src/specs/openrouter.simple.test.ts +107 -0
  235. package/src/specs/prune.test.ts +4 -9
  236. package/src/specs/reasoning.test.ts +80 -44
  237. package/src/specs/token-memoization.test.ts +39 -0
  238. package/src/stream.test.ts +94 -0
  239. package/src/stream.ts +143 -61
  240. package/src/tools/ToolNode.ts +21 -7
  241. package/src/tools/handlers.ts +192 -18
  242. package/src/tools/search/anthropic.ts +51 -0
  243. package/src/tools/search/firecrawl.ts +69 -20
  244. package/src/tools/search/format.ts +6 -8
  245. package/src/tools/search/rerankers.ts +7 -40
  246. package/src/tools/search/search.ts +97 -16
  247. package/src/tools/search/tool.ts +5 -2
  248. package/src/tools/search/types.ts +30 -10
  249. package/src/tools/search/utils.ts +1 -1
  250. package/src/types/graph.ts +318 -103
  251. package/src/types/llm.ts +26 -12
  252. package/src/types/run.ts +56 -13
  253. package/src/types/stream.ts +22 -1
  254. package/src/types/tools.ts +16 -10
  255. package/src/utils/events.ts +32 -0
  256. package/src/utils/llmConfig.ts +19 -7
  257. package/src/utils/title.ts +104 -30
  258. package/src/utils/tokens.ts +69 -10
  259. package/dist/types/scripts/abort.d.ts +0 -1
  260. package/dist/types/scripts/args.d.ts +0 -6
  261. package/dist/types/scripts/caching.d.ts +0 -1
  262. package/dist/types/scripts/cli.d.ts +0 -1
  263. package/dist/types/scripts/cli2.d.ts +0 -1
  264. package/dist/types/scripts/cli3.d.ts +0 -1
  265. package/dist/types/scripts/cli4.d.ts +0 -1
  266. package/dist/types/scripts/cli5.d.ts +0 -1
  267. package/dist/types/scripts/code_exec.d.ts +0 -1
  268. package/dist/types/scripts/code_exec_files.d.ts +0 -1
  269. package/dist/types/scripts/code_exec_simple.d.ts +0 -1
  270. package/dist/types/scripts/content.d.ts +0 -1
  271. package/dist/types/scripts/empty_input.d.ts +0 -1
  272. package/dist/types/scripts/image.d.ts +0 -1
  273. package/dist/types/scripts/memory.d.ts +0 -1
  274. package/dist/types/scripts/search.d.ts +0 -1
  275. package/dist/types/scripts/simple.d.ts +0 -1
  276. package/dist/types/scripts/stream.d.ts +0 -1
  277. package/dist/types/scripts/thinking.d.ts +0 -1
  278. package/dist/types/scripts/tools.d.ts +0 -1
  279. package/dist/types/specs/spec.utils.d.ts +0 -1
@@ -0,0 +1,158 @@
1
+ 'use strict';
2
+
3
+ var messages = require('@langchain/core/messages');
4
+ var uuid = require('uuid');
5
+
6
+ function convertOllamaMessagesToLangChain(messages$1, extra) {
7
+ const additional_kwargs = {};
8
+ if ('thinking' in messages$1) {
9
+ additional_kwargs.reasoning_content = messages$1.thinking;
10
+ }
11
+ return new messages.AIMessageChunk({
12
+ content: messages$1.content || '',
13
+ tool_call_chunks: messages$1.tool_calls?.map((tc) => ({
14
+ name: tc.function.name,
15
+ args: JSON.stringify(tc.function.arguments),
16
+ type: 'tool_call_chunk',
17
+ index: 0,
18
+ id: uuid.v4(),
19
+ })),
20
+ response_metadata: extra?.responseMetadata,
21
+ usage_metadata: extra?.usageMetadata,
22
+ additional_kwargs,
23
+ });
24
+ }
25
+ function extractBase64FromDataUrl(dataUrl) {
26
+ const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
27
+ return match ? match[1] : '';
28
+ }
29
+ function convertAMessagesToOllama(messages) {
30
+ if (typeof messages.content === 'string') {
31
+ return [
32
+ {
33
+ role: 'assistant',
34
+ content: messages.content,
35
+ },
36
+ ];
37
+ }
38
+ const textFields = messages.content.filter((c) => c.type === 'text' && typeof c.text === 'string');
39
+ const textMessages = textFields.map((c) => ({
40
+ role: 'assistant',
41
+ content: c.text,
42
+ }));
43
+ let toolCallMsgs;
44
+ if (messages.content.find((c) => c.type === 'tool_use') &&
45
+ messages.tool_calls?.length) {
46
+ // `tool_use` content types are accepted if the message has tool calls
47
+ const toolCalls = messages.tool_calls.map((tc) => ({
48
+ id: tc.id,
49
+ type: 'function',
50
+ function: {
51
+ name: tc.name,
52
+ arguments: tc.args,
53
+ },
54
+ }));
55
+ if (toolCalls) {
56
+ toolCallMsgs = {
57
+ role: 'assistant',
58
+ tool_calls: toolCalls,
59
+ content: '',
60
+ };
61
+ }
62
+ }
63
+ else if (messages.content.find((c) => c.type === 'tool_use') &&
64
+ !messages.tool_calls?.length) {
65
+ throw new Error('\'tool_use\' content type is not supported without tool calls.');
66
+ }
67
+ return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
68
+ }
69
+ function convertHumanGenericMessagesToOllama(message) {
70
+ if (typeof message.content === 'string') {
71
+ return [
72
+ {
73
+ role: 'user',
74
+ content: message.content,
75
+ },
76
+ ];
77
+ }
78
+ return message.content.map((c) => {
79
+ if (c.type === 'text') {
80
+ return {
81
+ role: 'user',
82
+ content: c.text,
83
+ };
84
+ }
85
+ else if (c.type === 'image_url') {
86
+ if (typeof c.image_url === 'string') {
87
+ return {
88
+ role: 'user',
89
+ content: '',
90
+ images: [extractBase64FromDataUrl(c.image_url)],
91
+ };
92
+ }
93
+ else if (c.image_url.url && typeof c.image_url.url === 'string') {
94
+ return {
95
+ role: 'user',
96
+ content: '',
97
+ images: [extractBase64FromDataUrl(c.image_url.url)],
98
+ };
99
+ }
100
+ }
101
+ throw new Error(`Unsupported content type: ${c.type}`);
102
+ });
103
+ }
104
+ function convertSystemMessageToOllama(message) {
105
+ if (typeof message.content === 'string') {
106
+ return [
107
+ {
108
+ role: 'system',
109
+ content: message.content,
110
+ },
111
+ ];
112
+ }
113
+ else if (message.content.every((c) => c.type === 'text' && typeof c.text === 'string')) {
114
+ return message.content.map((c) => ({
115
+ role: 'system',
116
+ content: c.text,
117
+ }));
118
+ }
119
+ else {
120
+ throw new Error(`Unsupported content type(s): ${message.content
121
+ .map((c) => c.type)
122
+ .join(', ')}`);
123
+ }
124
+ }
125
+ function convertToolMessageToOllama(message) {
126
+ if (typeof message.content !== 'string') {
127
+ throw new Error('Non string tool message content is not supported');
128
+ }
129
+ return [
130
+ {
131
+ role: 'tool',
132
+ content: message.content,
133
+ },
134
+ ];
135
+ }
136
+ function convertToOllamaMessages(messages) {
137
+ return messages.flatMap((msg) => {
138
+ if (['human', 'generic'].includes(msg._getType())) {
139
+ return convertHumanGenericMessagesToOllama(msg);
140
+ }
141
+ else if (msg._getType() === 'ai') {
142
+ return convertAMessagesToOllama(msg);
143
+ }
144
+ else if (msg._getType() === 'system') {
145
+ return convertSystemMessageToOllama(msg);
146
+ }
147
+ else if (msg._getType() === 'tool') {
148
+ return convertToolMessageToOllama(msg);
149
+ }
150
+ else {
151
+ throw new Error(`Unsupported message type: ${msg._getType()}`);
152
+ }
153
+ });
154
+ }
155
+
156
+ exports.convertOllamaMessagesToLangChain = convertOllamaMessagesToLangChain;
157
+ exports.convertToOllamaMessages = convertToOllamaMessages;
158
+ //# sourceMappingURL=utils.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.cjs","sources":["../../../../src/llm/ollama/utils.ts"],"sourcesContent":["import {\n AIMessage,\n AIMessageChunk,\n BaseMessage,\n HumanMessage,\n MessageContentText,\n SystemMessage,\n ToolMessage,\n UsageMetadata,\n} from '@langchain/core/messages';\nimport type {\n Message as OllamaMessage,\n ToolCall as OllamaToolCall,\n} from 'ollama';\nimport { v4 as uuidv4 } from 'uuid';\n\nexport function convertOllamaMessagesToLangChain(\n messages: OllamaMessage,\n extra?: {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n responseMetadata?: Record<string, any>;\n usageMetadata?: UsageMetadata;\n }\n): AIMessageChunk {\n const additional_kwargs: BaseMessage['additional_kwargs'] = {};\n if ('thinking' in messages) {\n additional_kwargs.reasoning_content = messages.thinking as string;\n }\n return new AIMessageChunk({\n content: messages.content || '',\n tool_call_chunks: messages.tool_calls?.map((tc) => ({\n name: tc.function.name,\n args: JSON.stringify(tc.function.arguments),\n type: 'tool_call_chunk',\n index: 0,\n id: uuidv4(),\n })),\n response_metadata: extra?.responseMetadata,\n usage_metadata: extra?.usageMetadata,\n additional_kwargs,\n });\n}\n\nfunction extractBase64FromDataUrl(dataUrl: string): string {\n const match = dataUrl.match(/^data:.*?;base64,(.*)$/);\n return match ? 
match[1] : '';\n}\n\nfunction convertAMessagesToOllama(messages: AIMessage): OllamaMessage[] {\n if (typeof messages.content === 'string') {\n return [\n {\n role: 'assistant',\n content: messages.content,\n },\n ];\n }\n\n const textFields = messages.content.filter(\n (c) => c.type === 'text' && typeof c.text === 'string'\n );\n const textMessages = (textFields as MessageContentText[]).map((c) => ({\n role: 'assistant',\n content: c.text,\n }));\n let toolCallMsgs: OllamaMessage | undefined;\n\n if (\n messages.content.find((c) => c.type === 'tool_use') &&\n messages.tool_calls?.length\n ) {\n // `tool_use` content types are accepted if the message has tool calls\n const toolCalls: OllamaToolCall[] | undefined = messages.tool_calls.map(\n (tc) => ({\n id: tc.id,\n type: 'function',\n function: {\n name: tc.name,\n arguments: tc.args,\n },\n })\n );\n\n if (toolCalls) {\n toolCallMsgs = {\n role: 'assistant',\n tool_calls: toolCalls,\n content: '',\n };\n }\n } else if (\n messages.content.find((c) => c.type === 'tool_use') &&\n !messages.tool_calls?.length\n ) {\n throw new Error(\n '\\'tool_use\\' content type is not supported without tool calls.'\n );\n }\n\n return [...textMessages, ...(toolCallMsgs ? 
[toolCallMsgs] : [])];\n}\n\nfunction convertHumanGenericMessagesToOllama(\n message: HumanMessage\n): OllamaMessage[] {\n if (typeof message.content === 'string') {\n return [\n {\n role: 'user',\n content: message.content,\n },\n ];\n }\n return message.content.map((c) => {\n if (c.type === 'text') {\n return {\n role: 'user',\n content: c.text,\n };\n } else if (c.type === 'image_url') {\n if (typeof c.image_url === 'string') {\n return {\n role: 'user',\n content: '',\n images: [extractBase64FromDataUrl(c.image_url)],\n };\n } else if (c.image_url.url && typeof c.image_url.url === 'string') {\n return {\n role: 'user',\n content: '',\n images: [extractBase64FromDataUrl(c.image_url.url)],\n };\n }\n }\n throw new Error(`Unsupported content type: ${c.type}`);\n });\n}\n\nfunction convertSystemMessageToOllama(message: SystemMessage): OllamaMessage[] {\n if (typeof message.content === 'string') {\n return [\n {\n role: 'system',\n content: message.content,\n },\n ];\n } else if (\n message.content.every(\n (c) => c.type === 'text' && typeof c.text === 'string'\n )\n ) {\n return (message.content as MessageContentText[]).map((c) => ({\n role: 'system',\n content: c.text,\n }));\n } else {\n throw new Error(\n `Unsupported content type(s): ${message.content\n .map((c) => c.type)\n .join(', ')}`\n );\n }\n}\n\nfunction convertToolMessageToOllama(message: ToolMessage): OllamaMessage[] {\n if (typeof message.content !== 'string') {\n throw new Error('Non string tool message content is not supported');\n }\n return [\n {\n role: 'tool',\n content: message.content,\n },\n ];\n}\n\nexport function convertToOllamaMessages(\n messages: BaseMessage[]\n): OllamaMessage[] {\n return messages.flatMap((msg) => {\n if (['human', 'generic'].includes(msg._getType())) {\n return convertHumanGenericMessagesToOllama(msg);\n } else if (msg._getType() === 'ai') {\n return convertAMessagesToOllama(msg);\n } else if (msg._getType() === 'system') {\n return 
convertSystemMessageToOllama(msg);\n } else if (msg._getType() === 'tool') {\n return convertToolMessageToOllama(msg as ToolMessage);\n } else {\n throw new Error(`Unsupported message type: ${msg._getType()}`);\n }\n });\n}\n"],"names":["messages","AIMessageChunk","uuidv4"],"mappings":";;;;;AAgBgB,SAAA,gCAAgC,CAC9CA,UAAuB,EACvB,KAIC,EAAA;IAED,MAAM,iBAAiB,GAAqC,EAAE;AAC9D,IAAA,IAAI,UAAU,IAAIA,UAAQ,EAAE;AAC1B,QAAA,iBAAiB,CAAC,iBAAiB,GAAGA,UAAQ,CAAC,QAAkB;;IAEnE,OAAO,IAAIC,uBAAc,CAAC;AACxB,QAAA,OAAO,EAAED,UAAQ,CAAC,OAAO,IAAI,EAAE;AAC/B,QAAA,gBAAgB,EAAEA,UAAQ,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC,EAAE,MAAM;AAClD,YAAA,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI;YACtB,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC;AAC3C,YAAA,IAAI,EAAE,iBAAiB;AACvB,YAAA,KAAK,EAAE,CAAC;YACR,EAAE,EAAEE,OAAM,EAAE;AACb,SAAA,CAAC,CAAC;QACH,iBAAiB,EAAE,KAAK,EAAE,gBAAgB;QAC1C,cAAc,EAAE,KAAK,EAAE,aAAa;QACpC,iBAAiB;AAClB,KAAA,CAAC;AACJ;AAEA,SAAS,wBAAwB,CAAC,OAAe,EAAA;IAC/C,MAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,wBAAwB,CAAC;AACrD,IAAA,OAAO,KAAK,GAAG,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;AAC9B;AAEA,SAAS,wBAAwB,CAAC,QAAmB,EAAA;AACnD,IAAA,IAAI,OAAO,QAAQ,CAAC,OAAO,KAAK,QAAQ,EAAE;QACxC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,QAAQ,CAAC,OAAO;AAC1B,aAAA;SACF;;IAGH,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,MAAM,CACxC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CACvD;IACD,MAAM,YAAY,GAAI,UAAmC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACpE,QAAA,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,CAAC,CAAC,IAAI;AAChB,KAAA,CAAC,CAAC;AACH,IAAA,IAAI,YAAuC;AAE3C,IAAA,IACE,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC;AACnD,QAAA,QAAQ,CAAC,UAAU,EAAE,MAAM,EAC3B;;AAEA,QAAA,MAAM,SAAS,GAAiC,QAAQ,CAAC,UAAU,CAAC,GAAG,CACrE,CAAC,EAAE,MAAM;YACP,EAAE,EAAE,EAAE,CAAC,EAAE;AACT,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,QAAQ,EAAE;gBACR,IAAI,EAAE,EAAE,CAAC,IAAI;gBACb,SAAS,EAAE,EAAE,CAAC,IAAI;AACnB,aAAA;AACF,SAAA,CAAC,CACH;QAED,IAAI,SAAS,EAAE;AACb,YAAA,YAAY,GAAG;AACb,gBAAA,IAAI,EAAE,WAAW;AACjB,gBAAA,UAAU,EAAE,SAAS;AACrB
,gBAAA,OAAO,EAAE,EAAE;aACZ;;;AAEE,SAAA,IACL,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC;AACnD,QAAA,CAAC,QAAQ,CAAC,UAAU,EAAE,MAAM,EAC5B;AACA,QAAA,MAAM,IAAI,KAAK,CACb,gEAAgE,CACjE;;AAGH,IAAA,OAAO,CAAC,GAAG,YAAY,EAAE,IAAI,YAAY,GAAG,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC;AACnE;AAEA,SAAS,mCAAmC,CAC1C,OAAqB,EAAA;AAErB,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;QACvC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;IAEH,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;AAC/B,QAAA,IAAI,CAAC,CAAC,IAAI,KAAK,MAAM,EAAE;YACrB,OAAO;AACL,gBAAA,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,CAAC,CAAC,IAAI;aAChB;;AACI,aAAA,IAAI,CAAC,CAAC,IAAI,KAAK,WAAW,EAAE;AACjC,YAAA,IAAI,OAAO,CAAC,CAAC,SAAS,KAAK,QAAQ,EAAE;gBACnC,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,OAAO,EAAE,EAAE;oBACX,MAAM,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;iBAChD;;AACI,iBAAA,IAAI,CAAC,CAAC,SAAS,CAAC,GAAG,IAAI,OAAO,CAAC,CAAC,SAAS,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACjE,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,OAAO,EAAE,EAAE;oBACX,MAAM,EAAE,CAAC,wBAAwB,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;iBACpD;;;QAGL,MAAM,IAAI,KAAK,CAAC,CAAA,0BAAA,EAA6B,CAAC,CAAC,IAAI,CAAE,CAAA,CAAC;AACxD,KAAC,CAAC;AACJ;AAEA,SAAS,4BAA4B,CAAC,OAAsB,EAAA;AAC1D,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;QACvC,OAAO;AACL,YAAA;AACE,gBAAA,IAAI,EAAE,QAAQ;gBACd,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;SACI,IACL,OAAO,CAAC,OAAO,CAAC,KAAK,CACnB,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CACvD,EACD;QACA,OAAQ,OAAO,CAAC,OAAgC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AAC3D,YAAA,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,CAAC,CAAC,IAAI;AAChB,SAAA,CAAC,CAAC;;SACE;AACL,QAAA,MAAM,IAAI,KAAK,CACb,CAAgC,6BAAA,EAAA,OAAO,CAAC;aACrC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI;AACjB,aAAA,IAAI,CAAC,IAAI,CAAC,CAAA,CAAE,CAChB;;AAEL;AAEA,SAAS,0BAA0B,CAAC,OAAoB,EAAA;AACtD,IAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;AACvC,QAAA,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC;;IAErE,OAAO;AACL,QAAA;AACE,YAAA,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,OA
AO,CAAC,OAAO;AACzB,SAAA;KACF;AACH;AAEM,SAAU,uBAAuB,CACrC,QAAuB,EAAA;AAEvB,IAAA,OAAO,QAAQ,CAAC,OAAO,CAAC,CAAC,GAAG,KAAI;AAC9B,QAAA,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,EAAE;AACjD,YAAA,OAAO,mCAAmC,CAAC,GAAG,CAAC;;AAC1C,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;AAClC,YAAA,OAAO,wBAAwB,CAAC,GAAG,CAAC;;AAC/B,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,QAAQ,EAAE;AACtC,YAAA,OAAO,4BAA4B,CAAC,GAAG,CAAC;;AACnC,aAAA,IAAI,GAAG,CAAC,QAAQ,EAAE,KAAK,MAAM,EAAE;AACpC,YAAA,OAAO,0BAA0B,CAAC,GAAkB,CAAC;;aAChD;YACL,MAAM,IAAI,KAAK,CAAC,CAA6B,0BAAA,EAAA,GAAG,CAAC,QAAQ,EAAE,CAAE,CAAA,CAAC;;AAElE,KAAC,CAAC;AACJ;;;;;"}
@@ -1,10 +1,53 @@
1
1
  'use strict';
2
2
 
3
3
  var openai$1 = require('openai');
4
+ var messages = require('@langchain/core/messages');
4
5
  var xai = require('@langchain/xai');
6
+ var outputs = require('@langchain/core/outputs');
7
+ require('@langchain/core/utils/function_calling');
5
8
  var deepseek = require('@langchain/deepseek');
6
9
  var openai = require('@langchain/openai');
10
+ var index = require('./utils/index.cjs');
11
+ require('../../common/enum.cjs');
12
+ var run = require('../../utils/run.cjs');
13
+ require('js-tiktoken/lite');
7
14
 
15
+ // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
16
+ const iife = (fn) => fn();
17
+ function isHeaders(headers) {
18
+ return (typeof Headers !== 'undefined' &&
19
+ headers !== null &&
20
+ typeof headers === 'object' &&
21
+ Object.prototype.toString.call(headers) === '[object Headers]');
22
+ }
23
+ function normalizeHeaders(headers) {
24
+ const output = iife(() => {
25
+ // If headers is a Headers instance
26
+ if (isHeaders(headers)) {
27
+ return headers;
28
+ }
29
+ // If headers is an array of [key, value] pairs
30
+ else if (Array.isArray(headers)) {
31
+ return new Headers(headers);
32
+ }
33
+ // If headers is a NullableHeaders-like object (has 'values' property that is a Headers)
34
+ else if (typeof headers === 'object' &&
35
+ headers !== null &&
36
+ 'values' in headers &&
37
+ isHeaders(headers.values)) {
38
+ return headers.values;
39
+ }
40
+ // If headers is a plain object
41
+ else if (typeof headers === 'object' && headers !== null) {
42
+ const entries = Object.entries(headers)
43
+ .filter(([, v]) => typeof v === 'string')
44
+ .map(([k, v]) => [k, v]);
45
+ return new Headers(entries);
46
+ }
47
+ return new Headers();
48
+ });
49
+ return Object.fromEntries(output.entries());
50
+ }
8
51
  function createAbortHandler(controller) {
9
52
  return function () {
10
53
  controller.abort();
@@ -64,7 +107,13 @@ class CustomAzureOpenAIClient extends openai$1.AzureOpenAI {
64
107
  }));
65
108
  }
66
109
  }
110
+ /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
67
111
  class ChatOpenAI extends openai.ChatOpenAI {
112
+ _lc_stream_delay;
113
+ constructor(fields) {
114
+ super(fields);
115
+ this._lc_stream_delay = fields?._lc_stream_delay;
116
+ }
68
117
  get exposedClient() {
69
118
  return this.client;
70
119
  }
@@ -91,11 +140,203 @@ class ChatOpenAI extends openai.ChatOpenAI {
91
140
  };
92
141
  return requestOptions;
93
142
  }
143
+ /**
144
+ * Returns backwards compatible reasoning parameters from constructor params and call options
145
+ * @internal
146
+ */
147
+ getReasoningParams(options) {
148
+ if (!index.isReasoningModel(this.model)) {
149
+ return;
150
+ }
151
+ // apply options in reverse order of importance -- newer options supersede older options
152
+ let reasoning;
153
+ if (this.reasoning !== undefined) {
154
+ reasoning = {
155
+ ...reasoning,
156
+ ...this.reasoning,
157
+ };
158
+ }
159
+ if (options?.reasoning !== undefined) {
160
+ reasoning = {
161
+ ...reasoning,
162
+ ...options.reasoning,
163
+ };
164
+ }
165
+ return reasoning;
166
+ }
167
/**
 * Internal hook used by base-class plumbing; delegates to the public
 * `getReasoningParams` override on this class.
 * @internal
 */
_getReasoningParams(options) {
    return this.getReasoningParams(options);
}
170
/**
 * Streams message chunks, routing to the OpenAI Responses API when
 * `_useResponseApi(options)` says so; otherwise falls through to the
 * Chat Completions streaming path (`_streamResponseChunks2` below).
 * Applies the optional `_lc_stream_delay` throttle between yielded chunks
 * and notifies `runManager` of each new token.
 */
async *_streamResponseChunks(messages, options, runManager) {
    if (!this._useResponseApi(options)) {
        // Chat Completions path (custom implementation rather than super's —
        // presumably to add reasoning_content and stream-delay handling).
        return yield* this._streamResponseChunks2(messages, options, runManager);
    }
    const streamIterable = await this.responseApiWithRetry({
        ...this.invocationParams(options, { streaming: true }),
        input: index._convertMessagesToOpenAIResponsesParams(messages, this.model, this.zdrEnabled),
        stream: true,
    }, options);
    for await (const data of streamIterable) {
        // Deltas that don't map to a message chunk are skipped entirely.
        const chunk = index._convertOpenAIResponsesDeltaToBaseMessageChunk(data);
        if (chunk == null)
            continue;
        yield chunk;
        // Optional artificial pacing between chunks.
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
    }
    return;
}
191
/**
 * Chat Completions streaming implementation. Converts messages to OpenAI
 * params, streams deltas, surfaces `reasoning_content` (or `reasoning`) from
 * providers that emit it, and — after the stream ends — emits one final
 * usage-only chunk when the API reported token usage. Honors the optional
 * `_lc_stream_delay` pacing, and throws `Error('AbortError')` if the call's
 * abort signal fired.
 */
async *_streamResponseChunks2(messages$1, options, runManager) {
    const messagesMapped = index._convertMessagesToOpenAIParams(messages$1, this.model);
    const params = {
        ...this.invocationParams(options, {
            streaming: true,
        }),
        messages: messagesMapped,
        stream: true,
    };
    // Role of the first delta; reused for subsequent deltas that omit it.
    let defaultRole;
    const streamIterable = await this.completionWithRetry(params, options);
    // Usage typically arrives on a trailing chunk with no choices; stash it.
    let usage;
    for await (const data of streamIterable) {
        const choice = data.choices[0];
        if (data.usage) {
            usage = data.usage;
        }
        if (!choice) {
            continue;
        }
        const { delta } = choice;
        if (!delta) {
            continue;
        }
        const chunk = this._convertOpenAIDeltaToBaseMessageChunk(delta, data, defaultRole);
        // Normalize both provider spellings onto `reasoning_content`.
        if ('reasoning_content' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning_content;
        }
        else if ('reasoning' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning;
        }
        defaultRole = delta.role ?? defaultRole;
        const newTokenIndices = {
            prompt: options.promptIndex ?? 0,
            completion: choice.index ?? 0,
        };
        // Array/multimodal deltas are not supported on this path.
        if (typeof chunk.content !== 'string') {
            // eslint-disable-next-line no-console
            console.log('[WARNING]: Received non-string content from OpenAI. This is currently not supported.');
            continue;
        }
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const generationInfo = { ...newTokenIndices };
        if (choice.finish_reason != null) {
            generationInfo.finish_reason = choice.finish_reason;
            // Only include system fingerprint in the last chunk for now
            // to avoid concatenation issues
            generationInfo.system_fingerprint = data.system_fingerprint;
            generationInfo.model_name = data.model;
            generationInfo.service_tier = data.service_tier;
        }
        if (this.logprobs == true) {
            generationInfo.logprobs = choice.logprobs;
        }
        const generationChunk = new outputs.ChatGenerationChunk({
            message: chunk,
            text: chunk.content,
            generationInfo,
        });
        yield generationChunk;
        // Optional artificial pacing between chunks.
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
    }
    if (usage) {
        // Build token-detail objects conditionally so empty detail objects are
        // omitted from usage_metadata below.
        const inputTokenDetails = {
            ...(usage.prompt_tokens_details?.audio_tokens != null && {
                audio: usage.prompt_tokens_details.audio_tokens,
            }),
            ...(usage.prompt_tokens_details?.cached_tokens != null && {
                cache_read: usage.prompt_tokens_details.cached_tokens,
            }),
        };
        const outputTokenDetails = {
            ...(usage.completion_tokens_details?.audio_tokens != null && {
                audio: usage.completion_tokens_details.audio_tokens,
            }),
            ...(usage.completion_tokens_details?.reasoning_tokens != null && {
                reasoning: usage.completion_tokens_details.reasoning_tokens,
            }),
        };
        // Trailing empty-content chunk carrying only usage metadata.
        const generationChunk = new outputs.ChatGenerationChunk({
            message: new messages.AIMessageChunk({
                content: '',
                response_metadata: {
                    usage: { ...usage },
                },
                usage_metadata: {
                    input_tokens: usage.prompt_tokens,
                    output_tokens: usage.completion_tokens,
                    total_tokens: usage.total_tokens,
                    ...(Object.keys(inputTokenDetails).length > 0 && {
                        input_token_details: inputTokenDetails,
                    }),
                    ...(Object.keys(outputTokenDetails).length > 0 && {
                        output_token_details: outputTokenDetails,
                    }),
                },
            }),
            text: '',
        });
        yield generationChunk;
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
    }
    if (options.signal?.aborted === true) {
        throw new Error('AbortError');
    }
}
94
302
  }
303
+ /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
95
304
  class AzureChatOpenAI extends openai.AzureChatOpenAI {
305
/** Optional delay applied between streamed chunks via `run.sleep` — presumably milliseconds; TODO confirm units against `run.sleep`. */
_lc_stream_delay;
/**
 * @param {object} [fields] - Fields forwarded to the base `AzureChatOpenAI`;
 *   may additionally carry the custom `_lc_stream_delay` throttle.
 */
constructor(fields) {
    super(fields);
    this._lc_stream_delay = fields?._lc_stream_delay;
}
96
310
  get exposedClient() {
97
311
  return this.client;
98
312
  }
313
+ /**
314
+ * Returns backwards compatible reasoning parameters from constructor params and call options
315
+ * @internal
316
+ */
317
+ getReasoningParams(options) {
318
+ if (!index.isReasoningModel(this.model)) {
319
+ return;
320
+ }
321
+ // apply options in reverse order of importance -- newer options supersede older options
322
+ let reasoning;
323
+ if (this.reasoning !== undefined) {
324
+ reasoning = {
325
+ ...reasoning,
326
+ ...this.reasoning,
327
+ };
328
+ }
329
+ if (options?.reasoning !== undefined) {
330
+ reasoning = {
331
+ ...reasoning,
332
+ ...options.reasoning,
333
+ };
334
+ }
335
+ return reasoning;
336
+ }
337
/**
 * Internal hook used by base-class plumbing; delegates to the public
 * `getReasoningParams` override on this class.
 * @internal
 */
_getReasoningParams(options) {
    return this.getReasoningParams(options);
}
99
340
  _getClientOptions(options) {
100
341
  if (!this.client) {
101
342
  const openAIEndpointConfig = {
@@ -119,11 +360,12 @@ class AzureChatOpenAI extends openai.AzureChatOpenAI {
119
360
  if (params.baseURL == null) {
120
361
  delete params.baseURL;
121
362
  }
363
+ const defaultHeaders = normalizeHeaders(params.defaultHeaders);
122
364
  params.defaultHeaders = {
123
365
  ...params.defaultHeaders,
124
- 'User-Agent': params.defaultHeaders?.['User-Agent'] != null
125
- ? `${params.defaultHeaders['User-Agent']}: langchainjs-azure-openai-v2`
126
- : 'langchainjs-azure-openai-v2',
366
+ 'User-Agent': defaultHeaders['User-Agent'] != null
367
+ ? `${defaultHeaders['User-Agent']}: librechat-azure-openai-v2`
368
+ : 'librechat-azure-openai-v2',
127
369
  };
128
370
  this.client = new CustomAzureOpenAIClient({
129
371
  apiVersion: this.azureOpenAIApiVersion,
@@ -147,6 +389,27 @@ class AzureChatOpenAI extends openai.AzureChatOpenAI {
147
389
  }
148
390
  return requestOptions;
149
391
  }
392
/**
 * Streams message chunks, routing to the OpenAI Responses API when
 * `_useResponseApi(options)` says so; otherwise defers to the stock base-class
 * streaming implementation. Applies the optional `_lc_stream_delay` throttle
 * between yielded chunks and notifies `runManager` of each new token.
 */
async *_streamResponseChunks(messages, options, runManager) {
    if (!this._useResponseApi(options)) {
        // Chat Completions path: the base implementation is sufficient here.
        return yield* super._streamResponseChunks(messages, options, runManager);
    }
    const streamIterable = await this.responseApiWithRetry({
        ...this.invocationParams(options, { streaming: true }),
        input: index._convertMessagesToOpenAIResponsesParams(messages, this.model, this.zdrEnabled),
        stream: true,
    }, options);
    for await (const data of streamIterable) {
        // Deltas that don't map to a message chunk are skipped entirely.
        const chunk = index._convertOpenAIResponsesDeltaToBaseMessageChunk(data);
        if (chunk == null)
            continue;
        yield chunk;
        // Optional artificial pacing between chunks.
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
    }
    return;
}
150
413
  }
151
414
  class ChatDeepSeek extends deepseek.ChatDeepSeek {
152
415
  get exposedClient() {
@@ -177,6 +440,21 @@ class ChatDeepSeek extends deepseek.ChatDeepSeek {
177
440
  }
178
441
  }
179
442
  class ChatXAI extends xai.ChatXAI {
443
+ _lc_stream_delay;
444
+ constructor(fields) {
445
+ super(fields);
446
+ this._lc_stream_delay = fields?._lc_stream_delay;
447
+ const customBaseURL = fields?.configuration?.baseURL ?? fields?.clientConfig?.baseURL;
448
+ if (customBaseURL != null && customBaseURL) {
449
+ this.clientConfig = {
450
+ ...this.clientConfig,
451
+ baseURL: customBaseURL,
452
+ };
453
+ // Reset the client to force recreation with new config
454
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
455
+ this.client = undefined;
456
+ }
457
+ }
180
458
  get exposedClient() {
181
459
  return this.client;
182
460
  }
@@ -203,6 +481,145 @@ class ChatXAI extends xai.ChatXAI {
203
481
  };
204
482
  return requestOptions;
205
483
  }
484
/**
 * xAI streaming implementation over the Chat Completions wire format.
 * Streams deltas, normalizes per-chunk `usage_metadata` (nullish counts → 0),
 * surfaces `reasoning_content`, and — after the stream ends — emits one final
 * usage-only chunk that also folds in xAI-specific token details
 * (text/image prompt tokens, accepted/rejected prediction tokens,
 * `num_sources_used`). Honors the optional `_lc_stream_delay` pacing and
 * throws `Error('AbortError')` if the call's abort signal fired.
 */
async *_streamResponseChunks(messages$1, options, runManager) {
    const messagesMapped = index._convertMessagesToOpenAIParams(messages$1, this.model);
    const params = {
        ...this.invocationParams(options, {
            streaming: true,
        }),
        messages: messagesMapped,
        stream: true,
    };
    // Role of the first delta; reused for subsequent deltas that omit it.
    let defaultRole;
    const streamIterable = await this.completionWithRetry(params, options);
    // Usage typically arrives on a trailing chunk with no choices; stash it.
    let usage;
    for await (const data of streamIterable) {
        const choice = data.choices[0];
        if (data.usage) {
            usage = data.usage;
        }
        if (!choice) {
            continue;
        }
        const { delta } = choice;
        if (!delta) {
            continue;
        }
        const chunk = this._convertOpenAIDeltaToBaseMessageChunk(delta, data, defaultRole);
        // Coerce nullish token counts to 0 so downstream accumulation is safe.
        if (chunk.usage_metadata != null) {
            chunk.usage_metadata = {
                input_tokens: chunk.usage_metadata.input_tokens ?? 0,
                output_tokens: chunk.usage_metadata.output_tokens ?? 0,
                total_tokens: chunk.usage_metadata.total_tokens ?? 0,
            };
        }
        if ('reasoning_content' in delta) {
            chunk.additional_kwargs.reasoning_content = delta.reasoning_content;
        }
        defaultRole = delta.role ?? defaultRole;
        const newTokenIndices = {
            prompt: options.promptIndex ?? 0,
            completion: choice.index ?? 0,
        };
        // Array/multimodal deltas are not supported on this path.
        if (typeof chunk.content !== 'string') {
            // eslint-disable-next-line no-console
            console.log('[WARNING]: Received non-string content from OpenAI. This is currently not supported.');
            continue;
        }
        // eslint-disable-next-line @typescript-eslint/no-explicit-any
        const generationInfo = { ...newTokenIndices };
        if (choice.finish_reason != null) {
            generationInfo.finish_reason = choice.finish_reason;
            // Only include system fingerprint in the last chunk for now
            // to avoid concatenation issues
            generationInfo.system_fingerprint = data.system_fingerprint;
            generationInfo.model_name = data.model;
            generationInfo.service_tier = data.service_tier;
        }
        if (this.logprobs == true) {
            generationInfo.logprobs = choice.logprobs;
        }
        const generationChunk = new outputs.ChatGenerationChunk({
            message: chunk,
            text: chunk.content,
            generationInfo,
        });
        yield generationChunk;
        // Optional artificial pacing between chunks.
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
        await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
    }
    if (usage) {
        // Type assertion for xAI-specific usage structure
        const xaiUsage = usage;
        // Build token-detail objects conditionally so empty detail objects
        // are omitted from usage_metadata below.
        const inputTokenDetails = {
            // Standard OpenAI fields
            ...(usage.prompt_tokens_details?.audio_tokens != null && {
                audio: usage.prompt_tokens_details.audio_tokens,
            }),
            ...(usage.prompt_tokens_details?.cached_tokens != null && {
                cache_read: usage.prompt_tokens_details.cached_tokens,
            }),
            // Add xAI-specific prompt token details if they exist
            ...(xaiUsage.prompt_tokens_details?.text_tokens != null && {
                text: xaiUsage.prompt_tokens_details.text_tokens,
            }),
            ...(xaiUsage.prompt_tokens_details?.image_tokens != null && {
                image: xaiUsage.prompt_tokens_details.image_tokens,
            }),
        };
        const outputTokenDetails = {
            // Standard OpenAI fields
            ...(usage.completion_tokens_details?.audio_tokens != null && {
                audio: usage.completion_tokens_details.audio_tokens,
            }),
            ...(usage.completion_tokens_details?.reasoning_tokens != null && {
                reasoning: usage.completion_tokens_details.reasoning_tokens,
            }),
            // Add xAI-specific completion token details if they exist
            ...(xaiUsage.completion_tokens_details?.accepted_prediction_tokens !=
                null && {
                accepted_prediction: xaiUsage.completion_tokens_details.accepted_prediction_tokens,
            }),
            ...(xaiUsage.completion_tokens_details?.rejected_prediction_tokens !=
                null && {
                rejected_prediction: xaiUsage.completion_tokens_details.rejected_prediction_tokens,
            }),
        };
        // Trailing empty-content chunk carrying only usage metadata.
        const generationChunk = new outputs.ChatGenerationChunk({
            message: new messages.AIMessageChunk({
                content: '',
                response_metadata: {
                    usage: { ...usage },
                    // Include xAI-specific metadata if it exists
                    ...(xaiUsage.num_sources_used != null && {
                        num_sources_used: xaiUsage.num_sources_used,
                    }),
                },
                usage_metadata: {
                    input_tokens: usage.prompt_tokens,
                    output_tokens: usage.completion_tokens,
                    total_tokens: usage.total_tokens,
                    ...(Object.keys(inputTokenDetails).length > 0 && {
                        input_token_details: inputTokenDetails,
                    }),
                    ...(Object.keys(outputTokenDetails).length > 0 && {
                        output_token_details: outputTokenDetails,
                    }),
                },
            }),
            text: '',
        });
        yield generationChunk;
        if (this._lc_stream_delay != null) {
            await run.sleep(this._lc_stream_delay);
        }
    }
    if (options.signal?.aborted === true) {
        throw new Error('AbortError');
    }
}
206
623
  }
207
624
 
208
625
  exports.AzureChatOpenAI = AzureChatOpenAI;
@@ -211,4 +628,6 @@ exports.ChatOpenAI = ChatOpenAI;
211
628
  exports.ChatXAI = ChatXAI;
212
629
  exports.CustomAzureOpenAIClient = CustomAzureOpenAIClient;
213
630
  exports.CustomOpenAIClient = CustomOpenAIClient;
631
+ exports.isHeaders = isHeaders;
632
+ exports.normalizeHeaders = normalizeHeaders;
214
633
  //# sourceMappingURL=index.cjs.map