illuma-agents 1.0.8 → 1.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (250)
  1. package/LICENSE +1 -5
  2. package/dist/cjs/common/enum.cjs +1 -2
  3. package/dist/cjs/common/enum.cjs.map +1 -1
  4. package/dist/cjs/events.cjs +11 -0
  5. package/dist/cjs/events.cjs.map +1 -1
  6. package/dist/cjs/graphs/Graph.cjs +2 -1
  7. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  8. package/dist/cjs/instrumentation.cjs +3 -1
  9. package/dist/cjs/instrumentation.cjs.map +1 -1
  10. package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
  11. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +79 -2
  12. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
  13. package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -1
  14. package/dist/cjs/llm/bedrock/index.cjs +99 -0
  15. package/dist/cjs/llm/bedrock/index.cjs.map +1 -0
  16. package/dist/cjs/llm/fake.cjs.map +1 -1
  17. package/dist/cjs/llm/openai/index.cjs +102 -0
  18. package/dist/cjs/llm/openai/index.cjs.map +1 -1
  19. package/dist/cjs/llm/openai/utils/index.cjs +87 -1
  20. package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
  21. package/dist/cjs/llm/openrouter/index.cjs +175 -1
  22. package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
  23. package/dist/cjs/llm/providers.cjs +13 -16
  24. package/dist/cjs/llm/providers.cjs.map +1 -1
  25. package/dist/cjs/llm/text.cjs.map +1 -1
  26. package/dist/cjs/messages/core.cjs +14 -14
  27. package/dist/cjs/messages/core.cjs.map +1 -1
  28. package/dist/cjs/messages/ids.cjs.map +1 -1
  29. package/dist/cjs/messages/prune.cjs.map +1 -1
  30. package/dist/cjs/run.cjs +18 -1
  31. package/dist/cjs/run.cjs.map +1 -1
  32. package/dist/cjs/splitStream.cjs.map +1 -1
  33. package/dist/cjs/stream.cjs +24 -1
  34. package/dist/cjs/stream.cjs.map +1 -1
  35. package/dist/cjs/tools/ToolNode.cjs +20 -1
  36. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  37. package/dist/cjs/tools/handlers.cjs +29 -25
  38. package/dist/cjs/tools/handlers.cjs.map +1 -1
  39. package/dist/cjs/tools/search/anthropic.cjs.map +1 -1
  40. package/dist/cjs/tools/search/content.cjs.map +1 -1
  41. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  42. package/dist/cjs/tools/search/format.cjs.map +1 -1
  43. package/dist/cjs/tools/search/highlights.cjs.map +1 -1
  44. package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
  45. package/dist/cjs/tools/search/schema.cjs +27 -25
  46. package/dist/cjs/tools/search/schema.cjs.map +1 -1
  47. package/dist/cjs/tools/search/search.cjs +6 -1
  48. package/dist/cjs/tools/search/search.cjs.map +1 -1
  49. package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -1
  50. package/dist/cjs/tools/search/tool.cjs +182 -35
  51. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  52. package/dist/cjs/tools/search/utils.cjs.map +1 -1
  53. package/dist/cjs/utils/graph.cjs.map +1 -1
  54. package/dist/cjs/utils/llm.cjs +0 -1
  55. package/dist/cjs/utils/llm.cjs.map +1 -1
  56. package/dist/cjs/utils/misc.cjs.map +1 -1
  57. package/dist/cjs/utils/run.cjs.map +1 -1
  58. package/dist/cjs/utils/title.cjs +7 -7
  59. package/dist/cjs/utils/title.cjs.map +1 -1
  60. package/dist/esm/common/enum.mjs +1 -2
  61. package/dist/esm/common/enum.mjs.map +1 -1
  62. package/dist/esm/events.mjs +11 -0
  63. package/dist/esm/events.mjs.map +1 -1
  64. package/dist/esm/graphs/Graph.mjs +2 -1
  65. package/dist/esm/graphs/Graph.mjs.map +1 -1
  66. package/dist/esm/instrumentation.mjs +3 -1
  67. package/dist/esm/instrumentation.mjs.map +1 -1
  68. package/dist/esm/llm/anthropic/types.mjs.map +1 -1
  69. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +79 -2
  70. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
  71. package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -1
  72. package/dist/esm/llm/bedrock/index.mjs +97 -0
  73. package/dist/esm/llm/bedrock/index.mjs.map +1 -0
  74. package/dist/esm/llm/fake.mjs.map +1 -1
  75. package/dist/esm/llm/openai/index.mjs +103 -1
  76. package/dist/esm/llm/openai/index.mjs.map +1 -1
  77. package/dist/esm/llm/openai/utils/index.mjs +88 -2
  78. package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
  79. package/dist/esm/llm/openrouter/index.mjs +175 -1
  80. package/dist/esm/llm/openrouter/index.mjs.map +1 -1
  81. package/dist/esm/llm/providers.mjs +2 -5
  82. package/dist/esm/llm/providers.mjs.map +1 -1
  83. package/dist/esm/llm/text.mjs.map +1 -1
  84. package/dist/esm/messages/core.mjs +14 -14
  85. package/dist/esm/messages/core.mjs.map +1 -1
  86. package/dist/esm/messages/ids.mjs.map +1 -1
  87. package/dist/esm/messages/prune.mjs.map +1 -1
  88. package/dist/esm/run.mjs +18 -1
  89. package/dist/esm/run.mjs.map +1 -1
  90. package/dist/esm/splitStream.mjs.map +1 -1
  91. package/dist/esm/stream.mjs +24 -1
  92. package/dist/esm/stream.mjs.map +1 -1
  93. package/dist/esm/tools/ToolNode.mjs +20 -1
  94. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  95. package/dist/esm/tools/handlers.mjs +30 -26
  96. package/dist/esm/tools/handlers.mjs.map +1 -1
  97. package/dist/esm/tools/search/anthropic.mjs.map +1 -1
  98. package/dist/esm/tools/search/content.mjs.map +1 -1
  99. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  100. package/dist/esm/tools/search/format.mjs.map +1 -1
  101. package/dist/esm/tools/search/highlights.mjs.map +1 -1
  102. package/dist/esm/tools/search/rerankers.mjs.map +1 -1
  103. package/dist/esm/tools/search/schema.mjs +27 -25
  104. package/dist/esm/tools/search/schema.mjs.map +1 -1
  105. package/dist/esm/tools/search/search.mjs +6 -1
  106. package/dist/esm/tools/search/search.mjs.map +1 -1
  107. package/dist/esm/tools/search/serper-scraper.mjs.map +1 -1
  108. package/dist/esm/tools/search/tool.mjs +182 -35
  109. package/dist/esm/tools/search/tool.mjs.map +1 -1
  110. package/dist/esm/tools/search/utils.mjs.map +1 -1
  111. package/dist/esm/utils/graph.mjs.map +1 -1
  112. package/dist/esm/utils/llm.mjs +0 -1
  113. package/dist/esm/utils/llm.mjs.map +1 -1
  114. package/dist/esm/utils/misc.mjs.map +1 -1
  115. package/dist/esm/utils/run.mjs.map +1 -1
  116. package/dist/esm/utils/title.mjs +7 -7
  117. package/dist/esm/utils/title.mjs.map +1 -1
  118. package/dist/types/common/enum.d.ts +1 -2
  119. package/dist/types/llm/bedrock/index.d.ts +36 -0
  120. package/dist/types/llm/openai/index.d.ts +1 -0
  121. package/dist/types/llm/openai/utils/index.d.ts +10 -1
  122. package/dist/types/llm/openrouter/index.d.ts +4 -1
  123. package/dist/types/tools/search/types.d.ts +2 -0
  124. package/dist/types/types/llm.d.ts +3 -8
  125. package/package.json +16 -12
  126. package/src/common/enum.ts +1 -2
  127. package/src/common/index.ts +1 -1
  128. package/src/events.ts +11 -0
  129. package/src/graphs/Graph.ts +2 -1
  130. package/src/instrumentation.ts +25 -22
  131. package/src/llm/anthropic/llm.spec.ts +1442 -1442
  132. package/src/llm/anthropic/types.ts +140 -140
  133. package/src/llm/anthropic/utils/message_inputs.ts +757 -660
  134. package/src/llm/anthropic/utils/output_parsers.ts +133 -133
  135. package/src/llm/anthropic/utils/tools.ts +29 -29
  136. package/src/llm/bedrock/index.ts +128 -0
  137. package/src/llm/fake.ts +133 -133
  138. package/src/llm/google/llm.spec.ts +3 -1
  139. package/src/llm/google/utils/tools.ts +160 -160
  140. package/src/llm/openai/index.ts +126 -0
  141. package/src/llm/openai/types.ts +24 -24
  142. package/src/llm/openai/utils/index.ts +116 -1
  143. package/src/llm/openai/utils/isReasoningModel.test.ts +90 -90
  144. package/src/llm/openrouter/index.ts +222 -1
  145. package/src/llm/providers.ts +2 -7
  146. package/src/llm/text.ts +94 -94
  147. package/src/messages/core.ts +463 -463
  148. package/src/messages/formatAgentMessages.tools.test.ts +400 -400
  149. package/src/messages/formatMessage.test.ts +693 -693
  150. package/src/messages/ids.ts +26 -26
  151. package/src/messages/prune.ts +567 -567
  152. package/src/messages/shiftIndexTokenCountMap.test.ts +81 -81
  153. package/src/mockStream.ts +98 -98
  154. package/src/prompts/collab.ts +5 -5
  155. package/src/prompts/index.ts +1 -1
  156. package/src/prompts/taskmanager.ts +61 -61
  157. package/src/run.ts +22 -4
  158. package/src/scripts/ant_web_search_edge_case.ts +162 -0
  159. package/src/scripts/ant_web_search_error_edge_case.ts +148 -0
  160. package/src/scripts/args.ts +48 -48
  161. package/src/scripts/caching.ts +123 -123
  162. package/src/scripts/code_exec_files.ts +193 -193
  163. package/src/scripts/empty_input.ts +137 -137
  164. package/src/scripts/memory.ts +97 -97
  165. package/src/scripts/test-tools-before-handoff.ts +1 -5
  166. package/src/scripts/thinking.ts +149 -149
  167. package/src/scripts/tools.ts +1 -4
  168. package/src/specs/anthropic.simple.test.ts +67 -0
  169. package/src/specs/spec.utils.ts +3 -3
  170. package/src/specs/token-distribution-edge-case.test.ts +316 -316
  171. package/src/specs/tool-error.test.ts +193 -193
  172. package/src/splitStream.test.ts +691 -691
  173. package/src/splitStream.ts +234 -234
  174. package/src/stream.test.ts +94 -94
  175. package/src/stream.ts +30 -1
  176. package/src/tools/ToolNode.ts +24 -1
  177. package/src/tools/handlers.ts +32 -28
  178. package/src/tools/search/anthropic.ts +51 -51
  179. package/src/tools/search/content.test.ts +173 -173
  180. package/src/tools/search/content.ts +147 -147
  181. package/src/tools/search/direct-url.test.ts +530 -0
  182. package/src/tools/search/firecrawl.ts +210 -210
  183. package/src/tools/search/format.ts +250 -250
  184. package/src/tools/search/highlights.ts +320 -320
  185. package/src/tools/search/index.ts +2 -2
  186. package/src/tools/search/jina-reranker.test.ts +126 -126
  187. package/src/tools/search/output.md +2775 -2775
  188. package/src/tools/search/rerankers.ts +242 -242
  189. package/src/tools/search/schema.ts +65 -63
  190. package/src/tools/search/search.ts +766 -759
  191. package/src/tools/search/serper-scraper.ts +155 -155
  192. package/src/tools/search/test.html +883 -883
  193. package/src/tools/search/test.md +642 -642
  194. package/src/tools/search/test.ts +159 -159
  195. package/src/tools/search/tool.ts +641 -471
  196. package/src/tools/search/types.ts +689 -687
  197. package/src/tools/search/utils.ts +79 -79
  198. package/src/types/index.ts +6 -6
  199. package/src/types/llm.ts +2 -8
  200. package/src/utils/graph.ts +10 -10
  201. package/src/utils/llm.ts +26 -27
  202. package/src/utils/llmConfig.ts +13 -5
  203. package/src/utils/logging.ts +48 -48
  204. package/src/utils/misc.ts +57 -57
  205. package/src/utils/run.ts +100 -100
  206. package/src/utils/title.ts +165 -165
  207. package/dist/cjs/llm/ollama/index.cjs +0 -70
  208. package/dist/cjs/llm/ollama/index.cjs.map +0 -1
  209. package/dist/cjs/llm/ollama/utils.cjs +0 -158
  210. package/dist/cjs/llm/ollama/utils.cjs.map +0 -1
  211. package/dist/esm/llm/ollama/index.mjs +0 -68
  212. package/dist/esm/llm/ollama/index.mjs.map +0 -1
  213. package/dist/esm/llm/ollama/utils.mjs +0 -155
  214. package/dist/esm/llm/ollama/utils.mjs.map +0 -1
  215. package/dist/types/llm/ollama/index.d.ts +0 -8
  216. package/dist/types/llm/ollama/utils.d.ts +0 -7
  217. package/src/llm/ollama/index.ts +0 -92
  218. package/src/llm/ollama/utils.ts +0 -193
  219. package/src/proto/CollabGraph.ts +0 -269
  220. package/src/proto/TaskManager.ts +0 -243
  221. package/src/proto/collab.ts +0 -200
  222. package/src/proto/collab_design.ts +0 -184
  223. package/src/proto/collab_design_v2.ts +0 -224
  224. package/src/proto/collab_design_v3.ts +0 -255
  225. package/src/proto/collab_design_v4.ts +0 -220
  226. package/src/proto/collab_design_v5.ts +0 -251
  227. package/src/proto/collab_graph.ts +0 -181
  228. package/src/proto/collab_original.ts +0 -123
  229. package/src/proto/example.ts +0 -93
  230. package/src/proto/example_new.ts +0 -68
  231. package/src/proto/example_old.ts +0 -201
  232. package/src/proto/example_test.ts +0 -152
  233. package/src/proto/example_test_anthropic.ts +0 -100
  234. package/src/proto/log_stream.ts +0 -202
  235. package/src/proto/main_collab_community_event.ts +0 -133
  236. package/src/proto/main_collab_design_v2.ts +0 -96
  237. package/src/proto/main_collab_design_v4.ts +0 -100
  238. package/src/proto/main_collab_design_v5.ts +0 -135
  239. package/src/proto/main_collab_global_analysis.ts +0 -122
  240. package/src/proto/main_collab_hackathon_event.ts +0 -153
  241. package/src/proto/main_collab_space_mission.ts +0 -153
  242. package/src/proto/main_philosophy.ts +0 -210
  243. package/src/proto/original_script.ts +0 -126
  244. package/src/proto/standard.ts +0 -100
  245. package/src/proto/stream.ts +0 -56
  246. package/src/proto/tasks.ts +0 -118
  247. package/src/proto/tools/global_analysis_tools.ts +0 -86
  248. package/src/proto/tools/space_mission_tools.ts +0 -60
  249. package/src/proto/vertexai.ts +0 -54
  250. package/src/scripts/image.ts +0 -178
package/src/llm/openrouter/index.ts CHANGED
@@ -1,4 +1,7 @@
  import { ChatOpenAI } from '@/llm/openai';
+ import { ChatGenerationChunk } from '@langchain/core/outputs';
+ import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
+ import { AIMessageChunk as AIMessageChunkClass } from '@langchain/core/messages';
  import type {
    FunctionMessageChunk,
    SystemMessageChunk,
@@ -6,12 +9,25 @@ import type {
    ToolMessageChunk,
    ChatMessageChunk,
    AIMessageChunk,
+   BaseMessage,
  } from '@langchain/core/messages';
  import type {
    ChatOpenAICallOptions,
    OpenAIChatInput,
    OpenAIClient,
  } from '@langchain/openai';
+ import { _convertMessagesToOpenAIParams } from '@/llm/openai/utils';
+
+ type OpenAICompletionParam =
+   OpenAIClient.Chat.Completions.ChatCompletionMessageParam;
+
+ type OpenAIRoleEnum =
+   | 'system'
+   | 'developer'
+   | 'assistant'
+   | 'user'
+   | 'function'
+   | 'tool';
 
  export interface ChatOpenRouterCallOptions extends ChatOpenAICallOptions {
    include_reasoning?: boolean;
@@ -54,7 +70,212 @@ export class ChatOpenRouter extends ChatOpenAI {
        rawResponse,
        defaultRole
      );
-     messageChunk.additional_kwargs.reasoning = delta.reasoning;
+     if (delta.reasoning != null) {
+       messageChunk.additional_kwargs.reasoning = delta.reasoning;
+     }
+     if (delta.reasoning_details != null) {
+       messageChunk.additional_kwargs.reasoning_details =
+         delta.reasoning_details;
+     }
      return messageChunk;
    }
+
+   async *_streamResponseChunks2(
+     messages: BaseMessage[],
+     options: this['ParsedCallOptions'],
+     runManager?: CallbackManagerForLLMRun
+   ): AsyncGenerator<ChatGenerationChunk> {
+     const messagesMapped: OpenAICompletionParam[] =
+       _convertMessagesToOpenAIParams(messages, this.model, {
+         includeReasoningDetails: true,
+         convertReasoningDetailsToContent: true,
+       });
+
+     const params = {
+       ...this.invocationParams(options, {
+         streaming: true,
+       }),
+       messages: messagesMapped,
+       stream: true as const,
+     };
+     let defaultRole: OpenAIRoleEnum | undefined;
+
+     const streamIterable = await this.completionWithRetry(params, options);
+     let usage: OpenAIClient.Completions.CompletionUsage | undefined;
+
+     // Store reasoning_details keyed by unique identifier to prevent incorrect merging
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     const reasoningTextByIndex: Map<number, Record<string, any>> = new Map();
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     const reasoningEncryptedById: Map<string, Record<string, any>> = new Map();
+
+     for await (const data of streamIterable) {
+       const choice = data.choices[0] as
+         | Partial<OpenAIClient.Chat.Completions.ChatCompletionChunk.Choice>
+         | undefined;
+       if (data.usage) {
+         usage = data.usage;
+       }
+       if (!choice) {
+         continue;
+       }
+
+       const { delta } = choice;
+       if (!delta) {
+         continue;
+       }
+
+       // Accumulate reasoning_details from each delta
+       // eslint-disable-next-line @typescript-eslint/no-explicit-any
+       const deltaAny = delta as Record<string, any>;
+       if (
+         deltaAny.reasoning_details != null &&
+         Array.isArray(deltaAny.reasoning_details)
+       ) {
+         for (const detail of deltaAny.reasoning_details) {
+           // For encrypted reasoning (thought signatures), store by ID - MUST be separate
+           if (detail.type === 'reasoning.encrypted' && detail.id) {
+             reasoningEncryptedById.set(detail.id, {
+               type: detail.type,
+               id: detail.id,
+               data: detail.data,
+               format: detail.format,
+               index: detail.index,
+             });
+           } else if (detail.type === 'reasoning.text') {
+             // For text reasoning, accumulate text by index
+             const idx = detail.index ?? 0;
+             const existing = reasoningTextByIndex.get(idx);
+             if (existing) {
+               // Only append text, keep other fields from first entry
+               existing.text = (existing.text || '') + (detail.text || '');
+             } else {
+               reasoningTextByIndex.set(idx, {
+                 type: detail.type,
+                 text: detail.text || '',
+                 format: detail.format,
+                 index: idx,
+               });
+             }
+           }
+         }
+       }
+
+       const chunk = this._convertOpenAIDeltaToBaseMessageChunk(
+         delta,
+         data,
+         defaultRole
+       );
+
+       // IMPORTANT: Only set reasoning_details on the FINAL chunk to prevent
+       // LangChain's chunk concatenation from corrupting the array
+       // Check if this is the final chunk (has finish_reason)
+       if (choice.finish_reason != null) {
+         // Build properly structured reasoning_details array
+         // Text entries first (but we only need the encrypted ones for thought signatures)
+         // eslint-disable-next-line @typescript-eslint/no-explicit-any
+         const finalReasoningDetails: Record<string, any>[] = [
+           ...reasoningTextByIndex.values(),
+           ...reasoningEncryptedById.values(),
+         ];
+
+         if (finalReasoningDetails.length > 0) {
+           chunk.additional_kwargs.reasoning_details = finalReasoningDetails;
+         }
+       } else {
+         // Clear reasoning_details from intermediate chunks to prevent concatenation issues
+         delete chunk.additional_kwargs.reasoning_details;
+       }
+
+       defaultRole = delta.role ?? defaultRole;
+       const newTokenIndices = {
+         prompt: options.promptIndex ?? 0,
+         completion: choice.index ?? 0,
+       };
+       if (typeof chunk.content !== 'string') {
+         // eslint-disable-next-line no-console
+         console.log(
+           '[WARNING]: Received non-string content from OpenAI. This is currently not supported.'
+         );
+         continue;
+       }
+       // eslint-disable-next-line @typescript-eslint/no-explicit-any
+       const generationInfo: Record<string, any> = { ...newTokenIndices };
+       if (choice.finish_reason != null) {
+         generationInfo.finish_reason = choice.finish_reason;
+         generationInfo.system_fingerprint = data.system_fingerprint;
+         generationInfo.model_name = data.model;
+         generationInfo.service_tier = data.service_tier;
+       }
+       if (this.logprobs == true) {
+         generationInfo.logprobs = choice.logprobs;
+       }
+       const generationChunk = new ChatGenerationChunk({
+         message: chunk,
+         text: chunk.content,
+         generationInfo,
+       });
+       yield generationChunk;
+       if (this._lc_stream_delay != null) {
+         await new Promise((resolve) =>
+           setTimeout(resolve, this._lc_stream_delay)
+         );
+       }
+       await runManager?.handleLLMNewToken(
+         generationChunk.text || '',
+         newTokenIndices,
+         undefined,
+         undefined,
+         undefined,
+         { chunk: generationChunk }
+       );
+     }
+     if (usage) {
+       const inputTokenDetails = {
+         ...(usage.prompt_tokens_details?.audio_tokens != null && {
+           audio: usage.prompt_tokens_details.audio_tokens,
+         }),
+         ...(usage.prompt_tokens_details?.cached_tokens != null && {
+           cache_read: usage.prompt_tokens_details.cached_tokens,
+         }),
+       };
+       const outputTokenDetails = {
+         ...(usage.completion_tokens_details?.audio_tokens != null && {
+           audio: usage.completion_tokens_details.audio_tokens,
+         }),
+         ...(usage.completion_tokens_details?.reasoning_tokens != null && {
+           reasoning: usage.completion_tokens_details.reasoning_tokens,
+         }),
+       };
+       const generationChunk = new ChatGenerationChunk({
+         message: new AIMessageChunkClass({
+           content: '',
+           response_metadata: {
+             usage: { ...usage },
+           },
+           usage_metadata: {
+             input_tokens: usage.prompt_tokens,
+             output_tokens: usage.completion_tokens,
+             total_tokens: usage.total_tokens,
+             ...(Object.keys(inputTokenDetails).length > 0 && {
+               input_token_details: inputTokenDetails,
+             }),
+             ...(Object.keys(outputTokenDetails).length > 0 && {
+               output_token_details: outputTokenDetails,
+             }),
+           },
+         }),
+         text: '',
+       });
+       yield generationChunk;
+       if (this._lc_stream_delay != null) {
+         await new Promise((resolve) =>
+           setTimeout(resolve, this._lc_stream_delay)
+         );
+       }
+     }
+     if (options.signal?.aborted === true) {
+       throw new Error('AbortError');
+     }
+   }
  }
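
The new _streamResponseChunks2 method handles the two kinds of OpenRouter reasoning deltas differently: reasoning.text fragments are concatenated per index, reasoning.encrypted entries (thought signatures) are kept distinct by id, and the merged array is attached only to the final chunk so that LangChain's chunk concatenation cannot corrupt it. The following standalone TypeScript sketch isolates that accumulation strategy; the ReasoningDetail shape and the mergeReasoningDeltas helper are illustrative only and not part of the package's API.

// Illustrative sketch only: the accumulation strategy used by _streamResponseChunks2,
// separated from the LangChain streaming plumbing.
type ReasoningDetail = {
  type: 'reasoning.text' | 'reasoning.encrypted';
  id?: string;
  text?: string;
  data?: string;
  format?: string;
  index?: number;
};

function mergeReasoningDeltas(deltas: ReasoningDetail[][]): ReasoningDetail[] {
  // Text entries accumulate by index; encrypted entries stay distinct by id.
  const textByIndex = new Map<number, ReasoningDetail>();
  const encryptedById = new Map<string, ReasoningDetail>();

  for (const details of deltas) {
    for (const detail of details) {
      if (detail.type === 'reasoning.encrypted' && detail.id != null) {
        encryptedById.set(detail.id, { ...detail });
      } else if (detail.type === 'reasoning.text') {
        const idx = detail.index ?? 0;
        const existing = textByIndex.get(idx);
        if (existing) {
          // Append streamed text; keep the other fields from the first entry.
          existing.text = (existing.text ?? '') + (detail.text ?? '');
        } else {
          textByIndex.set(idx, { ...detail, text: detail.text ?? '', index: idx });
        }
      }
    }
  }

  // This combined array (text entries first, then encrypted entries) is what the
  // method assigns to additional_kwargs.reasoning_details on the final chunk.
  return [...textByIndex.values(), ...encryptedById.values()];
}
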
package/src/llm/providers.ts CHANGED
@@ -1,8 +1,5 @@
  // src/llm/providers.ts
  import { ChatMistralAI } from '@langchain/mistralai';
- import { ChatBedrockConverse } from '@langchain/aws';
- // import { ChatAnthropic } from '@langchain/anthropic';
- // import { ChatVertexAI } from '@langchain/google-vertexai';
  import type {
    ChatModelConstructorMap,
    ProviderOptionsMap,
@@ -15,16 +12,15 @@ import {
    ChatXAI,
  } from '@/llm/openai';
  import { CustomChatGoogleGenerativeAI } from '@/llm/google';
+ import { CustomChatBedrockConverse } from '@/llm/bedrock';
  import { CustomAnthropic } from '@/llm/anthropic';
  import { ChatOpenRouter } from '@/llm/openrouter';
  import { ChatVertexAI } from '@/llm/vertexai';
- import { ChatOllama } from '@/llm/ollama';
  import { Providers } from '@/common';
 
  export const llmProviders: Partial<ChatModelConstructorMap> = {
    [Providers.XAI]: ChatXAI,
    [Providers.OPENAI]: ChatOpenAI,
-   [Providers.OLLAMA]: ChatOllama,
    [Providers.AZURE]: AzureChatOpenAI,
    [Providers.VERTEXAI]: ChatVertexAI,
    [Providers.DEEPSEEK]: ChatDeepSeek,
@@ -32,7 +28,7 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
    [Providers.MISTRAL]: ChatMistralAI,
    [Providers.ANTHROPIC]: CustomAnthropic,
    [Providers.OPENROUTER]: ChatOpenRouter,
-   [Providers.BEDROCK]: ChatBedrockConverse,
+   [Providers.BEDROCK]: CustomChatBedrockConverse,
    // [Providers.ANTHROPIC]: ChatAnthropic,
    [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,
  };
@@ -40,7 +36,6 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
  export const manualToolStreamProviders = new Set<Providers | string>([
    Providers.ANTHROPIC,
    Providers.BEDROCK,
-   Providers.OLLAMA,
  ]);
 
  export const getChatModelClass = <P extends Providers>(
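
For context, here is a hedged usage sketch of the updated provider map. It is assumed consumer code, not taken from the package, and the '@/' import aliases mirror the package-internal paths shown in the diff above. After this change the Bedrock entry resolves to the in-repo CustomChatBedrockConverse wrapper rather than ChatBedrockConverse from '@langchain/aws', and the Ollama provider is gone from both llmProviders and manualToolStreamProviders.

// Assumed usage sketch (not part of the package): resolving a constructor from
// the provider map exported by src/llm/providers.ts.
import { llmProviders, manualToolStreamProviders } from '@/llm/providers';
import { Providers } from '@/common';

// As of 1.0.10 this is CustomChatBedrockConverse (the new in-repo wrapper),
// no longer ChatBedrockConverse from '@langchain/aws'.
const BedrockChatModel = llmProviders[Providers.BEDROCK];

// Anthropic and Bedrock still require manual tool-call streaming; Ollama was
// removed from this set along with the rest of the Ollama provider code.
const manualStreaming = manualToolStreamProviders.has(Providers.BEDROCK); // true

console.log(BedrockChatModel?.name, manualStreaming);
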
package/src/llm/text.ts CHANGED
@@ -1,94 +1,94 @@

(Every one of the 94 lines is removed and re-added with identical text, apparently a whitespace or line-ending-only rewrite; the unchanged file content is shown once below.)

export interface TextStreamOptions {
  minChunkSize?: number;
  maxChunkSize?: number;
  delay?: number;
  firstWordChunk?: boolean;
}

export type ProgressCallback = (chunk: string) => void;
export type PostChunkCallback = (chunk: string) => void;

export class TextStream {
  private text: string;
  private currentIndex: number;
  private minChunkSize: number;
  private maxChunkSize: number;
  private delay: number;
  private firstWordChunk: boolean;

  constructor(text: string, options: TextStreamOptions = {}) {
    this.text = text;
    this.currentIndex = 0;
    this.minChunkSize = options.minChunkSize ?? 4;
    this.maxChunkSize = options.maxChunkSize ?? 8;
    this.delay = options.delay ?? 20;
    this.firstWordChunk = options.firstWordChunk ?? true;
  }

  private randomInt(min: number, max: number): number {
    return Math.floor(Math.random() * (max - min)) + min;
  }

  private static readonly BOUNDARIES = new Set([
    ' ',
    '.',
    ',',
    '!',
    '?',
    ';',
    ':',
  ]);

  private findFirstWordBoundary(text: string, minSize: number): number {
    if (minSize >= text.length) return text.length;

    // Ensure we meet the minimum size first
    let pos = minSize;

    // Look forward until we find a boundary
    while (pos < text.length) {
      if (TextStream.BOUNDARIES.has(text[pos])) {
        return pos + 1; // Include the boundary character
      }
      pos++;
    }

    return text.length; // If no boundary found, return entire remaining text
  }

  async *generateText(
    signal?: AbortSignal,
    progressCallback?: ProgressCallback
  ): AsyncGenerator<string, void, unknown> {
    const { delay, minChunkSize, maxChunkSize } = this;

    while (this.currentIndex < this.text.length) {
      if (signal?.aborted === true) {
        break;
      }
      await new Promise((resolve) => setTimeout(resolve, delay));

      const remainingText = this.text.slice(this.currentIndex);
      let chunkSize: number;

      if (this.firstWordChunk) {
        chunkSize = this.findFirstWordBoundary(remainingText, minChunkSize);
      } else {
        const remainingChars = remainingText.length;
        chunkSize = Math.min(
          this.randomInt(minChunkSize, maxChunkSize + 1),
          remainingChars
        );
      }

      const chunk = this.text.slice(
        this.currentIndex,
        this.currentIndex + chunkSize
      );
      progressCallback?.(chunk);

      yield chunk;
      this.currentIndex += chunkSize;
    }
  }
}
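
Although src/llm/text.ts changed only in formatting, the TextStream class it exports is shown in full above, so a minimal usage sketch follows. This is assumed consumer code, not from the package's documentation: it streams a string in word-boundary-aligned chunks and stops early via an AbortSignal.

// Assumed usage sketch for the TextStream class shown above.
import { TextStream } from '@/llm/text';

async function streamDemo(): Promise<void> {
  const stream = new TextStream('Hello world, this is a chunked stream.', {
    minChunkSize: 4,      // defaults shown in the constructor above
    maxChunkSize: 8,
    delay: 20,            // ms pause before each chunk
    firstWordChunk: true, // cut chunks at word/punctuation boundaries
  });

  const controller = new AbortController();
  // Abort after 500 ms to exercise the signal check inside generateText().
  setTimeout(() => controller.abort(), 500);

  for await (const chunk of stream.generateText(controller.signal)) {
    process.stdout.write(chunk);
  }
}

void streamDemo();
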