illuma-agents 1.0.7 → 1.0.9

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in their public registries.
Files changed (237)
  1. package/LICENSE +1 -5
  2. package/dist/cjs/common/enum.cjs +1 -2
  3. package/dist/cjs/common/enum.cjs.map +1 -1
  4. package/dist/cjs/instrumentation.cjs.map +1 -1
  5. package/dist/cjs/llm/anthropic/types.cjs.map +1 -1
  6. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +79 -2
  7. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -1
  8. package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -1
  9. package/dist/cjs/llm/bedrock/index.cjs +99 -0
  10. package/dist/cjs/llm/bedrock/index.cjs.map +1 -0
  11. package/dist/cjs/llm/fake.cjs.map +1 -1
  12. package/dist/cjs/llm/google/index.cjs +78 -9
  13. package/dist/cjs/llm/google/index.cjs.map +1 -1
  14. package/dist/cjs/llm/google/utils/common.cjs +185 -28
  15. package/dist/cjs/llm/google/utils/common.cjs.map +1 -1
  16. package/dist/cjs/llm/providers.cjs +13 -16
  17. package/dist/cjs/llm/providers.cjs.map +1 -1
  18. package/dist/cjs/llm/text.cjs.map +1 -1
  19. package/dist/cjs/messages/core.cjs +14 -14
  20. package/dist/cjs/messages/core.cjs.map +1 -1
  21. package/dist/cjs/messages/ids.cjs.map +1 -1
  22. package/dist/cjs/messages/prune.cjs.map +1 -1
  23. package/dist/cjs/run.cjs +10 -1
  24. package/dist/cjs/run.cjs.map +1 -1
  25. package/dist/cjs/splitStream.cjs.map +1 -1
  26. package/dist/cjs/stream.cjs +4 -1
  27. package/dist/cjs/stream.cjs.map +1 -1
  28. package/dist/cjs/tools/ToolNode.cjs +163 -55
  29. package/dist/cjs/tools/ToolNode.cjs.map +1 -1
  30. package/dist/cjs/tools/handlers.cjs +29 -25
  31. package/dist/cjs/tools/handlers.cjs.map +1 -1
  32. package/dist/cjs/tools/search/anthropic.cjs.map +1 -1
  33. package/dist/cjs/tools/search/content.cjs.map +1 -1
  34. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
  35. package/dist/cjs/tools/search/format.cjs.map +1 -1
  36. package/dist/cjs/tools/search/highlights.cjs.map +1 -1
  37. package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
  38. package/dist/cjs/tools/search/schema.cjs +25 -25
  39. package/dist/cjs/tools/search/schema.cjs.map +1 -1
  40. package/dist/cjs/tools/search/search.cjs +6 -1
  41. package/dist/cjs/tools/search/search.cjs.map +1 -1
  42. package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -1
  43. package/dist/cjs/tools/search/tool.cjs +162 -35
  44. package/dist/cjs/tools/search/tool.cjs.map +1 -1
  45. package/dist/cjs/tools/search/utils.cjs.map +1 -1
  46. package/dist/cjs/utils/graph.cjs.map +1 -1
  47. package/dist/cjs/utils/llm.cjs +0 -1
  48. package/dist/cjs/utils/llm.cjs.map +1 -1
  49. package/dist/cjs/utils/misc.cjs.map +1 -1
  50. package/dist/cjs/utils/run.cjs.map +1 -1
  51. package/dist/cjs/utils/title.cjs +7 -7
  52. package/dist/cjs/utils/title.cjs.map +1 -1
  53. package/dist/esm/common/enum.mjs +1 -2
  54. package/dist/esm/common/enum.mjs.map +1 -1
  55. package/dist/esm/instrumentation.mjs.map +1 -1
  56. package/dist/esm/llm/anthropic/types.mjs.map +1 -1
  57. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +79 -2
  58. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -1
  59. package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -1
  60. package/dist/esm/llm/bedrock/index.mjs +97 -0
  61. package/dist/esm/llm/bedrock/index.mjs.map +1 -0
  62. package/dist/esm/llm/fake.mjs.map +1 -1
  63. package/dist/esm/llm/google/index.mjs +79 -10
  64. package/dist/esm/llm/google/index.mjs.map +1 -1
  65. package/dist/esm/llm/google/utils/common.mjs +184 -30
  66. package/dist/esm/llm/google/utils/common.mjs.map +1 -1
  67. package/dist/esm/llm/providers.mjs +2 -5
  68. package/dist/esm/llm/providers.mjs.map +1 -1
  69. package/dist/esm/llm/text.mjs.map +1 -1
  70. package/dist/esm/messages/core.mjs +14 -14
  71. package/dist/esm/messages/core.mjs.map +1 -1
  72. package/dist/esm/messages/ids.mjs.map +1 -1
  73. package/dist/esm/messages/prune.mjs.map +1 -1
  74. package/dist/esm/run.mjs +10 -1
  75. package/dist/esm/run.mjs.map +1 -1
  76. package/dist/esm/splitStream.mjs.map +1 -1
  77. package/dist/esm/stream.mjs +4 -1
  78. package/dist/esm/stream.mjs.map +1 -1
  79. package/dist/esm/tools/ToolNode.mjs +164 -56
  80. package/dist/esm/tools/ToolNode.mjs.map +1 -1
  81. package/dist/esm/tools/handlers.mjs +30 -26
  82. package/dist/esm/tools/handlers.mjs.map +1 -1
  83. package/dist/esm/tools/search/anthropic.mjs.map +1 -1
  84. package/dist/esm/tools/search/content.mjs.map +1 -1
  85. package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
  86. package/dist/esm/tools/search/format.mjs.map +1 -1
  87. package/dist/esm/tools/search/highlights.mjs.map +1 -1
  88. package/dist/esm/tools/search/rerankers.mjs.map +1 -1
  89. package/dist/esm/tools/search/schema.mjs +25 -25
  90. package/dist/esm/tools/search/schema.mjs.map +1 -1
  91. package/dist/esm/tools/search/search.mjs +6 -1
  92. package/dist/esm/tools/search/search.mjs.map +1 -1
  93. package/dist/esm/tools/search/serper-scraper.mjs.map +1 -1
  94. package/dist/esm/tools/search/tool.mjs +162 -35
  95. package/dist/esm/tools/search/tool.mjs.map +1 -1
  96. package/dist/esm/tools/search/utils.mjs.map +1 -1
  97. package/dist/esm/utils/graph.mjs.map +1 -1
  98. package/dist/esm/utils/llm.mjs +0 -1
  99. package/dist/esm/utils/llm.mjs.map +1 -1
  100. package/dist/esm/utils/misc.mjs.map +1 -1
  101. package/dist/esm/utils/run.mjs.map +1 -1
  102. package/dist/esm/utils/title.mjs +7 -7
  103. package/dist/esm/utils/title.mjs.map +1 -1
  104. package/dist/types/common/enum.d.ts +1 -2
  105. package/dist/types/llm/bedrock/index.d.ts +36 -0
  106. package/dist/types/llm/google/index.d.ts +10 -0
  107. package/dist/types/llm/google/types.d.ts +11 -1
  108. package/dist/types/llm/google/utils/common.d.ts +17 -2
  109. package/dist/types/tools/ToolNode.d.ts +9 -1
  110. package/dist/types/tools/search/types.d.ts +2 -0
  111. package/dist/types/types/llm.d.ts +3 -8
  112. package/dist/types/types/tools.d.ts +1 -1
  113. package/package.json +15 -11
  114. package/src/common/enum.ts +1 -2
  115. package/src/common/index.ts +1 -1
  116. package/src/instrumentation.ts +22 -22
  117. package/src/llm/anthropic/llm.spec.ts +1442 -1442
  118. package/src/llm/anthropic/types.ts +140 -140
  119. package/src/llm/anthropic/utils/message_inputs.ts +757 -660
  120. package/src/llm/anthropic/utils/output_parsers.ts +133 -133
  121. package/src/llm/anthropic/utils/tools.ts +29 -29
  122. package/src/llm/bedrock/index.ts +128 -0
  123. package/src/llm/fake.ts +133 -133
  124. package/src/llm/google/data/gettysburg10.wav +0 -0
  125. package/src/llm/google/data/hotdog.jpg +0 -0
  126. package/src/llm/google/index.ts +129 -14
  127. package/src/llm/google/llm.spec.ts +932 -0
  128. package/src/llm/google/types.ts +56 -43
  129. package/src/llm/google/utils/common.ts +873 -660
  130. package/src/llm/google/utils/tools.ts +160 -160
  131. package/src/llm/openai/types.ts +24 -24
  132. package/src/llm/openai/utils/isReasoningModel.test.ts +90 -90
  133. package/src/llm/providers.ts +2 -7
  134. package/src/llm/text.ts +94 -94
  135. package/src/messages/core.ts +463 -463
  136. package/src/messages/formatAgentMessages.tools.test.ts +400 -400
  137. package/src/messages/formatMessage.test.ts +693 -693
  138. package/src/messages/ids.ts +26 -26
  139. package/src/messages/prune.ts +567 -567
  140. package/src/messages/shiftIndexTokenCountMap.test.ts +81 -81
  141. package/src/mockStream.ts +98 -98
  142. package/src/prompts/collab.ts +5 -5
  143. package/src/prompts/index.ts +1 -1
  144. package/src/prompts/taskmanager.ts +61 -61
  145. package/src/run.ts +13 -4
  146. package/src/scripts/ant_web_search_edge_case.ts +162 -0
  147. package/src/scripts/ant_web_search_error_edge_case.ts +148 -0
  148. package/src/scripts/args.ts +48 -48
  149. package/src/scripts/caching.ts +123 -123
  150. package/src/scripts/code_exec_files.ts +193 -193
  151. package/src/scripts/empty_input.ts +137 -137
  152. package/src/scripts/image.ts +178 -178
  153. package/src/scripts/memory.ts +97 -97
  154. package/src/scripts/thinking.ts +149 -149
  155. package/src/specs/anthropic.simple.test.ts +67 -0
  156. package/src/specs/spec.utils.ts +3 -3
  157. package/src/specs/token-distribution-edge-case.test.ts +316 -316
  158. package/src/specs/tool-error.test.ts +193 -193
  159. package/src/splitStream.test.ts +691 -691
  160. package/src/splitStream.ts +234 -234
  161. package/src/stream.test.ts +94 -94
  162. package/src/stream.ts +4 -1
  163. package/src/tools/ToolNode.ts +206 -64
  164. package/src/tools/handlers.ts +32 -28
  165. package/src/tools/search/anthropic.ts +51 -51
  166. package/src/tools/search/content.test.ts +173 -173
  167. package/src/tools/search/content.ts +147 -147
  168. package/src/tools/search/direct-url.test.ts +530 -0
  169. package/src/tools/search/firecrawl.ts +210 -210
  170. package/src/tools/search/format.ts +250 -250
  171. package/src/tools/search/highlights.ts +320 -320
  172. package/src/tools/search/index.ts +2 -2
  173. package/src/tools/search/jina-reranker.test.ts +126 -126
  174. package/src/tools/search/output.md +2775 -2775
  175. package/src/tools/search/rerankers.ts +242 -242
  176. package/src/tools/search/schema.ts +63 -63
  177. package/src/tools/search/search.ts +766 -759
  178. package/src/tools/search/serper-scraper.ts +155 -155
  179. package/src/tools/search/test.html +883 -883
  180. package/src/tools/search/test.md +642 -642
  181. package/src/tools/search/test.ts +159 -159
  182. package/src/tools/search/tool.ts +619 -471
  183. package/src/tools/search/types.ts +689 -687
  184. package/src/tools/search/utils.ts +79 -79
  185. package/src/types/index.ts +6 -6
  186. package/src/types/llm.ts +2 -8
  187. package/src/types/tools.ts +80 -80
  188. package/src/utils/graph.ts +10 -10
  189. package/src/utils/llm.ts +26 -27
  190. package/src/utils/llmConfig.ts +5 -3
  191. package/src/utils/logging.ts +48 -48
  192. package/src/utils/misc.ts +57 -57
  193. package/src/utils/run.ts +100 -100
  194. package/src/utils/title.ts +165 -165
  195. package/dist/cjs/llm/ollama/index.cjs +0 -70
  196. package/dist/cjs/llm/ollama/index.cjs.map +0 -1
  197. package/dist/cjs/llm/ollama/utils.cjs +0 -158
  198. package/dist/cjs/llm/ollama/utils.cjs.map +0 -1
  199. package/dist/esm/llm/ollama/index.mjs +0 -68
  200. package/dist/esm/llm/ollama/index.mjs.map +0 -1
  201. package/dist/esm/llm/ollama/utils.mjs +0 -155
  202. package/dist/esm/llm/ollama/utils.mjs.map +0 -1
  203. package/dist/types/llm/ollama/index.d.ts +0 -8
  204. package/dist/types/llm/ollama/utils.d.ts +0 -7
  205. package/src/llm/ollama/index.ts +0 -92
  206. package/src/llm/ollama/utils.ts +0 -193
  207. package/src/proto/CollabGraph.ts +0 -269
  208. package/src/proto/TaskManager.ts +0 -243
  209. package/src/proto/collab.ts +0 -200
  210. package/src/proto/collab_design.ts +0 -184
  211. package/src/proto/collab_design_v2.ts +0 -224
  212. package/src/proto/collab_design_v3.ts +0 -255
  213. package/src/proto/collab_design_v4.ts +0 -220
  214. package/src/proto/collab_design_v5.ts +0 -251
  215. package/src/proto/collab_graph.ts +0 -181
  216. package/src/proto/collab_original.ts +0 -123
  217. package/src/proto/example.ts +0 -93
  218. package/src/proto/example_new.ts +0 -68
  219. package/src/proto/example_old.ts +0 -201
  220. package/src/proto/example_test.ts +0 -152
  221. package/src/proto/example_test_anthropic.ts +0 -100
  222. package/src/proto/log_stream.ts +0 -202
  223. package/src/proto/main_collab_community_event.ts +0 -133
  224. package/src/proto/main_collab_design_v2.ts +0 -96
  225. package/src/proto/main_collab_design_v4.ts +0 -100
  226. package/src/proto/main_collab_design_v5.ts +0 -135
  227. package/src/proto/main_collab_global_analysis.ts +0 -122
  228. package/src/proto/main_collab_hackathon_event.ts +0 -153
  229. package/src/proto/main_collab_space_mission.ts +0 -153
  230. package/src/proto/main_philosophy.ts +0 -210
  231. package/src/proto/original_script.ts +0 -126
  232. package/src/proto/standard.ts +0 -100
  233. package/src/proto/stream.ts +0 -56
  234. package/src/proto/tasks.ts +0 -118
  235. package/src/proto/tools/global_analysis_tools.ts +0 -86
  236. package/src/proto/tools/space_mission_tools.ts +0 -60
  237. package/src/proto/vertexai.ts +0 -54
package/src/proto/log_stream.ts
@@ -1,202 +0,0 @@
- import fs from 'fs/promises';
- import { pull } from 'langchain/hub';
- import { ChatOpenAI } from '@langchain/openai';
- import type { ChatPromptTemplate } from '@langchain/core/prompts';
- import { AgentExecutor, createOpenAIFunctionsAgent, AgentStep } from 'langchain/agents';
- import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
- import type { RunLogPatch } from '@langchain/core/tracers/log_stream';
- import dotenv from 'dotenv';
-
- type ExtractedJSONPatchOperation = Pick<RunLogPatch, 'ops'>;
- type OperationType = ExtractedJSONPatchOperation extends { ops: (infer T)[] } ? T : never;
-
- // Load environment variables from .env file
- dotenv.config();
-
- // Define the tools the agent will have access to.
- const tools = [new TavilySearchResults({})];
-
- const llm = new ChatOpenAI({
-   model: 'gpt-3.5-turbo-1106',
-   temperature: 0,
-   streaming: true,
- });
-
- // Get the prompt to use - you can modify this!
- // If you want to see the prompt in full, you can at:
- // https://smith.langchain.com/hub/hwchase17/openai-functions-agent
- const prompt = await pull<ChatPromptTemplate>(
-   'hwchase17/openai-functions-agent'
- );
-
- const agent = await createOpenAIFunctionsAgent({
-   llm,
-   tools,
-   prompt,
- });
-
- const agentExecutor = new AgentExecutor({
-   agent,
-   tools,
- });
-
- const logStream = await agentExecutor.streamLog({
-   input: 'what are the current US election polls 2024. today is 7/6/24',
- });
-
- const finalState: RunLogPatch[] = [];
- const outputs: RunLogPatch[] = [];
- let accumulatedOutput = '';
- let accumulatedArguments = '';
-
- let functionName: string | undefined = undefined;
-
- function processStreamedOutput(op: any) {
-   let output = '';
-   if (op.value.text !== undefined) {
-     output += op.value.text;
-   }
-   if (op.value.message && op.value.message.kwargs) {
-     const kwargs = op.value.message.kwargs;
-     if (kwargs.content) {
-       output += kwargs.content;
-     }
-   }
-   if (output) {
-     accumulatedOutput += output;
-     process.stdout.write(output);
-   }
- }
-
- // A helper function to handle the event pattern for logged arguments
- function handleLoggedArgument(loggedArgument: any) {
-   if (loggedArgument.value?.message?.additional_kwargs?.function_call) {
-     const functionCall = loggedArgument.value.message.additional_kwargs.function_call;
-
-     if (functionCall.name) {
-       functionName = functionCall.name;
-       process.stdout.write(`Logged Function Name:
- ${JSON.stringify(functionCall, null, 2)}
- `);
-     }
-
-     if (functionCall.arguments) {
-       accumulatedArguments += functionCall.arguments;
-       // Print the part of the argument as it comes
-       // process.stdout.write(`Logged Argument: { "arguments": "${functionCall.arguments}" }\n`);
-       process.stdout.write(`Logged Argument:\n${JSON.stringify(functionCall, null, 2)}`);
-     }
-
-     // Check if the full arguments string has been accumulated
-     if (accumulatedArguments.startsWith('{') && accumulatedArguments.endsWith('}')) {
-       // Build the final logged argument string
-       const completeArguments = accumulatedArguments;
-       const namePart = functionName ? `"name": "${functionName}", ` : '';
-
-       console.log(`\nLogged Argument: {\n ${namePart}"arguments": ${completeArguments}\n}\n`);
-
-       // Reset accumulators
-       accumulatedArguments = '';
-       functionName = undefined;
-     }
-   }
- }
-
- for await (const chunk of logStream) {
-   finalState.push(chunk);
-   outputs.push(chunk);
-
-   if (!chunk.ops) continue;
-
-   for (const op of chunk.ops) {
-     if (isStreamedOutput(op)) {
-       processStreamedOutput(op);
-       if (hasFunctionCall(op)) {
-         handleLoggedArgument(op);
-       }
-     } else if (isFinalOutput(op)) {
-       printFinalOutput(op);
-     }
-   }
- }
-
- function isStreamedOutput(op: OperationType) {
-   return op.op === 'add' && (
-     op.path.includes('/streamed_output/-') ||
-     op.path.includes('/streamed_output_str/-')
-   );
- }
-
- function hasFunctionCall(op: OperationType) {
-   return (op as any)?.value?.message?.additional_kwargs?.function_call;
- }
-
- function isFinalOutput(op: OperationType) {
-   return op.op === 'add' &&
-     op.value?.output &&
-     op.path?.startsWith('/logs/') &&
-     op.path?.endsWith('final_output') &&
-     !op.path?.includes('Runnable');
- }
-
- function printFinalOutput(op: OperationType) {
-   process.stdout.write(JSON.stringify(op, null, 2));
-   process.stdout.write(`
-
- ########################_START_##########################
- ${JSON.stringify((op as any)?.value?.output, null, 2)}
- ########################__END__##########################
-
- `);
- }
-
- // Define types for the final output structure
- interface FinalOutput {
-   id: string;
-   streamed_output: Array<{
-     intermediateSteps?: AgentStep[];
-     output?: string;
-   }>;
-   final_output?: {
-     output: string;
-   };
-   logs: Record<string, any>;
- }
-
- // Process finalState to create FinalOutput
- const finalOutput: FinalOutput = {
-   id: '',
-   streamed_output: [],
-   logs: {},
- };
-
- for (const patch of finalState) {
-   if (patch.ops) {
-     for (const op of patch.ops) {
-       if (op.op === 'add' || op.op === 'replace') {
-         if (op.path === '/id') {
-           finalOutput.id = op.value;
-         } else if (op.path === '/streamed_output/-') {
-           finalOutput.streamed_output.push(op.value);
-         } else if (op.path === '/final_output') {
-           finalOutput.final_output = op.value;
-         } else if (op.path.startsWith('/logs/')) {
-           const logKey = op.path.split('/')[2];
-           finalOutput.logs[logKey] = op.value;
-         }
-       }
-     }
-   }
- }
-
- // Save outputs to a JSON file
- await fs.writeFile('outputs.json', JSON.stringify(outputs, null, 2));
- console.log('\n\nOutputs have been saved to outputs.json');
-
- // Save the final state separately
- await fs.writeFile('final_output.json', JSON.stringify(finalOutput, null, 2));
- console.log('\n\nFinal output has been saved to final_output.json');
-
- // Save the cleaned-up accumulated output
- await fs.writeFile('cleaned_output.txt', accumulatedOutput);
- console.log('\n\nCleaned output has been saved to cleaned_output.txt');
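
The deleted script above consumed LangChain's streamLog output, which emits JSON-Patch-style operations ({ op, path, value }) that the caller folds into a final state. For reference, the reduction it performed at the end can be expressed as a small, dependency-free TypeScript sketch; the PatchOp type here is illustrative, not an export of this package:

type PatchOp = { op: 'add' | 'replace' | 'remove'; path: string; value?: unknown };

interface FoldedState {
  id: string;
  streamed_output: unknown[];
  final_output?: unknown;
  logs: Record<string, unknown>;
}

// Fold a stream of JSON-Patch-style ops into one state object —
// the same reduction the deleted script ran over `finalState`.
function foldPatches(ops: PatchOp[]): FoldedState {
  const state: FoldedState = { id: '', streamed_output: [], logs: {} };
  for (const op of ops) {
    if (op.op !== 'add' && op.op !== 'replace') continue;
    if (op.path === '/id') state.id = op.value as string;
    else if (op.path === '/streamed_output/-') state.streamed_output.push(op.value);
    else if (op.path === '/final_output') state.final_output = op.value;
    else if (op.path.startsWith('/logs/')) state.logs[op.path.split('/')[2]] = op.value;
  }
  return state;
}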
package/src/proto/main_collab_community_event.ts
@@ -1,133 +0,0 @@
- // src/main_collab_community_event.ts
- import dotenv from 'dotenv';
- import { HumanMessage } from '@langchain/core/messages';
- import type * as t from '@/types';
- import {
-   ChatModelStreamHandler,
-   LLMStreamHandler,
- } from '@/stream';
- import { CollaborativeProcessor, Member } from '@/collab_design_v5';
- import { tavilyTool, chartTool } from '@/tools/example';
- import { supervisorPrompt } from '@/prompts/collab';
- import { GraphEvents, Providers } from '@/common';
- import fs from 'fs';
- import util from 'util';
-
- dotenv.config();
-
- // Create a write stream
- const logFile = fs.createWriteStream('event_log.log', { flags: 'a' });
-
- // Redirect console.log and console.error
- const originalConsoleLog = console.log;
- const originalConsoleError = console.error;
-
- console.log = function(...args) {
-   logFile.write(util.format.apply(null, args) + '\n');
-   originalConsoleLog.apply(console, args);
- };
-
- console.error = function(...args) {
-   logFile.write(util.format.apply(null, args) + '\n');
-   originalConsoleError.apply(console, args);
- };
-
- // Redirect process.stdout.write
- const originalStdoutWrite = process.stdout.write;
- process.stdout.write = function(chunk: string | Uint8Array, encoding?: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean {
-   logFile.write(chunk, encoding);
-   return originalStdoutWrite.apply(process.stdout, [chunk, encoding, callback]);
- } as any;
-
- // Redirect process.stderr.write
- const originalStderrWrite = process.stderr.write;
- process.stderr.write = function(chunk: string | Uint8Array, encoding?: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean {
-   logFile.write(chunk, encoding);
-   return originalStderrWrite.apply(process.stderr, [chunk, encoding, callback]);
- } as any;
-
- async function testCollaborativeCommunityEvent() {
-   const customHandlers = {
-     [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
-     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
-     [GraphEvents.LLM_START]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM Start:', event);
-       }
-     },
-     [GraphEvents.LLM_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM End:', event);
-       }
-     },
-     [GraphEvents.CHAT_MODEL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Chat Model End:', event);
-       }
-     },
-     [GraphEvents.TOOL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Tool End:', event);
-         console.dir(data, { depth: null });
-       }
-     },
-   };
-
-   // Define the collaborative members
-   const members: Member[] = [
-     {
-       name: 'resource_finder',
-       systemPrompt: 'You are a resource finder. You utilize the Tavily search engine to gather necessary resources and contacts needed for the community event.',
-       tools: [tavilyTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0,
-       },
-     },
-     {
-       name: 'event_scheduler',
-       systemPrompt: 'You are an event scheduler. You manage the timeline of the event activities using the Chart Generator to visualize the schedule.',
-       tools: [chartTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0.2,
-       },
-     },
-   ];
-
-   const supervisorConfig = {
-     systemPrompt: supervisorPrompt,
-     llmConfig: {
-       provider: Providers.OPENAI,
-       modelName: 'gpt-4o',
-       temperature: 0,
-     },
-   };
-
-   const collaborativeProcessor = new CollaborativeProcessor(members, supervisorConfig, customHandlers);
-   await collaborativeProcessor.initialize();
-
-   const config = {
-     configurable: { thread_id: 'collaborative-event-planning-1' },
-     streamMode: 'events',
-     version: 'v2',
-   };
-
-   console.log('\nCollaborative Test: Plan a community event');
-
-   const input = {
-     messages: [new HumanMessage('Plan a community fair including activities for all ages, food vendors, and a performance stage.')],
-   };
-
-   await collaborativeProcessor.processStream(input, config);
- }
-
- async function main() {
-   await testCollaborativeCommunityEvent();
- }
-
- main().catch(console.error).finally(() => {
-   logFile.end();
- });
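
The deleted script above tees every console and stdout/stderr write into a log file by monkey-patching the global write functions. A less invasive pattern, sketched here as a hypothetical alternative (not code from this package), is to build a dedicated Console bound to the file stream and leave the global streams untouched:

import fs from 'fs';
import { Console } from 'node:console';

// A Console instance whose output goes only to the log file;
// the global console keeps writing to the terminal as usual.
const logFile = fs.createWriteStream('event_log.log', { flags: 'a' });
const fileConsole = new Console({ stdout: logFile, stderr: logFile });

fileConsole.log('written to event_log.log');
console.log('written to the terminal');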
package/src/proto/main_collab_design_v2.ts
@@ -1,96 +0,0 @@
- // src/collaborative_main.ts
- import dotenv from 'dotenv';
- import { HumanMessage } from '@langchain/core/messages';
- import type * as t from '@/types';
- import {
-   ChatModelStreamHandler,
-   LLMStreamHandler,
- } from '@/stream';
- import { CollaborativeProcessor, Member } from '@/collab_design_v5';
- import { tavilyTool, chartTool } from '@/tools/example';
- import { GraphEvents, Providers } from '@/common';
-
- dotenv.config();
-
- async function testCollaborativeStreaming() {
-   const customHandlers = {
-     [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
-     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
-     [GraphEvents.LLM_START]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM Start:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.LLM_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.CHAT_MODEL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Chat Model End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.TOOL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Tool End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-   };
-
-   const members: Member[] = [
-     {
-       name: 'researcher',
-       systemPrompt: 'You are a web researcher. You may use the Tavily search engine to search the web for important information, so the Chart Generator in your team can make useful plots.',
-       tools: [tavilyTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0,
-       },
-     },
-     {
-       name: 'chart_generator',
-       systemPrompt: 'You excel at generating bar charts. Use the researcher\'s information to generate the charts.',
-       tools: [chartTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0.2,
-       },
-     },
-   ];
-
-   const collaborativeProcessor = new CollaborativeProcessor(members, {
-     llmConfig: {
-       provider: Providers.OPENAI,
-       modelName: 'gpt-4o',
-       temperature: 0.5,
-     },
-   }, customHandlers);
-   await collaborativeProcessor.initialize();
-
-   const config = {
-     configurable: { thread_id: 'collaborative-conversation-1' },
-     streamMode: 'values',
-     version: 'v2' as const,
-   };
-
-   console.log('\nCollaborative Test: Create a chart');
-
-   const input = {
-     messages: [new HumanMessage('Create a chart showing the population growth of the top 5 most populous countries over the last 50 years.')],
-   };
-
-   await collaborativeProcessor.processStream(input, config);
- }
-
- async function main() {
-   await testCollaborativeStreaming();
- }
-
- main().catch(console.error);
package/src/proto/main_collab_design_v4.ts
@@ -1,100 +0,0 @@
- // src/collaborative_main.ts
- import dotenv from 'dotenv';
- import { HumanMessage } from '@langchain/core/messages';
- import type * as t from '@/types';
- import {
-   ChatModelStreamHandler,
-   LLMStreamHandler,
- } from '@/stream';
- import { CollaborativeProcessor, Member } from '@/collab_design_v4';
- import { tavilyTool, chartTool } from '@/tools/example';
- import { supervisorPrompt } from '@/prompts/collab';
- import { GraphEvents, Providers } from '@/common';
-
- dotenv.config();
-
- async function testCollaborativeStreaming() {
-   const customHandlers = {
-     [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
-     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
-     [GraphEvents.LLM_START]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM Start:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.LLM_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.CHAT_MODEL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Chat Model End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.TOOL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Tool End:', event);
-         console.dir(data, { depth: null });
-       }
-     },
-   };
-
-   const members: Member[] = [
-     {
-       name: 'researcher',
-       systemPrompt: 'You are a web researcher. You may use the Tavily search engine to search the web for important information, so the Chart Generator in your team can make useful plots.',
-       tools: [tavilyTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0,
-       },
-     },
-     {
-       name: 'chart_generator',
-       systemPrompt: 'You excel at generating bar charts. Use the researcher\'s information to generate the charts.',
-       tools: [chartTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0.2,
-       },
-     },
-   ];
-
-   const supervisorConfig = {
-     systemPrompt: supervisorPrompt,
-     llmConfig: {
-       provider: Providers.OPENAI,
-       modelName: 'gpt-4o',
-       temperature: 0,
-     },
-   };
-
-   const collaborativeProcessor = new CollaborativeProcessor(members, supervisorConfig, customHandlers);
-   await collaborativeProcessor.initialize();
-
-   const config = {
-     configurable: { thread_id: 'collaborative-conversation-1' },
-     streamMode: 'values',
-     version: 'v2' as const,
-   };
-
-   console.log('\nCollaborative Test: Create a chart');
-
-   const input = {
-     messages: [new HumanMessage('Create a chart showing the population growth of the top 5 most populous countries over the last 50 years.')],
-   };
-
-   await collaborativeProcessor.processStream(input, config);
- }
-
- async function main() {
-   await testCollaborativeStreaming();
- }
-
- main().catch(console.error);
package/src/proto/main_collab_design_v5.ts
@@ -1,135 +0,0 @@
- // src/main_collab_design_v5.ts
- import dotenv from 'dotenv';
- import { HumanMessage } from '@langchain/core/messages';
- import type * as t from '@/types';
- import {
-   ChatModelStreamHandler,
-   LLMStreamHandler,
- } from '@/stream';
- import { CollaborativeProcessor, Member } from '@/collab_design_v5';
- import { tavilyTool, chartTool } from '@/tools/example';
- import { supervisorPrompt } from '@/prompts/collab';
- import { GraphEvents, Providers } from '@/common';
- import fs from 'fs';
- import util from 'util';
-
- dotenv.config();
-
- // Create a write stream
- const logFile = fs.createWriteStream('output.log', { flags: 'a' });
-
- // Redirect console.log and console.error
- const originalConsoleLog = console.log;
- const originalConsoleError = console.error;
-
- console.log = function(...args) {
-   logFile.write(util.format.apply(null, args) + '\n');
-   originalConsoleLog.apply(console, args);
- };
-
- console.error = function(...args) {
-   logFile.write(util.format.apply(null, args) + '\n');
-   originalConsoleError.apply(console, args);
- };
-
- // Redirect process.stdout.write
- const originalStdoutWrite = process.stdout.write;
- process.stdout.write = function(chunk: string | Uint8Array, encoding?: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean {
-   logFile.write(chunk, encoding);
-   return originalStdoutWrite.apply(process.stdout, [chunk, encoding, callback]);
- } as any;
-
- // Redirect process.stderr.write
- const originalStderrWrite = process.stderr.write;
- process.stderr.write = function(chunk: string | Uint8Array, encoding?: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean {
-   logFile.write(chunk, encoding);
-   return originalStderrWrite.apply(process.stderr, [chunk, encoding, callback]);
- } as any;
-
- async function testCollaborativeStreaming() {
-   const customHandlers = {
-     [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
-     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
-     [GraphEvents.LLM_START]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM Start:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.LLM_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('LLM End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.CHAT_MODEL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Chat Model End:', event);
-         // console.dir(data, { depth: null });
-       }
-     },
-     [GraphEvents.TOOL_END]: {
-       handle: (event: string, data: t.StreamEventData) => {
-         console.log('Tool End:', event);
-         console.dir(data, { depth: null });
-       }
-     },
-   };
-
-   const members: Member[] = [
-     {
-       name: 'researcher',
-       systemPrompt: 'You are a web researcher. You may use the Tavily search engine to search the web for important information, so the Chart Generator in your team can make useful plots.',
-       tools: [tavilyTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0,
-       },
-     },
-     {
-       name: 'chart_generator',
-       systemPrompt: 'You excel at generating bar charts. Use the researcher\'s information to generate the charts.',
-       tools: [chartTool],
-       llmConfig: {
-         provider: Providers.OPENAI,
-         modelName: 'gpt-4o',
-         temperature: 0.2,
-       },
-     },
-   ];
-
-   const supervisorConfig = {
-     systemPrompt: supervisorPrompt,
-     llmConfig: {
-       provider: Providers.OPENAI,
-       modelName: 'gpt-4o',
-       temperature: 0,
-     },
-   };
-
-   const collaborativeProcessor = new CollaborativeProcessor(members, supervisorConfig, customHandlers);
-   await collaborativeProcessor.initialize();
-
-   const config = {
-     configurable: { thread_id: 'collaborative-conversation-1' },
-     streamMode: 'values',
-     version: 'v2' as const,
-   };
-
-   console.log('\nCollaborative Test: Create a chart');
-
-   const input = {
-     messages: [new HumanMessage('Create a chart showing the population growth of the top 5 most populous countries over the last 50 years.')],
-   };
-
-   await collaborativeProcessor.processStream(input, config);
- }
-
- async function main() {
-   await testCollaborativeStreaming();
- }
-
- main().catch(console.error).finally(() => {
-   logFile.end();
- });