@librechat/agents 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143)
  1. package/LICENSE +21 -0
  2. package/dist/cjs/common/enum.cjs +108 -0
  3. package/dist/cjs/common/enum.cjs.map +1 -0
  4. package/dist/cjs/events.cjs +104 -0
  5. package/dist/cjs/events.cjs.map +1 -0
  6. package/dist/cjs/graphs/Graph.cjs +313 -0
  7. package/dist/cjs/graphs/Graph.cjs.map +1 -0
  8. package/dist/cjs/llm/providers.cjs +30 -0
  9. package/dist/cjs/llm/providers.cjs.map +1 -0
  10. package/dist/cjs/main.cjs +59 -0
  11. package/dist/cjs/main.cjs.map +1 -0
  12. package/dist/cjs/messages.cjs +195 -0
  13. package/dist/cjs/messages.cjs.map +1 -0
  14. package/dist/cjs/run.cjs +106 -0
  15. package/dist/cjs/run.cjs.map +1 -0
  16. package/dist/cjs/stream.cjs +133 -0
  17. package/dist/cjs/stream.cjs.map +1 -0
  18. package/dist/cjs/tools/ToolNode.cjs +80 -0
  19. package/dist/cjs/tools/ToolNode.cjs.map +1 -0
  20. package/dist/cjs/utils/graph.cjs +16 -0
  21. package/dist/cjs/utils/graph.cjs.map +1 -0
  22. package/dist/cjs/utils/run.cjs +59 -0
  23. package/dist/cjs/utils/run.cjs.map +1 -0
  24. package/dist/esm/common/enum.mjs +108 -0
  25. package/dist/esm/common/enum.mjs.map +1 -0
  26. package/dist/esm/events.mjs +97 -0
  27. package/dist/esm/events.mjs.map +1 -0
  28. package/dist/esm/graphs/Graph.mjs +310 -0
  29. package/dist/esm/graphs/Graph.mjs.map +1 -0
  30. package/dist/esm/llm/providers.mjs +27 -0
  31. package/dist/esm/llm/providers.mjs.map +1 -0
  32. package/dist/esm/main.mjs +9 -0
  33. package/dist/esm/main.mjs.map +1 -0
  34. package/dist/esm/messages.mjs +190 -0
  35. package/dist/esm/messages.mjs.map +1 -0
  36. package/dist/esm/run.mjs +104 -0
  37. package/dist/esm/run.mjs.map +1 -0
  38. package/dist/esm/stream.mjs +131 -0
  39. package/dist/esm/stream.mjs.map +1 -0
  40. package/dist/esm/tools/ToolNode.mjs +77 -0
  41. package/dist/esm/tools/ToolNode.mjs.map +1 -0
  42. package/dist/esm/utils/graph.mjs +13 -0
  43. package/dist/esm/utils/graph.mjs.map +1 -0
  44. package/dist/esm/utils/run.mjs +57 -0
  45. package/dist/esm/utils/run.mjs.map +1 -0
  46. package/dist/types/common/enum.d.ts +79 -0
  47. package/dist/types/common/index.d.ts +1 -0
  48. package/dist/types/events.d.ts +22 -0
  49. package/dist/types/graphs/Graph.d.ts +86 -0
  50. package/dist/types/graphs/index.d.ts +1 -0
  51. package/dist/types/index.d.ts +8 -0
  52. package/dist/types/llm/providers.d.ts +4 -0
  53. package/dist/types/messages.d.ts +10 -0
  54. package/dist/types/prompts/collab.d.ts +1 -0
  55. package/dist/types/prompts/index.d.ts +2 -0
  56. package/dist/types/prompts/taskmanager.d.ts +41 -0
  57. package/dist/types/run.d.ts +21 -0
  58. package/dist/types/scripts/args.d.ts +6 -0
  59. package/dist/types/scripts/cli.d.ts +1 -0
  60. package/dist/types/scripts/cli2.d.ts +1 -0
  61. package/dist/types/scripts/cli3.d.ts +1 -0
  62. package/dist/types/scripts/cli4.d.ts +1 -0
  63. package/dist/types/scripts/cli5.d.ts +1 -0
  64. package/dist/types/scripts/empty_input.d.ts +1 -0
  65. package/dist/types/stream.d.ts +5 -0
  66. package/dist/types/tools/ToolNode.d.ts +15 -0
  67. package/dist/types/tools/example.d.ts +26 -0
  68. package/dist/types/types/graph.d.ts +108 -0
  69. package/dist/types/types/index.d.ts +5 -0
  70. package/dist/types/types/llm.d.ts +25 -0
  71. package/dist/types/types/run.d.ts +53 -0
  72. package/dist/types/types/stream.d.ts +134 -0
  73. package/dist/types/types/tools.d.ts +24 -0
  74. package/dist/types/utils/graph.d.ts +2 -0
  75. package/dist/types/utils/index.d.ts +2 -0
  76. package/dist/types/utils/llmConfig.d.ts +2 -0
  77. package/dist/types/utils/logging.d.ts +1 -0
  78. package/dist/types/utils/run.d.ts +20 -0
  79. package/package.json +142 -0
  80. package/src/common/enum.ts +121 -0
  81. package/src/common/index.ts +2 -0
  82. package/src/events.ts +110 -0
  83. package/src/graphs/Graph.ts +416 -0
  84. package/src/graphs/index.ts +1 -0
  85. package/src/index.ts +15 -0
  86. package/src/llm/providers.ts +27 -0
  87. package/src/messages.ts +210 -0
  88. package/src/prompts/collab.ts +6 -0
  89. package/src/prompts/index.ts +2 -0
  90. package/src/prompts/taskmanager.ts +61 -0
  91. package/src/proto/CollabGraph.ts +269 -0
  92. package/src/proto/TaskManager.ts +243 -0
  93. package/src/proto/collab.ts +200 -0
  94. package/src/proto/collab_design.ts +184 -0
  95. package/src/proto/collab_design_v2.ts +224 -0
  96. package/src/proto/collab_design_v3.ts +255 -0
  97. package/src/proto/collab_design_v4.ts +220 -0
  98. package/src/proto/collab_design_v5.ts +251 -0
  99. package/src/proto/collab_graph.ts +181 -0
  100. package/src/proto/collab_original.ts +123 -0
  101. package/src/proto/example.ts +93 -0
  102. package/src/proto/example_new.ts +68 -0
  103. package/src/proto/example_old.ts +201 -0
  104. package/src/proto/example_test.ts +152 -0
  105. package/src/proto/example_test_anthropic.ts +100 -0
  106. package/src/proto/log_stream.ts +202 -0
  107. package/src/proto/main_collab_community_event.ts +133 -0
  108. package/src/proto/main_collab_design_v2.ts +96 -0
  109. package/src/proto/main_collab_design_v4.ts +100 -0
  110. package/src/proto/main_collab_design_v5.ts +135 -0
  111. package/src/proto/main_collab_global_analysis.ts +122 -0
  112. package/src/proto/main_collab_hackathon_event.ts +153 -0
  113. package/src/proto/main_collab_space_mission.ts +153 -0
  114. package/src/proto/main_philosophy.ts +210 -0
  115. package/src/proto/original_script.ts +126 -0
  116. package/src/proto/standard.ts +100 -0
  117. package/src/proto/stream.ts +56 -0
  118. package/src/proto/tasks.ts +118 -0
  119. package/src/proto/tools/global_analysis_tools.ts +86 -0
  120. package/src/proto/tools/space_mission_tools.ts +60 -0
  121. package/src/proto/vertexai.ts +54 -0
  122. package/src/run.ts +132 -0
  123. package/src/scripts/args.ts +42 -0
  124. package/src/scripts/cli.ts +166 -0
  125. package/src/scripts/cli2.ts +124 -0
  126. package/src/scripts/cli3.ts +175 -0
  127. package/src/scripts/cli4.ts +182 -0
  128. package/src/scripts/cli5.ts +182 -0
  129. package/src/scripts/empty_input.ts +136 -0
  130. package/src/stream.ts +145 -0
  131. package/src/tools/ToolNode.ts +108 -0
  132. package/src/tools/example.ts +52 -0
  133. package/src/types/graph.ts +126 -0
  134. package/src/types/index.ts +6 -0
  135. package/src/types/llm.ts +38 -0
  136. package/src/types/run.ts +56 -0
  137. package/src/types/stream.ts +174 -0
  138. package/src/types/tools.ts +31 -0
  139. package/src/utils/graph.ts +11 -0
  140. package/src/utils/index.ts +2 -0
  141. package/src/utils/llmConfig.ts +50 -0
  142. package/src/utils/logging.ts +48 -0
  143. package/src/utils/run.ts +91 -0
package/src/scripts/cli4.ts ADDED
@@ -0,0 +1,182 @@
+ /* eslint-disable no-console */
+ // src/scripts/cli.ts
+ import { config } from 'dotenv';
+ config();
+ import { HumanMessage, BaseMessage } from '@langchain/core/messages';
+ import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
+ import type * as t from '@/types';
+ import { ModelEndHandler, ToolEndHandler } from '@/events';
+ import { ChatModelStreamHandler } from '@/stream';
+
+
+ import { getArgs } from '@/scripts/args';
+ import { Run } from '@/run';
+ import { GraphEvents, Callback, Providers } from '@/common';
+ import { getLLMConfig } from '@/utils/llmConfig';
+
+ const conversationHistory: BaseMessage[] = [];
+ async function testStandardStreaming(): Promise<void> {
+   const { userName, location, provider, currentDate } = await getArgs();
+   const customHandlers = {
+     [GraphEvents.TOOL_END]: new ToolEndHandler(),
+     [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+     [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_COMPLETED ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_MESSAGE_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_MESSAGE_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.TOOL_START]: {
+       handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
+         console.log('====== TOOL_START ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     // [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
+     // [GraphEvents.LLM_START]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_START ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     // [GraphEvents.LLM_END]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_END ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     /*
+     [GraphEvents.CHAIN_START]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_START ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.CHAIN_END]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_END ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     */
+     // [GraphEvents.CHAT_MODEL_START]: {
+     //   handle: (_event: string, _data: t.StreamEventData): void => {
+     //     console.log('====== CHAT_MODEL_START ======');
+     //     console.dir(_data, { depth: null });
+     //     // Intentionally left empty
+     //   }
+     // },
+   };
+
+   // const llmConfig = getLLMConfig(provider);
+   let llmConfig = getLLMConfig(Providers.OPENAI);
+
+   const graphConfig: t.StandardGraphConfig = {
+     type: 'standard',
+     llmConfig,
+     tools: [new TavilySearchResults()],
+     instructions: 'You are a friendly AI assistant. Always address the user by their name.',
+     additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
+   };
+
+   let run = await Run.create<t.IState>({
+     graphConfig,
+     customHandlers,
+   });
+
+   const config = {
+     configurable: {
+       provider,
+       thread_id: 'conversation-num-1',
+     },
+     streamMode: 'values',
+     version: 'v2' as const,
+   };
+
+   console.log(' Test 1: OpenAI Tool Usage');
+
+   // conversationHistory.push(new HumanMessage(`Hi I'm ${userName}.`));
+   conversationHistory.push(new HumanMessage(`search for good sunrise hikes near ${location}
+ then search weather in ${location} for today which is ${currentDate}`));
+   let inputs = {
+     messages: conversationHistory,
+   };
+   const contentParts = await run.processStream(inputs, config,
+     // {
+     //   [Callback.TOOL_START]: (graph, ...args) => {
+     //     console.log('TOOL_START callback');
+     //   },
+     //   [Callback.TOOL_END]: (graph, ...args) => {
+     //     console.log('TOOL_END callback');
+     //   },
+     // }
+   );
+   const finalMessages = run.getRunMessages();
+   if (finalMessages) {
+     conversationHistory.push(...finalMessages);
+   }
+
+   console.log(' Test 2: Anthropic Follow-up Response');
+
+   // const userMessage = `
+   // Make a search for the weather in ${location} today, which is ${currentDate}.
+   // Make sure to always refer to me by name.
+   // After giving me a thorough summary, tell me a joke about the weather forecast we went over.
+   // `;
+   const userMessage = `Thanks!`;
+
+   conversationHistory.push(new HumanMessage(userMessage));
+
+   inputs = {
+     messages: conversationHistory,
+   };
+
+   llmConfig = getLLMConfig(Providers.ANTHROPIC);
+   graphConfig.llmConfig = llmConfig;
+   config.configurable.provider = Providers.ANTHROPIC;
+
+   run = await Run.create<t.IState>({
+     graphConfig,
+     customHandlers,
+   });
+
+   const contentParts2 = await run.processStream(inputs, config);
+   const finalMessages2 = run.getRunMessages();
+   if (finalMessages2) {
+     conversationHistory.push(...finalMessages2);
+     // console.dir(conversationHistory, { depth: null });
+   }
+ }
+
+ process.on('unhandledRejection', (reason, promise) => {
+   console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+   console.log('Conversation history:');
+   console.dir(conversationHistory, { depth: null });
+   process.exit(1);
+ });
+
+ testStandardStreaming().catch((err) => {
+   console.error(err);
+   console.log('Conversation history:');
+   console.dir(conversationHistory, { depth: null });
+   process.exit(1);
+ });
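Both 182-line scripts in this pair (cli4.ts above, cli5.ts below) follow the same turn loop with the provider order reversed: build a graphConfig, create a Run, stream one turn, fold the run's messages back into the shared history, then swap providers by fetching a new llmConfig and recreating the Run. A condensed sketch of that loop, using only the exports these scripts already import; the runTurn helper itself is hypothetical, not part of the package:

import { HumanMessage, BaseMessage } from '@langchain/core/messages';
import type * as t from '@/types';
import { Run } from '@/run';
import { Providers } from '@/common';
import { getLLMConfig } from '@/utils/llmConfig';

// Hypothetical helper condensing the per-turn pattern of cli4/cli5.
// Conversation state lives in `history`; each provider swap rebuilds the Run.
async function runTurn(
  history: BaseMessage[],
  text: string,
  provider: Providers,
  graphConfig: t.StandardGraphConfig,
  customHandlers: Record<string, t.EventHandler>,
): Promise<void> {
  graphConfig.llmConfig = getLLMConfig(provider);
  const run = await Run.create<t.IState>({ graphConfig, customHandlers });
  history.push(new HumanMessage(text));
  await run.processStream({ messages: history }, {
    configurable: { provider, thread_id: 'conversation-num-1' },
    streamMode: 'values',
    version: 'v2' as const,
  });
  // getRunMessages() may return nothing; append only when messages exist.
  const finalMessages = run.getRunMessages();
  if (finalMessages) {
    history.push(...finalMessages);
  }
}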
package/src/scripts/cli5.ts ADDED
@@ -0,0 +1,182 @@
+ /* eslint-disable no-console */
+ // src/scripts/cli.ts
+ import { config } from 'dotenv';
+ config();
+ import { HumanMessage, BaseMessage } from '@langchain/core/messages';
+ import { TavilySearchResults } from '@langchain/community/tools/tavily_search';
+ import type * as t from '@/types';
+ import { ModelEndHandler, ToolEndHandler } from '@/events';
+ import { ChatModelStreamHandler } from '@/stream';
+
+
+ import { getArgs } from '@/scripts/args';
+ import { Run } from '@/run';
+ import { GraphEvents, Callback, Providers } from '@/common';
+ import { getLLMConfig } from '@/utils/llmConfig';
+
+ const conversationHistory: BaseMessage[] = [];
+ async function testStandardStreaming(): Promise<void> {
+   const { userName, location, provider, currentDate } = await getArgs();
+   const customHandlers = {
+     [GraphEvents.TOOL_END]: new ToolEndHandler(),
+     [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+     [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_COMPLETED ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_MESSAGE_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_MESSAGE_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.TOOL_START]: {
+       handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
+         console.log('====== TOOL_START ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     // [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
+     // [GraphEvents.LLM_START]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_START ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     // [GraphEvents.LLM_END]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_END ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     /*
+     [GraphEvents.CHAIN_START]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_START ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.CHAIN_END]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_END ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     */
+     // [GraphEvents.CHAT_MODEL_START]: {
+     //   handle: (_event: string, _data: t.StreamEventData): void => {
+     //     console.log('====== CHAT_MODEL_START ======');
+     //     console.dir(_data, { depth: null });
+     //     // Intentionally left empty
+     //   }
+     // },
+   };
+
+   // const llmConfig = getLLMConfig(provider);
+   let llmConfig = getLLMConfig(Providers.ANTHROPIC);
+
+   const graphConfig: t.StandardGraphConfig = {
+     type: 'standard',
+     llmConfig,
+     tools: [new TavilySearchResults()],
+     instructions: 'You are a friendly AI assistant. Always address the user by their name.',
+     additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
+   };
+
+   let run = await Run.create<t.IState>({
+     graphConfig,
+     customHandlers,
+   });
+
+   const config = {
+     configurable: {
+       provider: Providers.ANTHROPIC,
+       thread_id: 'conversation-num-1',
+     },
+     streamMode: 'values',
+     version: 'v2' as const,
+   };
+
+   console.log(' Test 1: Anthropic Tool Usage');
+
+   // conversationHistory.push(new HumanMessage(`Hi I'm ${userName}.`));
+   conversationHistory.push(new HumanMessage(`search for good sunrise hikes near ${location}
+ then search weather in ${location} for today which is ${currentDate}`));
+   let inputs = {
+     messages: conversationHistory,
+   };
+   const contentParts = await run.processStream(inputs, config,
+     // {
+     //   [Callback.TOOL_START]: (graph, ...args) => {
+     //     console.log('TOOL_START callback');
+     //   },
+     //   [Callback.TOOL_END]: (graph, ...args) => {
+     //     console.log('TOOL_END callback');
+     //   },
+     // }
+   );
+   const finalMessages = run.getRunMessages();
+   if (finalMessages) {
+     conversationHistory.push(...finalMessages);
+   }
+
+   console.log(' Test 2: OpenAI Follow-up Response');
+
+   // const userMessage = `
+   // Make a search for the weather in ${location} today, which is ${currentDate}.
+   // Make sure to always refer to me by name.
+   // After giving me a thorough summary, tell me a joke about the weather forecast we went over.
+   // `;
+   const userMessage = `Thanks!`;
+
+   conversationHistory.push(new HumanMessage(userMessage));
+
+   inputs = {
+     messages: conversationHistory,
+   };
+
+   llmConfig = getLLMConfig(Providers.OPENAI);
+   graphConfig.llmConfig = llmConfig;
+   config.configurable.provider = Providers.OPENAI;
+
+   run = await Run.create<t.IState>({
+     graphConfig,
+     customHandlers,
+   });
+
+   const contentParts2 = await run.processStream(inputs, config);
+   const finalMessages2 = run.getRunMessages();
+   if (finalMessages2) {
+     conversationHistory.push(...finalMessages2);
+     // console.dir(conversationHistory, { depth: null });
+   }
+ }
+
+ process.on('unhandledRejection', (reason, promise) => {
+   console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+   console.log('Conversation history:');
+   console.dir(conversationHistory, { depth: null });
+   process.exit(1);
+ });
+
+ testStandardStreaming().catch((err) => {
+   console.error(err);
+   console.log('Conversation history:');
+   console.dir(conversationHistory, { depth: null });
+   process.exit(1);
+ });
package/src/scripts/empty_input.ts ADDED
@@ -0,0 +1,136 @@
+ import { z } from 'zod';
+ import { config } from 'dotenv';
+ config();
+ import { HumanMessage } from '@langchain/core/messages';
+ import { tool } from "@langchain/core/tools";
+ import { getArgs } from '@/scripts/args';
+ import { Run } from '@/run';
+ import { getLLMConfig } from '@/utils/llmConfig';
+ import { ChatModelStreamHandler } from '@/stream';
+ import { ToolEndHandler, ModelEndHandler } from '@/events';
+ import { GraphEvents } from '@/common';
+ import type * as t from '@/types';
+
+ const pingServerTool = tool(
+   () => {
+     return 'server has been pinged';
+   },
+   {
+     name: 'pingServer',
+     description: 'Ping server',
+     schema: z.object({}),
+   }
+ );
+
+ async function testPingServer(): Promise<void> {
+   const { provider } = await getArgs();
+   const customHandlers = {
+     [GraphEvents.TOOL_END]: new ToolEndHandler(),
+     [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+     [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_COMPLETED ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_RUN_STEP_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_RUN_STEP_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.ON_MESSAGE_DELTA]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== ON_MESSAGE_DELTA ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.TOOL_START]: {
+       handle: (_event: string, data: t.StreamEventData, metadata?: Record<string, unknown>): void => {
+         console.log('====== TOOL_START ======');
+         console.dir(data, { depth: null });
+       }
+     },
+     // [GraphEvents.LLM_STREAM]: new LLMStreamHandler(),
+     // [GraphEvents.LLM_START]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_START ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     // [GraphEvents.LLM_END]: {
+     //   handle: (_event: string, data: t.StreamEventData): void => {
+     //     console.log('====== LLM_END ======');
+     //     console.dir(data, { depth: null });
+     //   }
+     // },
+     /*
+     [GraphEvents.CHAIN_START]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_START ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     [GraphEvents.CHAIN_END]: {
+       handle: (_event: string, data: t.StreamEventData): void => {
+         console.log('====== CHAIN_END ======');
+         // console.dir(data, { depth: null });
+       }
+     },
+     */
+     // [GraphEvents.CHAT_MODEL_START]: {
+     //   handle: (_event: string, _data: t.StreamEventData): void => {
+     //     console.log('====== CHAT_MODEL_START ======');
+     //     console.dir(_data, { depth: null });
+     //     // Intentionally left empty
+     //   }
+     // },
+   };
+
+   const llmConfig = getLLMConfig(provider);
+
+   const run = await Run.create<t.IState>({
+     graphConfig: {
+       type: 'standard',
+       llmConfig,
+       tools: [pingServerTool],
+       instructions: 'You are a helpful AI assistant.',
+     },
+     customHandlers,
+   });
+
+   const config = {
+     configurable: {
+       provider,
+       thread_id: 'ping-server-test',
+     },
+     streamMode: 'values',
+     version: 'v2' as const,
+   };
+
+   console.log('Pinging server test:');
+
+   const userMessage = "Please ping the server.";
+   const inputs = {
+     messages: [new HumanMessage(userMessage)],
+   };
+
+   const contentParts = await run.processStream(inputs, config);
+   const finalMessages = run.getRunMessages();
+   if (finalMessages) {
+     console.log('\nFinal messages:');
+     console.dir(finalMessages, { depth: null });
+   }
+ }
+
+ testPingServer().catch((err) => {
+   console.error(err);
+   process.exit(1);
+ });
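empty_input.ts exercises the edge case its name implies: a tool whose schema takes no input at all (z.object({})). For contrast, the same tool helper from @langchain/core/tools also accepts a parameterized schema; a generic sketch (the echo tool is illustrative, not part of this package):

import { z } from 'zod';
import { tool } from '@langchain/core/tools';

// Illustrative parameterized counterpart to pingServer's empty schema.
const echoTool = tool(
  ({ text }: { text: string }) => `echo: ${text}`,
  {
    name: 'echo',
    description: 'Echo the given text back to the model.',
    schema: z.object({
      text: z.string().describe('Text to echo back'),
    }),
  }
);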
package/src/stream.ts ADDED
@@ -0,0 +1,145 @@
+ // src/stream.ts
+ import { nanoid } from 'nanoid';
+ import type { AIMessageChunk } from '@langchain/core/messages';
+ import type { ToolCall } from '@langchain/core/messages/tool';
+ import type { Graph } from '@/graphs';
+ import type * as t from '@/types';
+ import { StepTypes } from '@/common';
+
+ const getMessageId = (stepKey: string, graph: Graph<t.BaseGraphState>): string | undefined => {
+   const messageId = graph.messageIdsByStepKey.get(stepKey);
+   if (messageId) {
+     return;
+   }
+
+   const prelimMessageId = graph.prelimMessageIdsByStepKey.get(stepKey);
+   if (prelimMessageId) {
+     graph.prelimMessageIdsByStepKey.delete(stepKey);
+     graph.messageIdsByStepKey.set(stepKey, prelimMessageId);
+     return prelimMessageId;
+   }
+
+   const message_id = `msg_${nanoid()}`;
+   graph.messageIdsByStepKey.set(stepKey, message_id);
+   return message_id;
+ };
+
+ export class ChatModelStreamHandler implements t.EventHandler {
+   handle(event: string, data: t.StreamEventData, metadata?: Record<string, unknown>, graph?: Graph): void {
+     if (!graph) {
+       throw new Error('Graph not found');
+     }
+
+     const chunk = data.chunk as AIMessageChunk;
+     const content = chunk?.content;
+
+     if (!graph.config) {
+       throw new Error('Config not found in graph');
+     }
+
+     if (!chunk) {
+       console.warn(`No chunk found in ${event} event`);
+       return;
+     }
+
+     const hasToolCalls = chunk.tool_calls && chunk.tool_calls.length > 0;
+     const hasToolCallChunks = chunk.tool_call_chunks && chunk.tool_call_chunks.length > 0;
+
+     if (hasToolCalls && chunk.tool_calls?.every((tc) => tc.id)) {
+       const tool_calls: ToolCall[] = [];
+       for (const tool_call of chunk.tool_calls) {
+         if (!tool_call.id || graph.toolCallStepIds.has(tool_call.id)) {
+           continue;
+         }
+
+         tool_calls.push(tool_call);
+       }
+
+       const stepKey = graph.getStepKey(metadata);
+       graph.dispatchRunStep(stepKey, {
+         type: StepTypes.TOOL_CALLS,
+         tool_calls,
+       });
+     }
+
+     const isEmptyContent = !content || !content.length;
+     const isEmptyChunk = isEmptyContent && !hasToolCallChunks;
+     if (isEmptyChunk && chunk.id && chunk.id?.startsWith('msg')) {
+       if (graph.messageIdsByStepKey.has(chunk.id)) {
+         return;
+       } else if (graph.prelimMessageIdsByStepKey.has(chunk.id)) {
+         return;
+       }
+
+       const stepKey = graph.getStepKey(metadata);
+       graph.prelimMessageIdsByStepKey.set(stepKey, chunk.id);
+       return;
+     } else if (isEmptyChunk) {
+       return;
+     }
+
+     const stepKey = graph.getStepKey(metadata);
+
+     if (hasToolCallChunks && chunk.tool_call_chunks?.length && typeof chunk.tool_call_chunks[0]?.index === 'number') {
+       const stepId = graph.getStepIdByKey(stepKey, chunk.tool_call_chunks[0].index);
+       graph.dispatchRunStepDelta(stepId, {
+         type: StepTypes.TOOL_CALLS,
+         tool_calls: chunk.tool_call_chunks,
+       });
+     }
+
+     if (isEmptyContent) {
+       return;
+     }
+
+     const message_id = getMessageId(stepKey, graph);
+     if (message_id) {
+       graph.dispatchRunStep(stepKey, {
+         type: StepTypes.MESSAGE_CREATION,
+         message_creation: {
+           message_id,
+         },
+       });
+     }
+
+     const stepId = graph.getStepIdByKey(stepKey);
+     const runStep = graph.getRunStep(stepId);
+     if (!runStep) {
+       // eslint-disable-next-line no-console
+       console.warn(`\n
+ ==============================================================
+
+
+ Run step for ${stepId} does not exist, cannot dispatch delta event.
+
+ event: ${event}
+ stepId: ${stepId}
+ stepKey: ${stepKey}
+ message_id: ${message_id}
+ hasToolCalls: ${hasToolCalls}
+ hasToolCallChunks: ${hasToolCallChunks}
+
+ ==============================================================
+ \n`);
+       return;
+     }
+
+     /* Note: tool call chunks may have non-empty content that matches the current tool chunk generation */
+     if (typeof content === 'string' && runStep.type === StepTypes.TOOL_CALLS) {
+       return;
+     } else if (hasToolCallChunks && chunk.tool_call_chunks?.some((tc) => tc.args === content)) {
+       return;
+     } else if (typeof content === 'string') {
+       graph.dispatchMessageDelta(stepId, {
+         content: [{
+           type: 'text',
+           text: content,
+         }],
+       });
+     } else if (content?.every((c) => c.type?.startsWith('text'))) {
+       graph.dispatchMessageDelta(stepId, {
+         content,
+       });
+     }
+   }
+ }
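ChatModelStreamHandler keys its bookkeeping on a per-model-call step key: getMessageId returns undefined once an id is already recorded, so the MESSAGE_CREATION run step is dispatched at most once per step; preliminary ids from empty 'msg'-prefixed chunks are promoted when real content arrives; and text that merely echoes tool-call arguments is suppressed. Handlers registered under customHandlers in the CLI scripts only need the same handle signature, so a minimal custom handler can be sketched as follows (hypothetical, logging only, with the payload shape taken from the handler above):

import type { AIMessageChunk } from '@langchain/core/messages';
import type { Graph } from '@/graphs';
import type * as t from '@/types';

// Hypothetical minimal handler; could be registered under
// GraphEvents.CHAT_MODEL_STREAM in place of ChatModelStreamHandler.
export class LoggingStreamHandler implements t.EventHandler {
  handle(event: string, data: t.StreamEventData, _metadata?: Record<string, unknown>, _graph?: Graph): void {
    const chunk = data.chunk as AIMessageChunk | undefined;
    if (!chunk) {
      console.warn(`No chunk found in ${event} event`);
      return;
    }
    if (typeof chunk.content === 'string' && chunk.content.length > 0) {
      process.stdout.write(chunk.content);
    }
  }
}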