@langgraph-js/sdk 1.11.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/LangGraphClient.d.ts +178 -66
- package/dist/LangGraphClient.js +52 -260
- package/dist/MessageProcessor.d.ts +94 -0
- package/dist/MessageProcessor.js +324 -0
- package/dist/TestKit.d.ts +5 -2
- package/dist/client/LanggraphServer.d.ts +2 -0
- package/dist/client/LanggraphServer.js +4 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/dist/types.d.ts +130 -0
- package/dist/types.js +1 -0
- package/dist/ui-store/createChatStore.d.ts +3 -3
- package/dist/ui-store/createChatStore.js +37 -21
- package/package.json +2 -1
- package/src/LangGraphClient.ts +107 -297
- package/src/MessageProcessor.ts +352 -0
- package/src/TestKit.ts +1 -1
- package/src/client/LanggraphServer.ts +6 -0
- package/src/index.ts +2 -0
- package/src/types.ts +166 -0
- package/src/ui-store/createChatStore.ts +40 -20
package/dist/LangGraphClient.js
CHANGED
@@ -1,45 +1,38 @@
-import {
+import { EventEmitter } from "eventemitter3";
 import { ToolManager } from "./ToolManager.js";
-/**
- * @zh StreamingMessageType 类用于判断消息的类型。
- * @en The StreamingMessageType class is used to determine the type of a message.
- */
-export class StreamingMessageType {
-    static isUser(m) {
-        return m.type === "human";
-    }
-    static isTool(m) {
-        return m.type === "tool";
-    }
-    static isAssistant(m) {
-        return m.type === "ai" && !this.isToolAssistant(m);
-    }
-    static isToolAssistant(m) {
-        var _a, _b;
-        /** @ts-ignore */
-        return m.type === "ai" && (((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) || ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length));
-    }
-}
+import { MessageProcessor } from "./MessageProcessor.js";
 /**
  * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
  * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
  */
-export class LangGraphClient extends
+export class LangGraphClient extends EventEmitter {
     constructor(config) {
-        super(
+        super();
         this.currentAssistant = null;
         this.currentThread = null;
-        this.streamingCallbacks = new Set();
         this.tools = new ToolManager();
         this.stopController = null;
+        /** 用于存储 subAgent 状态数据的键 */
+        this.subAgentsKey = "task_store";
         this.availableAssistants = [];
-        this.streamingMessage = [];
-        /** 图发过来的更新信息 */
-        this.graphMessages = [];
         this.graphState = {};
         /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
         this.graphPosition = "";
         this.extraParams = {};
+        this.client = config.client;
+        this.messageProcessor = new MessageProcessor(this.subAgentsKey);
+    }
+    /** 代理 assistants 属性到内部 client */
+    get assistants() {
+        return this.client.assistants;
+    }
+    /** 代理 threads 属性到内部 client */
+    get threads() {
+        return this.client.threads;
+    }
+    /** 代理 runs 属性到内部 client */
+    get runs() {
+        return this.client.runs;
     }
     listAssistants() {
         return this.assistants.search({
@@ -107,6 +100,9 @@ export class LangGraphClient extends Client {
             sortOrder: "desc",
         });
     }
+    async deleteThread(threadId) {
+        return this.threads.delete(threadId);
+    }
     /**
      * @zh 从历史中恢复 Thread 数据。
      * @en Resets the Thread data from history.
@@ -116,14 +112,12 @@ export class LangGraphClient extends Client {
         await this.initAssistant(agent);
         this.currentThread = await this.threads.get(threadId);
         this.graphState = this.currentThread.values;
-
-        this.
-
+        const graphMessages = ((_a = this.graphState) === null || _a === void 0 ? void 0 : _a.messages) || [];
+        this.messageProcessor.setGraphMessages(graphMessages);
+        this.emit("value", {
+            event: "messages/partial",
             data: {
-
-            data: {
-                messages: this.graphMessages,
-            },
+                messages: this.messageProcessor.getGraphMessages(),
             },
         });
         return this.currentThread;
@@ -137,175 +131,21 @@ export class LangGraphClient extends Client {
         }
     }
     cloneMessage(message) {
-        return
-    }
-    updateStreamingMessage(message) {
-        const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
-        if (!(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.id) || message.id !== lastMessage.id) {
-            this.streamingMessage.push(message);
-            return;
-        }
-        this.streamingMessage[this.streamingMessage.length - 1] = message;
-    }
-    /** 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组 */
-    combineGraphMessagesWithStreamingMessages() {
-        const idMap = new Map(this.streamingMessage.map((i) => [i.id, i]));
-        return [
-            ...this.graphMessages.map((i) => {
-                if (idMap.has(i.id)) {
-                    const newValue = idMap.get(i.id);
-                    idMap.delete(i.id);
-                    return newValue;
-                }
-                return i;
-            }),
-            ...idMap.values(),
-        ];
+        return this.messageProcessor.cloneMessage(message);
     }
     /**
      * @zh 用于 UI 中的流式渲染中的消息。
      * @en Messages used for streaming rendering in the UI.
      */
     get renderMessage() {
-
-        const previousMessage = new Map();
-        const closedToolCallIds = new Set();
-        const result = [];
-        const inputMessages = this.combineGraphMessagesWithStreamingMessages();
-        // console.log(inputMessages);
-        // 从后往前遍历,这样可以保证最新的消息在前面
-        for (let i = inputMessages.length - 1; i >= 0; i--) {
-            const message = this.cloneMessage(inputMessages[i]);
-            if (!message.id) {
-                result.unshift(message);
-                continue;
-            }
-            if (message.type === "ai") {
-                /** @ts-ignore */
-                if (!message.name)
-                    message.name = this.getGraphNodeNow().name;
-            }
-            if (StreamingMessageType.isToolAssistant(message)) {
-                const m = message;
-                // 记录这个 id 的消息,并添加到结果中
-                previousMessage.set(message.id, m);
-                /** @ts-ignore */
-                const tool_calls = ((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ? m.tool_calls : m.tool_call_chunks;
-                const new_tool_calls = tool_calls
-                    .filter((i) => {
-                    return !closedToolCallIds.has(i.id);
-                })
-                    .map((tool, index) => {
-                    var _a, _b, _c, _d;
-                    return {
-                        type: "tool",
-                        additional_kwargs: {},
-                        /** @ts-ignore */
-                        tool_input: (_d = (_c = (_b = (_a = m.additional_kwargs) === null || _a === void 0 ? void 0 : _a.tool_calls) === null || _b === void 0 ? void 0 : _b[index]) === null || _c === void 0 ? void 0 : _c.function) === null || _d === void 0 ? void 0 : _d.arguments,
-                        id: tool.id,
-                        name: tool.name,
-                        response_metadata: {},
-                        tool_call_id: tool.id,
-                        content: "",
-                    };
-                });
-                for (const tool of new_tool_calls) {
-                    if (!previousMessage.has(tool.id)) {
-                        result.unshift(tool);
-                        previousMessage.set(tool.id, tool);
-                    }
-                }
-                result.unshift(m);
-            }
-            else {
-                if (message.type === "tool" && message.tool_call_id) {
-                    closedToolCallIds.add(message.tool_call_id);
-                }
-                previousMessage.set(message.id, message);
-                result.unshift(message);
-            }
-        }
-        return this.attachInfoForMessage(this.composeToolMessages(result));
-    }
-    /**
-     * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
-     * @en Attaches additional information to messages, such as spend time, unique ID, etc.
-     */
-    attachInfoForMessage(result) {
-        var _a, _b, _c;
-        let lastMessage = null;
-        for (const message of result) {
-            const createTime = ((_a = message.response_metadata) === null || _a === void 0 ? void 0 : _a.create_time) || "";
-            // 工具必须要使用 tool_call_id 来保证一致性
-            message.unique_id = message.tool_call_id || message.id;
-            message.spend_time = new Date(createTime).getTime() - new Date(((_b = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.response_metadata) === null || _b === void 0 ? void 0 : _b.create_time) || createTime).getTime();
-            if (!message.usage_metadata && ((_c = message.response_metadata) === null || _c === void 0 ? void 0 : _c.usage)) {
-                const usage = message.response_metadata.usage;
-                message.usage_metadata = {
-                    ...usage,
-                    input_tokens: usage.prompt_tokens,
-                    output_tokens: usage.completion_tokens,
-                    total_tokens: usage.total_tokens,
-                };
-            }
-            lastMessage = message;
-        }
-        return result;
-    }
-    /**
-     * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
-     * @en Composes tool messages, associating AI tool calls with tool execution results.
-     */
-    composeToolMessages(messages) {
-        var _a, _b;
-        const result = [];
-        const assistantToolMessages = new Map();
-        const toolParentMessage = new Map();
-        for (const message of messages) {
-            if (StreamingMessageType.isToolAssistant(message)) {
-                /** @ts-ignore 只有 tool_call_chunks 的 args 才是文本 */
-                (_a = (message.tool_calls || message.tool_call_chunks)) === null || _a === void 0 ? void 0 : _a.forEach((element) => {
-                    assistantToolMessages.set(element.id, element);
-                    toolParentMessage.set(element.id, message);
-                });
-                if (!message.content)
-                    continue;
-            }
-            if (StreamingMessageType.isTool(message) && !message.tool_input) {
-                const assistantToolMessage = assistantToolMessages.get(message.tool_call_id);
-                const parentMessage = toolParentMessage.get(message.tool_call_id);
-                if (assistantToolMessage) {
-                    message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
-                    if (message.additional_kwargs) {
-                        message.additional_kwargs.done = true;
-                        message.done = true;
-                    }
-                    else {
-                        message.done = true;
-                        message.additional_kwargs = {
-                            done: true,
-                        };
-                    }
-                }
-                if (parentMessage) {
-                    message.usage_metadata = parentMessage.usage_metadata;
-                    message.node_name = parentMessage.name;
-                    // 修补特殊情况下,tool name 丢失的问题
-                    if (!message.name) {
-                        message.name = (_b = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)) === null || _b === void 0 ? void 0 : _b.name;
-                    }
-                }
-            }
-            result.push(message);
-        }
-        return result;
+        return this.messageProcessor.renderMessages(this.graphState, () => this.getGraphNodeNow());
     }
     /**
      * @zh 获取 Token 计数器信息。
      * @en Gets the Token counter information.
      */
     get tokenCounter() {
-        return this.
+        return this.messageProcessor.getGraphMessages().reduce((acc, message) => {
             var _a, _b, _c, _d, _e;
             if (message.usage_metadata) {
                 acc.total_tokens += ((_a = message.usage_metadata) === null || _a === void 0 ? void 0 : _a.total_tokens) || 0;
@@ -325,19 +165,6 @@ export class LangGraphClient extends Client {
             output_tokens: 0,
         });
     }
-    /**
-     * @zh 注册流式更新的回调函数。
-     * @en Registers a callback function for streaming updates.
-     */
-    onStreamingUpdate(callback) {
-        this.streamingCallbacks.add(callback);
-        return () => {
-            this.streamingCallbacks.delete(callback);
-        };
-    }
-    emitStreamingUpdate(event) {
-        this.streamingCallbacks.forEach((callback) => callback(event));
-    }
     /** 前端工具人机交互时,锁住面板 */
     isFELocking(messages) {
         var _a;
@@ -369,13 +196,10 @@ export class LangGraphClient extends Client {
         }
         if (!this.currentThread) {
            await this.createThread();
-            this.
-
+            this.emit("thread", {
+                event: "thread/create",
                 data: {
-
-                data: {
-                    thread: this.currentThread,
-                },
+                    thread: this.currentThread,
                 },
             });
         }
@@ -409,11 +233,8 @@ export class LangGraphClient extends Client {
         };
         const streamResponse = await createStreamResponse();
         const streamRecord = [];
-        this.
-
-            data: {
-                event: "start",
-            },
+        this.emit("start", {
+            event: "start",
         });
         for await (const chunk of streamResponse) {
             streamRecord.push(chunk);
@@ -421,33 +242,24 @@ export class LangGraphClient extends Client {
                 this.currentRun = chunk.data;
             }
             else if (chunk.event === "error") {
-                this.
-                    type: "error",
-                    data: chunk,
-                });
+                this.emit("error", chunk);
             }
             else if (chunk.event === "messages/partial") {
                 for (const message of chunk.data) {
-                    this.updateStreamingMessage(message);
+                    this.messageProcessor.updateStreamingMessage(message);
                 }
-                this.
-                    type: "message",
-                    data: chunk,
-                });
+                this.emit("message", chunk);
                 continue;
             }
             else if (chunk.event === "values") {
                 const data = chunk.data;
                 if (data.messages) {
                     const isResume = !!(command === null || command === void 0 ? void 0 : command.resume);
-                    const isLongerThanLocal = data.messages.length >= this.
+                    const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
                     // resume 情况下,长度低于前端 message 的统统不接受
                     if (!isResume || (isResume && isLongerThanLocal)) {
-                        this.
-                        this.
-                            type: "value",
-                            data: chunk,
-                        });
+                        this.messageProcessor.setGraphMessages(data.messages);
+                        this.emit("value", chunk);
                     }
                     this.graphState = chunk.data;
                 }
@@ -456,7 +268,7 @@ export class LangGraphClient extends Client {
             else if (chunk.event.startsWith("values|")) {
                 // 这个 values 必然是子 values
                 if ((_a = chunk.data) === null || _a === void 0 ? void 0 : _a.messages) {
-                    this.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
+                    this.messageProcessor.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
                 }
                 this.graphPosition = chunk.event.split("|")[1];
             }
@@ -464,13 +276,10 @@ export class LangGraphClient extends Client {
         const data = await this.runFETool();
         if (data)
             streamRecord.push(...data);
-        this.
-
-            data: {
-                event: "done",
-            },
+        this.emit("done", {
+            event: "done",
         });
-        this.
+        this.messageProcessor.clearStreamingMessages();
         return streamRecord;
     }
     getGraphPosition() {
@@ -486,26 +295,9 @@ export class LangGraphClient extends Client {
         const position = this.getGraphPosition();
         return position[position.length - 1];
     }
-    /** 子图的数据需要通过 merge 的方式重新进行合并更新 */
-    mergeSubGraphMessagesToStreamingMessages(messages) {
-        const map = new Map(messages.filter((i) => i.id).map((i) => [i.id, i]));
-        this.streamingMessage.forEach((i) => {
-            if (map.has(i.id)) {
-                const newValue = map.get(i.id);
-                Object.assign(i, newValue);
-                map.delete(i.id);
-            }
-        });
-        // 剩余的 message 一定不在 streamMessage 中
-        map.forEach((i) => {
-            if (i.type === "tool" && i.tool_call_id) {
-                this.streamingMessage.push(i);
-            }
-        });
-    }
     runFETool() {
         var _a;
-        const data = this.
+        const data = this.messageProcessor.getStreamingMessages(); // 需要保证不被清理
         const lastMessage = data[data.length - 1];
         if (!lastMessage)
             return;
@@ -581,14 +373,14 @@
         await this.initAssistant((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.graph_id);
         this.currentThread = null;
         this.graphState = {};
-        this.
-        this.
+        this.messageProcessor.setGraphMessages([]);
+        this.messageProcessor.clearStreamingMessages();
         this.currentRun = undefined;
         this.tools.clearWaiting();
-        this.
-
+        this.emit("value", {
+            event: "messages/partial",
             data: {
-
+                messages: [],
             },
         });
     }
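Net effect of the LangGraphClient.js changes: the client now wraps an SDK Client (proxied through the assistants/threads/runs getters) instead of extending it, message bookkeeping moves into MessageProcessor, and the bespoke onStreamingUpdate/emitStreamingUpdate callback registry is replaced by eventemitter3 events ("start", "message", "value", "error", "done", "thread"). A minimal consumer-side sketch of the migration, assuming LangGraphClient is exported from the package root and that `client` is the only required config field (variable names and the apiUrl are illustrative, not part of this diff):

    import { Client } from "@langchain/langgraph-sdk";
    import { LangGraphClient } from "@langgraph-js/sdk";

    // v2 composes an SDK Client (see `this.client = config.client` above) rather than extending it.
    const client = new LangGraphClient({
        client: new Client({ apiUrl: "http://localhost:2024" }),
    });

    // v1.x (removed): const unsubscribe = client.onStreamingUpdate((event) => { /* ... */ });
    // v2.x: subscribe per event name via the inherited EventEmitter API.
    client.on("message", (chunk) => console.log("messages/partial", chunk.data));
    client.on("value", (chunk) => console.log("graph values", chunk.data));
    client.on("error", (chunk) => console.error("stream error", chunk));
    client.on("done", () => console.log("stream finished"));

Listener removal follows standard eventemitter3 semantics (client.off / client.removeListener).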
package/dist/MessageProcessor.d.ts
ADDED
@@ -0,0 +1,94 @@
+import { Message, AIMessage, ToolMessage } from "@langchain/langgraph-sdk";
+import { RenderMessage } from "./LangGraphClient.js";
+/**
+ * @zh StreamingMessageType 类用于判断消息的类型。
+ * @en The StreamingMessageType class is used to determine the type of a message.
+ */
+export declare class StreamingMessageType {
+    static isTool(m: Message): m is ToolMessage;
+    static isToolAssistant(m: Message): m is AIMessage;
+}
+/**
+ * @zh MessageProcessor 类用于统一处理 Message 相关的逻辑,避免重复处理。
+ * @en The MessageProcessor class is used to uniformly handle Message-related logic and avoid duplicate processing.
+ */
+export declare class MessageProcessor {
+    private subAgentsKey;
+    /** 流式消息缓存 */
+    private streamingMessage;
+    /** 图发过来的更新信息 */
+    private graphMessages;
+    constructor(subAgentsKey?: string);
+    /**
+     * @zh 获取流式消息
+     * @en Get streaming messages
+     */
+    getStreamingMessages(): RenderMessage[];
+    /**
+     * @zh 设置流式消息
+     * @en Set streaming messages
+     */
+    setStreamingMessages(messages: RenderMessage[]): void;
+    /**
+     * @zh 清空流式消息
+     * @en Clear streaming messages
+     */
+    clearStreamingMessages(): void;
+    /**
+     * @zh 获取图消息
+     * @en Get graph messages
+     */
+    getGraphMessages(): RenderMessage[];
+    /**
+     * @zh 设置图消息
+     * @en Set graph messages
+     */
+    setGraphMessages(messages: RenderMessage[]): void;
+    /**
+     * @zh 更新流式消息
+     * @en Update streaming message
+     */
+    updateStreamingMessage(message: RenderMessage): void;
+    /**
+     * @zh 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组
+     * @en Combine graphMessages and streamingMessage and return a new message array
+     */
+    combineGraphMessagesWithStreamingMessages(): RenderMessage[];
+    /**
+     * @zh 子图的数据需要通过 merge 的方式重新进行合并更新
+     * @en Subgraph data needs to be merged and updated through merge method
+     */
+    mergeSubGraphMessagesToStreamingMessages(messages: Message[]): void;
+    /**
+     * @zh 克隆消息对象
+     * @en Clone message object
+     */
+    cloneMessage(message: Message): Message;
+    /**
+     * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
+     * @en Attaches additional information to messages, such as spend time, unique ID, etc.
+     */
+    attachInfoForMessage(messages: RenderMessage[]): RenderMessage[];
+    /**
+     * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
+     * @en Composes tool messages, associating AI tool calls with tool execution results.
+     */
+    composeToolMessages(messages: RenderMessage[]): RenderMessage[];
+    /**
+     * @zh 转换 subAgent 消息为工具的子消息
+     * @en Convert subAgent messages to tool sub-messages
+     */
+    convertSubAgentMessages(messages: RenderMessage[], graphState: any): RenderMessage[];
+    /**
+     * @zh 生成用于 UI 中的流式渲染的消息
+     * @en Generate messages used for streaming rendering in the UI
+     */
+    renderMessages(graphState: any, getGraphNodeNow: () => {
+        name: string;
+    }): RenderMessage[];
+    /**
+     * @zh 统一的消息处理入口,按顺序执行所有处理步骤
+     * @en Unified message processing entry point, executing all processing steps in order
+     */
+    processMessages(messages: RenderMessage[], graphState?: any): RenderMessage[];
+}
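The declarations above are the entire public surface of the new processor, so a rough usage sketch may help. It assumes MessageProcessor is re-exported from the package root (the index.ts change is not shown in this excerpt) and uses made-up message literals:

    import { MessageProcessor } from "@langgraph-js/sdk";

    const processor = new MessageProcessor("task_store"); // subAgentsKey, same default the client passes in

    // Graph snapshots and streaming deltas live in separate buffers.
    processor.setGraphMessages([{ id: "m1", type: "human", content: "hi" } as any]);
    processor.updateStreamingMessage({ id: "m2", type: "ai", content: "he" } as any);
    processor.updateStreamingMessage({ id: "m2", type: "ai", content: "hello" } as any); // same id: replaces the prior chunk

    // renderMessages merges both buffers, composes tool messages, attaches timing/usage info,
    // and converts subAgent messages into tool sub-messages, per the declarations above.
    const rendered = processor.renderMessages({}, () => ({ name: "chat_node" }));
    console.log(rendered.map((m) => m.content));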