@langgraph-js/sdk 1.12.0 → 2.0.1
This diff compares publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- package/dist/LangGraphClient.d.ts +158 -73
- package/dist/LangGraphClient.js +52 -311
- package/dist/MessageProcessor.d.ts +94 -0
- package/dist/MessageProcessor.js +324 -0
- package/dist/TestKit.d.ts +2 -2
- package/dist/client/LanggraphServer.d.ts +3 -0
- package/dist/client/LanggraphServer.js +4 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/dist/types.d.ts +106 -0
- package/dist/types.js +1 -0
- package/dist/ui-store/createChatStore.d.ts +4 -4
- package/dist/ui-store/createChatStore.js +37 -21
- package/package.json +2 -1
- package/src/LangGraphClient.ts +108 -347
- package/src/MessageProcessor.ts +352 -0
- package/src/TestKit.ts +1 -1
- package/src/client/LanggraphServer.ts +7 -0
- package/src/index.ts +2 -0
- package/src/types.ts +129 -0
- package/src/ui-store/createChatStore.ts +41 -21
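
The headline change in 2.x is structural: LangGraphClient no longer extends the official Client. It now receives an ILangGraphClient through the new required `client` field in LangGraphClientConfig, proxies `assistants`/`threads`/`runs` to it, and extends EventEmitter<LangGraphEvents> from eventemitter3. A minimal construction sketch under those assumptions (the import paths and the cast are illustrative, not taken from the package docs):

    import { Client } from "@langchain/langgraph-sdk";
    import { LangGraphClient } from "@langgraph-js/sdk";

    // 2.x: the inner client is injected instead of inherited. Any
    // ILangGraphClient-compatible implementation should work here; the
    // official SDK Client plus a cast is just one plausible choice.
    const client = new LangGraphClient({
        client: new Client({ apiUrl: "http://localhost:2024" }) as any,
    });

    // assistants/threads/runs are now getters that forward to config.client.
    const threads = await client.listThreads();

The full diff of package/src/LangGraphClient.ts follows.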
package/src/LangGraphClient.ts
CHANGED
@@ -1,25 +1,10 @@
-import {
+import type { Thread, Message, Assistant, HumanMessage, AIMessage, ToolMessage, Command } from "@langchain/langgraph-sdk";
+import { EventEmitter } from "eventemitter3";
 import { ToolManager } from "./ToolManager.js";
 import { CallToolResult } from "./tool/createTool.js";
-
-
-
-* Defaults to `Infinity`, which means no limit.
-*/
-maxConcurrency?: number;
-/**
-* The maximum number of retries that can be made for a single call,
-* with an exponential backoff between each attempt. Defaults to 6.
-*/
-maxRetries?: number;
-onFailedResponseHook?: any;
-/**
-* Specify a custom fetch implementation.
-*
-* By default we expect the `fetch` is available in the global scope.
-*/
-fetch?: typeof fetch | ((...args: any[]) => any);
-}
+import { ILangGraphClient } from "./types.js";
+import { MessageProcessor } from "./MessageProcessor.js";
+
 export type RenderMessage = Message & {
 /** 对于 AIMessage 来说是节点名称,对于工具节点来说是工具名称 */
 name?: string;
@@ -61,55 +46,85 @@ export type SendMessageOptions = {
 export interface LangGraphClientConfig {
 apiUrl?: string;
 apiKey?: string;
-callerOptions?:
+callerOptions?: {
+/**
+* The maximum number of concurrent calls that can be made.
+* Defaults to `Infinity`, which means no limit.
+*/
+maxConcurrency?: number;
+/**
+* The maximum number of retries that can be made for a single call,
+* with an exponential backoff between each attempt. Defaults to 6.
+*/
+maxRetries?: number;
+onFailedResponseHook?: any;
+/**
+* Specify a custom fetch implementation.
+*
+* By default we expect the `fetch` is available in the global scope.
+*/
+fetch?: typeof fetch | ((...args: any[]) => any);
+};
 timeoutMs?: number;
 defaultHeaders?: Record<string, string | null | undefined>;
+/** 自定义客户端实现,如果不提供则使用官方 Client */
+client: ILangGraphClient<any, any>;
 }

-
-
-
-
-
-
-
-}
-
-
-
-
-
-}
-static isToolAssistant(m: Message): m is AIMessage {
-/** @ts-ignore */
-return m.type === "ai" && (m.tool_calls?.length || m.tool_call_chunks?.length);
-}
+// 定义事件数据类型
+export interface LangGraphEvents {
+/** 流开始事件 */
+start: { event: "start" };
+/** 消息部分更新事件 */
+message: { event: "messages/partial"; data: Message[] };
+/** 值更新事件 */
+value: { event: "messages/partial" | "values"; data: { messages?: Message[] } };
+/** 错误事件 */
+error: { event: "error"; data: any };
+/** Thread 创建事件 */
+thread: { event: "thread/create"; data: { thread: Thread } };
+/** 流完成事件 */
+done: { event: "done" };
 }

-type StreamingUpdateEvent = {
-type: "message" | "value" | "update" | "error" | "thread" | "done" | "start";
-data: any;
-};
-
-type StreamingUpdateCallback = (event: StreamingUpdateEvent) => void;
-
 /**
 * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
 * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
 */
-export class LangGraphClient extends
+export class LangGraphClient<TStateType = unknown, TUpdateType = TStateType> extends EventEmitter<LangGraphEvents> {
+private client: ILangGraphClient<TStateType, TUpdateType>;
 private currentAssistant: Assistant | null = null;
-private currentThread: Thread | null = null;
-private streamingCallbacks: Set<StreamingUpdateCallback> = new Set();
+private currentThread: Thread<TStateType> | null = null;
 tools: ToolManager = new ToolManager();
+availableAssistants: Assistant[] = [];
+graphState: any = {};
+currentRun?: { run_id: string };
 stopController: AbortController | null = null;
 /** 用于存储 subAgent 状态数据的键 */
 subAgentsKey = "task_store";
+/** Message 处理器 */
+private messageProcessor: MessageProcessor;

 constructor(config: LangGraphClientConfig) {
-super(
+super();
+this.client = config.client;
+this.messageProcessor = new MessageProcessor(this.subAgentsKey);
+}
+
+/** 代理 assistants 属性到内部 client */
+get assistants() {
+return this.client.assistants;
+}
+
+/** 代理 threads 属性到内部 client */
+get threads() {
+return this.client.threads;
+}
+
+/** 代理 runs 属性到内部 client */
+get runs() {
+return this.client.runs;
 }
-availableAssistants: Assistant[] = [];
 private listAssistants() {
 return this.assistants.search({
 metadata: null,
@@ -127,7 +142,7 @@ export class LangGraphClient extends Client {
 this.availableAssistants = assistants;
 if (assistants.length > 0) {
 if (agentName) {
-this.currentAssistant = assistants.find((assistant) => assistant.graph_id === agentName) || null;
+this.currentAssistant = assistants.find((assistant: any) => assistant.graph_id === agentName) || null;
 if (!this.currentAssistant) {
 throw new Error("Agent not found: " + agentName);
 }
@@ -164,7 +179,7 @@ export class LangGraphClient extends Client {
 }

 graphVisualize() {
-return this.assistants.getGraph(this.currentAssistant?.assistant_id!, {
+return this.assistants.getGraph((this.currentAssistant as any)?.assistant_id!, {
 xray: true,
 });
 }
@@ -172,11 +187,14 @@ export class LangGraphClient extends Client {
 * @zh 列出所有的 Thread。
 * @en Lists all Threads.
 */
-async listThreads
-return this.threads.search
+async listThreads() {
+return this.threads.search({
 sortOrder: "desc",
 });
 }
+async deleteThread(threadId: string) {
+return this.threads.delete(threadId);
+}

 /**
 * @zh 从历史中恢复 Thread 数据。
@@ -185,249 +203,42 @@ export class LangGraphClient extends Client {
 async resetThread(agent: string, threadId: string) {
 await this.initAssistant(agent);
 this.currentThread = await this.threads.get(threadId);
-this.graphState = this.currentThread.values;
-
-this.
-
+this.graphState = (this.currentThread as any).values;
+const graphMessages = this.graphState?.messages || [];
+this.messageProcessor.setGraphMessages(graphMessages);
+this.emit("value", {
+event: "messages/partial",
 data: {
-
-data: {
-messages: this.graphMessages,
-},
+messages: this.messageProcessor.getGraphMessages(),
 },
 });
 return this.currentThread;
 }
 // 从历史中恢复时,应该恢复流式状态
 async resetStream() {
-const runs = await this.runs.list(this.currentThread!.thread_id);
-const runningRun = runs?.find((run) => run.status === "running" || run.status === "pending");
+const runs = await this.runs.list((this.currentThread as any)!.thread_id);
+const runningRun = runs?.find((run: any) => run.status === "running" || run.status === "pending");
 if (runningRun) {
 await this.sendMessage([], { joinRunId: runningRun.run_id });
 }
 }

-streamingMessage: RenderMessage[] = [];
-/** 图发过来的更新信息 */
-graphMessages: RenderMessage[] = [];
 cloneMessage(message: Message): Message {
-return
-}
-private updateStreamingMessage(message: RenderMessage) {
-const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
-if (!lastMessage?.id || message.id !== lastMessage.id) {
-this.streamingMessage.push(message);
-return;
-}
-this.streamingMessage[this.streamingMessage.length - 1] = message;
-}
-/** 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组 */
-private combineGraphMessagesWithStreamingMessages() {
-const idMap = new Map<string, RenderMessage>(this.streamingMessage.map((i) => [i.id!, i]));
-return [
-...this.graphMessages.map((i) => {
-if (idMap.has(i.id!)) {
-const newValue = idMap.get(i.id!)!;
-idMap.delete(i.id!);
-return newValue;
-}
-return i;
-}),
-...idMap.values(),
-];
+return this.messageProcessor.cloneMessage(message);
 }
 /**
 * @zh 用于 UI 中的流式渲染中的消息。
 * @en Messages used for streaming rendering in the UI.
 */
 get renderMessage() {
-
-const closedToolCallIds = new Set<string>();
-const result: Message[] = [];
-const inputMessages = this.combineGraphMessagesWithStreamingMessages();
-// console.log(inputMessages);
-// 从后往前遍历,这样可以保证最新的消息在前面
-for (let i = inputMessages.length - 1; i >= 0; i--) {
-const message = this.cloneMessage(inputMessages[i]);
-
-if (!message.id) {
-result.unshift(message);
-continue;
-}
-if (message.type === "ai") {
-/** @ts-ignore */
-if (!message.name) message.name = this.getGraphNodeNow().name;
-}
-if (StreamingMessageType.isToolAssistant(message)) {
-const m = message;
-// 记录这个 id 的消息,并添加到结果中
-previousMessage.set(message.id, m);
-
-/** @ts-ignore */
-const tool_calls: NonNullable<AIMessage["tool_calls"]> = (m as AIMessage).tool_calls?.length ? (m as AIMessage).tool_calls : (m as RenderMessage).tool_call_chunks;
-const new_tool_calls = tool_calls
-.filter((i) => {
-return !closedToolCallIds.has(i.id!);
-})!
-.map((tool, index) => {
-return {
-type: "tool",
-additional_kwargs: {},
-/** @ts-ignore */
-tool_input: m.additional_kwargs?.tool_calls?.[index]?.function?.arguments,
-id: tool.id,
-name: tool.name,
-response_metadata: {},
-tool_call_id: tool.id!,
-content: "",
-} as ToolMessage;
-});
-for (const tool of new_tool_calls) {
-if (!previousMessage.has(tool.id!)) {
-result.unshift(tool);
-previousMessage.set(tool.id!, tool);
-}
-}
-result.unshift(m);
-} else {
-if (message.type === "tool" && message.tool_call_id) {
-closedToolCallIds.add(message.tool_call_id);
-}
-
-previousMessage.set(message.id, message);
-result.unshift(message);
-}
-}
-
-return this.convertSubAgentMessages(this.attachInfoForMessage(this.composeToolMessages(result as RenderMessage[])));
-}
-/** 转换 subAgent 消息为工具的子消息 */
-private convertSubAgentMessages(messages: RenderMessage[]) {
-const origin_task_store = this.graphState[this.subAgentsKey];
-if (!origin_task_store) return messages;
-
-const task_store = JSON.parse(JSON.stringify(origin_task_store));
-console.log(messages);
-/** 获取 subAgent 消息的 id,用于流式过程中对数据进行标记 */
-messages
-.filter((i) => {
-return i.node_name?.startsWith("subagent_");
-})
-.forEach((i) => {
-const tool_call_id = i.node_name!.replace("subagent_", "");
-const store = task_store[tool_call_id];
-if (store) {
-// 根据 id 进行去重
-const exists = (store.messages as RenderMessage[]).some((msg) => msg.id === i.id);
-if (!exists) {
-(store.messages as RenderMessage[]).push(i);
-}
-} else {
-task_store[tool_call_id] = {
-messages: [i],
-};
-}
-});
-
-const ignoreIds = new Set<string>();
-Object.values(task_store).forEach((task: any) => {
-task.messages.forEach((message: RenderMessage) => {
-ignoreIds.add(message.id!);
-});
-});
-const result: RenderMessage[] = [];
-for (const message of messages) {
-if (message.type === "tool" && message.tool_call_id) {
-const task = task_store[message.tool_call_id];
-if (task) {
-message.sub_agent_messages = this.attachInfoForMessage(this.composeToolMessages(task.messages));
-}
-}
-if (message.id && ignoreIds.has(message.id)) continue;
-result.push(message);
-}
-return result;
-}
-/**
-* @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
-* @en Attaches additional information to messages, such as spend time, unique ID, etc.
-*/
-private attachInfoForMessage(result: RenderMessage[]) {
-let lastMessage: RenderMessage | null = null;
-for (const message of result) {
-const createTime = message.response_metadata?.create_time || "";
-// 工具必须要使用 tool_call_id 来保证一致性
-message.unique_id = message.tool_call_id! || message.id!;
-
-message.spend_time = new Date(createTime).getTime() - new Date(lastMessage?.response_metadata?.create_time || createTime).getTime();
-if (!message.usage_metadata && (message as AIMessage).response_metadata?.usage) {
-const usage = (message as AIMessage).response_metadata!.usage as {
-prompt_tokens: number;
-completion_tokens: number;
-total_tokens: number;
-};
-message.usage_metadata = {
-...usage,
-input_tokens: usage.prompt_tokens,
-output_tokens: usage.completion_tokens,
-total_tokens: usage.total_tokens,
-};
-}
-lastMessage = message;
-}
-return result;
-}
-/**
-* @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
-* @en Composes tool messages, associating AI tool calls with tool execution results.
-*/
-private composeToolMessages(messages: RenderMessage[]): RenderMessage[] {
-const result: RenderMessage[] = [];
-const assistantToolMessages = new Map<string, { args: string }>();
-const toolParentMessage = new Map<string, RenderMessage>();
-for (const message of messages) {
-if (StreamingMessageType.isToolAssistant(message)) {
-/** @ts-ignore 只有 tool_call_chunks 的 args 才是文本 */
-(message.tool_calls || message.tool_call_chunks)?.forEach((element) => {
-assistantToolMessages.set(element.id!, element);
-toolParentMessage.set(element.id!, message);
-});
-if (!message.content) continue;
-}
-if (StreamingMessageType.isTool(message) && !message.tool_input) {
-const assistantToolMessage = assistantToolMessages.get(message.tool_call_id!);
-const parentMessage = toolParentMessage.get(message.tool_call_id!);
-if (assistantToolMessage) {
-message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
-if (message.additional_kwargs) {
-message.additional_kwargs.done = true;
-message.done = true;
-} else {
-message.done = true;
-message.additional_kwargs = {
-done: true,
-};
-}
-}
-if (parentMessage) {
-message.usage_metadata = parentMessage.usage_metadata;
-message.node_name = parentMessage.name;
-// 修补特殊情况下,tool name 丢失的问题
-if (!message.name) {
-message.name = (parentMessage as AIMessage).tool_calls!.find((i) => i.id === message.tool_call_id)?.name;
-}
-}
-}
-result.push(message);
-}
-return result;
+return this.messageProcessor.renderMessages(this.graphState, () => this.getGraphNodeNow());
 }
 /**
 * @zh 获取 Token 计数器信息。
 * @en Gets the Token counter information.
 */
 get tokenCounter() {
-return this.
+return this.messageProcessor.getGraphMessages().reduce(
 (acc, message) => {
 if (message.usage_metadata) {
 acc.total_tokens += message.usage_metadata?.total_tokens || 0;
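
The hunk above removes the message bookkeeping that used to live on the client (streamingMessage, graphMessages, combineGraphMessagesWithStreamingMessages, convertSubAgentMessages, attachInfoForMessage, composeToolMessages); per the file list, that logic now lives in the new MessageProcessor.ts. The core merge rule is unchanged and easy to state: a streamed message overrides the graph message that shares its id, and leftovers are appended. A condensed, standalone sketch of that rule (the function name is ours, not the package's):

    import type { Message } from "@langchain/langgraph-sdk";

    // Merge rule from the removed combineGraphMessagesWithStreamingMessages:
    // a streaming entry replaces the graph entry with the same id, and
    // streaming entries with no graph counterpart are appended at the end.
    function combineById(graph: Message[], streaming: Message[]): Message[] {
        const byId = new Map(streaming.map((m) => [m.id!, m]));
        const merged = graph.map((m) => {
            const override = byId.get(m.id!);
            if (override) byId.delete(m.id!);
            return override ?? m;
        });
        return [...merged, ...byId.values()];
    }
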
@@ -454,20 +265,6 @@ export class LangGraphClient extends Client {
 );
 }

-/**
-* @zh 注册流式更新的回调函数。
-* @en Registers a callback function for streaming updates.
-*/
-onStreamingUpdate(callback: StreamingUpdateCallback) {
-this.streamingCallbacks.add(callback);
-return () => {
-this.streamingCallbacks.delete(callback);
-};
-}
-
-private emitStreamingUpdate(event: StreamingUpdateEvent) {
-this.streamingCallbacks.forEach((callback) => callback(event));
-}
 /** 前端工具人机交互时,锁住面板 */
 isFELocking(messages: RenderMessage[]) {
 const lastMessage = messages[messages.length - 1];
@@ -477,15 +274,14 @@ export class LangGraphClient extends Client {
 const tool = this.tools.getTool(lastMessage?.name!);
 return tool && tool.render && lastMessage?.type === "tool" && !lastMessage?.additional_kwargs?.done;
 }
-
-currentRun?: { run_id: string };
+
 /**
 * @zh 取消当前的 Run。
 * @en Cancels the current Run.
 */
 cancelRun() {
-if (this.currentThread?.thread_id && this.currentRun?.run_id) {
-this.runs.cancel(this.currentThread!.thread_id, this.currentRun.run_id);
+if ((this.currentThread as any)?.thread_id && this.currentRun?.run_id) {
+this.runs.cancel((this.currentThread as any)!.thread_id, this.currentRun.run_id);
 }
 }
 /**
@@ -498,13 +294,10 @@ export class LangGraphClient extends Client {
 }
 if (!this.currentThread) {
 await this.createThread();
-this.
-
+this.emit("thread", {
+event: "thread/create",
 data: {
-
-data: {
-thread: this.currentThread,
-},
+thread: this.currentThread,
 },
 });
 }
@@ -541,43 +334,31 @@ export class LangGraphClient extends Client {
 const streamResponse = await createStreamResponse();

 const streamRecord: any[] = [];
-this.
-
-data: {
-event: "start",
-},
+this.emit("start", {
+event: "start",
 });
 for await (const chunk of streamResponse) {
 streamRecord.push(chunk);
 if (chunk.event === "metadata") {
 this.currentRun = chunk.data;
 } else if (chunk.event === "error") {
-this.
-type: "error",
-data: chunk,
-});
+this.emit("error", chunk);
 } else if (chunk.event === "messages/partial") {
 for (const message of chunk.data) {
-this.updateStreamingMessage(message);
+this.messageProcessor.updateStreamingMessage(message);
 }
-this.
-type: "message",
-data: chunk,
-});
+this.emit("message", chunk);
 continue;
 } else if (chunk.event === "values") {
 const data = chunk.data as { messages: Message[] };

 if (data.messages) {
 const isResume = !!command?.resume;
-const isLongerThanLocal = data.messages.length >= this.
+const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
 // resume 情况下,长度低于前端 message 的统统不接受
 if (!isResume || (isResume && isLongerThanLocal)) {
-this.
-this.
-type: "value",
-data: chunk,
-});
+this.messageProcessor.setGraphMessages(data.messages as RenderMessage[]);
+this.emit("value", chunk);
 }
 this.graphState = chunk.data;
 }
@@ -585,20 +366,17 @@ export class LangGraphClient extends Client {
 } else if (chunk.event.startsWith("values|")) {
 // 这个 values 必然是子 values
 if (chunk.data?.messages) {
-this.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
+this.messageProcessor.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
 }
 this.graphPosition = chunk.event.split("|")[1];
 }
 }
 const data = await this.runFETool();
 if (data) streamRecord.push(...data);
-this.
-
-data: {
-event: "done",
-},
+this.emit("done", {
+event: "done",
 });
-this.
+this.messageProcessor.clearStreamingMessages();
 return streamRecord;
 }
 /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
@@ -616,26 +394,9 @@ export class LangGraphClient extends Client {
 const position = this.getGraphPosition();
 return position[position.length - 1];
 }
-/** 子图的数据需要通过 merge 的方式重新进行合并更新 */
-private mergeSubGraphMessagesToStreamingMessages(messages: Message[]) {
-const map = new Map(messages.filter((i) => i.id).map((i) => [i.id!, i]));
-this.streamingMessage.forEach((i) => {
-if (map.has(i.id!)) {
-const newValue = map.get(i.id!)!;
-Object.assign(i, newValue);
-map.delete(i.id!);
-}
-});
-// 剩余的 message 一定不在 streamMessage 中
-map.forEach((i) => {
-if (i.type === "tool" && i.tool_call_id) {
-this.streamingMessage.push(i as RenderMessage);
-}
-});
-}

 private runFETool() {
-const data = this.
+const data = this.messageProcessor.getStreamingMessages(); // 需要保证不被清理
 const lastMessage = data[data.length - 1];
 if (!lastMessage) return;
 // 如果最后一条消息是前端工具消息,则调用工具
@@ -713,14 +474,14 @@ export class LangGraphClient extends Client {
 await this.initAssistant(this.currentAssistant?.graph_id!);
 this.currentThread = null;
 this.graphState = {};
-this.
-this.
+this.messageProcessor.setGraphMessages([]);
+this.messageProcessor.clearStreamingMessages();
 this.currentRun = undefined;
 this.tools.clearWaiting();
-this.
-
+this.emit("value", {
+event: "messages/partial",
 data: {
-
+messages: [],
 },
 });
 }