@langgraph-js/sdk 1.7.11 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +0 -0
- package/.turbo/turbo-build.log +5 -0
- package/LICENSE +201 -201
- package/README.md +163 -163
- package/dist/LangGraphClient.js +3 -0
- package/dist/ToolManager.d.ts +1 -1
- package/dist/ToolManager.js +2 -1
- package/dist/server/createState.d.ts +13 -0
- package/dist/server/createState.js +20 -0
- package/dist/server/feTools.d.ts +16 -0
- package/dist/server/feTools.js +37 -0
- package/dist/server/index.d.ts +3 -0
- package/dist/server/index.js +3 -0
- package/dist/server/interrupt/index.d.ts +23 -0
- package/dist/server/interrupt/index.js +36 -0
- package/dist/server/swarm/handoff.d.ts +11 -0
- package/dist/server/swarm/handoff.js +84 -0
- package/dist/server/swarm/keepState.d.ts +6 -0
- package/dist/server/swarm/keepState.js +21 -0
- package/dist/server/tools/index.d.ts +1 -0
- package/dist/server/tools/index.js +1 -0
- package/dist/server/tools/sequential-thinking.d.ts +52 -0
- package/dist/server/tools/sequential-thinking.js +69 -0
- package/dist/server/utils.d.ts +3 -0
- package/dist/server/utils.js +24 -0
- package/dist/tool/createTool.d.ts +25 -23
- package/dist/tool/createTool.js +31 -9
- package/package.json +1 -1
- package/src/LangGraphClient.ts +658 -655
- package/src/SpendTime.ts +60 -60
- package/src/ToolManager.ts +132 -132
- package/src/index.ts +5 -5
- package/src/tool/ToolUI.ts +55 -55
- package/src/tool/copilotkit-actions.ts +72 -72
- package/src/tool/createTool.ts +123 -107
- package/src/tool/index.ts +3 -3
- package/src/tool/utils.ts +158 -158
- package/src/ui-store/UnionStore.ts +29 -29
- package/src/ui-store/createChatStore.ts +295 -295
- package/src/ui-store/index.ts +2 -2
- package/src/ui-store/rafDebounce.ts +29 -29
- package/test/testResponse.json +5418 -5418
- package/tsconfig.json +112 -112
package/src/LangGraphClient.ts
CHANGED
@@ -1,655 +1,658 @@
+import { Client, Thread, Message, Assistant, HumanMessage, AIMessage, ToolMessage, Command } from "@langchain/langgraph-sdk";
+import { ToolManager } from "./ToolManager.js";
+import { CallToolResult } from "./tool/createTool.js";
+interface AsyncCallerParams {
+    /**
+     * The maximum number of concurrent calls that can be made.
+     * Defaults to `Infinity`, which means no limit.
+     */
+    maxConcurrency?: number;
+    /**
+     * The maximum number of retries that can be made for a single call,
+     * with an exponential backoff between each attempt. Defaults to 6.
+     */
+    maxRetries?: number;
+    onFailedResponseHook?: any;
+    /**
+     * Specify a custom fetch implementation.
+     *
+     * By default we expect the `fetch` is available in the global scope.
+     */
+    fetch?: typeof fetch | ((...args: any[]) => any);
+}
+export type RenderMessage = Message & {
+    /** 对于 AIMessage 来说是节点名称,对于工具节点来说是工具名称 */
+    name?: string;
+    /** 工具节点的触发节点名称 */
+    node_name?: string;
+    /** 工具入参 ,聚合而来*/
+    tool_input?: string;
+    additional_kwargs?: {
+        done?: boolean;
+        tool_calls?: {
+            function: {
+                arguments: string;
+            };
+        }[];
+    };
+    usage_metadata?: {
+        total_tokens: number;
+        input_tokens: number;
+        output_tokens: number;
+    };
+    tool_call_id?: string;
+    response_metadata?: {
+        create_time: string;
+    };
+    /** 耗时 */
+    spend_time?: number;
+    /** 渲染时的唯一 id,聚合而来*/
+    unique_id?: string;
+    /** 工具调用是否完成 */
+    done?: boolean;
+};
+export type SendMessageOptions = {
+    extraParams?: Record<string, any>;
+    _debug?: { streamResponse?: any };
+    command?: Command;
+};
+export interface LangGraphClientConfig {
+    apiUrl?: string;
+    apiKey?: string;
+    callerOptions?: AsyncCallerParams;
+    timeoutMs?: number;
+    defaultHeaders?: Record<string, string | null | undefined>;
+}
+
+/**
+ * @zh StreamingMessageType 类用于判断消息的类型。
+ * @en The StreamingMessageType class is used to determine the type of a message.
+ */
+export class StreamingMessageType {
+    static isUser(m: Message): m is HumanMessage {
+        return m.type === "human";
+    }
+    static isTool(m: Message): m is ToolMessage {
+        return m.type === "tool";
+    }
+    static isAssistant(m: Message): m is AIMessage {
+        return m.type === "ai" && !this.isToolAssistant(m);
+    }
+    static isToolAssistant(m: Message): m is AIMessage {
+        /** @ts-ignore */
+        return m.type === "ai" && (m.tool_calls?.length || m.tool_call_chunks?.length);
+    }
+}
+
+type StreamingUpdateEvent = {
+    type: "message" | "value" | "update" | "error" | "thread" | "done" | "start";
+    data: any;
+};
+
+type StreamingUpdateCallback = (event: StreamingUpdateEvent) => void;
+
+/**
+ * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
+ * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
+ */
+export class LangGraphClient extends Client {
+    private currentAssistant: Assistant | null = null;
+    private currentThread: Thread | null = null;
+    private streamingCallbacks: Set<StreamingUpdateCallback> = new Set();
+    tools: ToolManager = new ToolManager();
+    stopController: AbortController | null = null;
+
+    constructor(config: LangGraphClientConfig) {
+        super(config);
+    }
+    availableAssistants: Assistant[] = [];
+    private listAssistants() {
+        return this.assistants.search({
+            metadata: null,
+            offset: 0,
+            limit: 100,
+        });
+    }
+    /**
+     * @zh 初始化 Assistant。
+     * @en Initializes the Assistant.
+     */
+    async initAssistant(agentName?: string) {
+        try {
+            const assistants = await this.listAssistants();
+            this.availableAssistants = assistants;
+            if (assistants.length > 0) {
+                if (agentName) {
+                    this.currentAssistant = assistants.find((assistant) => assistant.graph_id === agentName) || null;
+                    if (!this.currentAssistant) {
+                        throw new Error("Agent not found: " + agentName);
+                    }
+                } else {
+                    this.currentAssistant = assistants[0];
+                }
+            } else {
+                throw new Error("No assistants found");
+            }
+        } catch (error) {
+            console.error("Failed to initialize LangGraphClient:", error);
+            throw error;
+        }
+    }
+
+    /**
+     * @zh 创建一个新的 Thread。
+     * @en Creates a new Thread.
+     */
+    async createThread({
+        threadId,
+    }: {
+        threadId?: string;
+    } = {}) {
+        try {
+            this.currentThread = await this.threads.create({
+                threadId,
+            });
+            return this.currentThread;
+        } catch (error) {
+            console.error("Failed to create new thread:", error);
+            throw error;
+        }
+    }
+
+    graphVisualize() {
+        return this.assistants.getGraph(this.currentAssistant?.assistant_id!, {
+            xray: true,
+        });
+    }
+    /**
+     * @zh 列出所有的 Thread。
+     * @en Lists all Threads.
+     */
+    async listThreads<T>() {
+        return this.threads.search<T>({
+            sortOrder: "desc",
+        });
+    }
+
+    /**
+     * @zh 从历史中恢复 Thread 数据。
+     * @en Resets the Thread data from history.
+     */
+    async resetThread(agent: string, threadId: string) {
+        await this.initAssistant(agent);
+        this.currentThread = await this.threads.get(threadId);
+        this.graphState = this.currentThread.values;
+        this.graphMessages = this.graphState.messages;
+        this.emitStreamingUpdate({
+            type: "value",
+            data: {
+                event: "messages/partial",
+                data: {
+                    messages: this.graphMessages,
+                },
+            },
+        });
+    }
+
+    streamingMessage: RenderMessage[] = [];
+    /** 图发过来的更新信息 */
+    graphMessages: RenderMessage[] = [];
+    cloneMessage(message: Message): Message {
+        return JSON.parse(JSON.stringify(message));
+    }
+    private updateStreamingMessage(message: RenderMessage) {
+        const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
+        if (!lastMessage?.id || message.id !== lastMessage.id) {
+            this.streamingMessage.push(message);
+            return;
+        }
+        this.streamingMessage[this.streamingMessage.length - 1] = message;
+    }
+    /** 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组 */
+    private combineGraphMessagesWithStreamingMessages() {
+        const idMap = new Map<string, RenderMessage>(this.streamingMessage.map((i) => [i.id!, i]));
+        return [
+            ...this.graphMessages.map((i) => {
+                if (idMap.has(i.id!)) {
+                    const newValue = idMap.get(i.id!)!;
+                    idMap.delete(i.id!);
+                    return newValue;
+                }
+                return i;
+            }),
+            ...idMap.values(),
+        ];
+    }
+    /**
+     * @zh 用于 UI 中的流式渲染中的消息。
+     * @en Messages used for streaming rendering in the UI.
+     */
+    get renderMessage() {
+        const previousMessage = new Map<string, Message>();
+        const closedToolCallIds = new Set<string>();
+        const result: Message[] = [];
+        const inputMessages = this.combineGraphMessagesWithStreamingMessages();
+        // console.log(inputMessages);
+        // 从后往前遍历,这样可以保证最新的消息在前面
+        for (let i = inputMessages.length - 1; i >= 0; i--) {
+            const message = this.cloneMessage(inputMessages[i]);
+
+            if (!message.id) {
+                result.unshift(message);
+                continue;
+            }
+            if (message.type === "ai") {
+                /** @ts-ignore */
+                if (!message.name) message.name = this.getGraphNodeNow().name;
+            }
+            if (StreamingMessageType.isToolAssistant(message)) {
+                const m = message;
+                // 记录这个 id 的消息,并添加到结果中
+                previousMessage.set(message.id, m);
+
+                /** @ts-ignore */
+                const tool_calls: NonNullable<AIMessage["tool_calls"]> = (m as AIMessage).tool_calls?.length ? (m as AIMessage).tool_calls : (m as RenderMessage).tool_call_chunks;
+                const new_tool_calls = tool_calls
+                    .filter((i) => {
+                        return !closedToolCallIds.has(i.id!);
+                    })!
+                    .map((tool, index) => {
+                        return {
+                            type: "tool",
+                            additional_kwargs: {},
+                            /** @ts-ignore */
+                            tool_input: m.additional_kwargs?.tool_calls?.[index]?.function?.arguments,
+                            id: tool.id,
+                            name: tool.name,
+                            response_metadata: {},
+                            tool_call_id: tool.id!,
+                            content: "",
+                        } as ToolMessage;
+                    });
+                for (const tool of new_tool_calls) {
+                    if (!previousMessage.has(tool.id!)) {
+                        result.unshift(tool);
+                        previousMessage.set(tool.id!, tool);
+                    }
+                }
+                result.unshift(m);
+            } else {
+                if (message.type === "tool" && message.tool_call_id) {
+                    closedToolCallIds.add(message.tool_call_id);
+                }
+
+                previousMessage.set(message.id, message);
+                result.unshift(message);
+            }
+        }
+
+        return this.attachInfoForMessage(this.composeToolMessages(result as RenderMessage[]));
+    }
+    /**
+     * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
+     * @en Attaches additional information to messages, such as spend time, unique ID, etc.
+     */
+    private attachInfoForMessage(result: RenderMessage[]) {
+        let lastMessage: RenderMessage | null = null;
+        for (const message of result) {
+            const createTime = message.response_metadata?.create_time || "";
+            // 工具必须要使用 tool_call_id 来保证一致性
+            message.unique_id = message.tool_call_id! || message.id!;
+
+            message.spend_time = new Date(createTime).getTime() - new Date(lastMessage?.response_metadata?.create_time || createTime).getTime();
+            if (!message.usage_metadata && (message as AIMessage).response_metadata?.usage) {
+                const usage = (message as AIMessage).response_metadata!.usage as {
+                    prompt_tokens: number;
+                    completion_tokens: number;
+                    total_tokens: number;
+                };
+                message.usage_metadata = {
+                    ...usage,
+                    input_tokens: usage.prompt_tokens,
+                    output_tokens: usage.completion_tokens,
+                    total_tokens: usage.total_tokens,
+                };
+            }
+            lastMessage = message;
+        }
+        return result;
+    }
+    /**
+     * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
+     * @en Composes tool messages, associating AI tool calls with tool execution results.
+     */
+    private composeToolMessages(messages: RenderMessage[]): RenderMessage[] {
+        const result: RenderMessage[] = [];
+        const assistantToolMessages = new Map<string, { args: string }>();
+        const toolParentMessage = new Map<string, RenderMessage>();
+        for (const message of messages) {
+            if (StreamingMessageType.isToolAssistant(message)) {
+                /** @ts-ignore 只有 tool_call_chunks 的 args 才是文本 */
+                (message.tool_calls || message.tool_call_chunks)?.forEach((element) => {
+                    assistantToolMessages.set(element.id!, element);
+                    toolParentMessage.set(element.id!, message);
+                });
+                if (!message.content) continue;
+            }
+            if (StreamingMessageType.isTool(message) && !message.tool_input) {
+                const assistantToolMessage = assistantToolMessages.get(message.tool_call_id!);
+                const parentMessage = toolParentMessage.get(message.tool_call_id!);
+                if (assistantToolMessage) {
+                    message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
+                    if (message.additional_kwargs) {
+                        message.additional_kwargs.done = true;
+                        message.done = true;
+                    } else {
+                        message.done = true;
+                        message.additional_kwargs = {
+                            done: true,
+                        };
+                    }
+                }
+                if (parentMessage) {
+                    message.usage_metadata = parentMessage.usage_metadata;
+                    message.node_name = parentMessage.name;
+                    // 修补特殊情况下,tool name 丢失的问题
+                    if (!message.name) {
+                        message.name = (parentMessage as AIMessage).tool_calls!.find((i) => i.id === message.tool_call_id)?.name;
+                    }
+                }
+            }
+            result.push(message);
+        }
+        return result;
+    }
+    /**
+     * @zh 获取 Token 计数器信息。
+     * @en Gets the Token counter information.
+     */
+    get tokenCounter() {
+        return this.graphMessages.reduce(
+            (acc, message) => {
+                if (message.usage_metadata) {
+                    acc.total_tokens += message.usage_metadata?.total_tokens || 0;
+                    acc.input_tokens += message.usage_metadata?.input_tokens || 0;
+                    acc.output_tokens += message.usage_metadata?.output_tokens || 0;
+                } else if ((message as AIMessage).response_metadata?.usage) {
+                    const usage = (message as AIMessage).response_metadata?.usage as {
+                        prompt_tokens: number;
+                        completion_tokens: number;
+                        total_tokens: number;
+                    };
+                    acc.total_tokens += usage.total_tokens || 0;
+                    acc.input_tokens += usage.prompt_tokens || 0;
+                    acc.output_tokens += usage.completion_tokens || 0;
+                }
+
+                return acc;
+            },
+            {
+                total_tokens: 0,
+                input_tokens: 0,
+                output_tokens: 0,
+            }
+        );
+    }
+
+    /**
+     * @zh 注册流式更新的回调函数。
+     * @en Registers a callback function for streaming updates.
+     */
+    onStreamingUpdate(callback: StreamingUpdateCallback) {
+        this.streamingCallbacks.add(callback);
+        return () => {
+            this.streamingCallbacks.delete(callback);
+        };
+    }
+
+    private emitStreamingUpdate(event: StreamingUpdateEvent) {
+        this.streamingCallbacks.forEach((callback) => callback(event));
+    }
+    /** 前端工具人机交互时,锁住面板 */
+    isFELocking(messages: RenderMessage[]) {
+        const lastMessage = messages[messages.length - 1];
+        if (!lastMessage) {
+            return false;
+        }
+        const tool = this.tools.getTool(lastMessage?.name!);
+        return tool && tool.render && lastMessage?.type === "tool" && !lastMessage?.additional_kwargs?.done;
+    }
+    graphState: any = {};
+    currentRun?: { run_id: string };
+    /**
+     * @zh 取消当前的 Run。
+     * @en Cancels the current Run.
+     */
+    cancelRun() {
+        if (this.currentThread?.thread_id && this.currentRun?.run_id) {
+            this.runs.cancel(this.currentThread!.thread_id, this.currentRun.run_id);
+        }
+    }
+    /**
+     * @zh 发送消息到 LangGraph 后端。
+     * @en Sends a message to the LangGraph backend.
+     */
+    async sendMessage(input: string | Message[], { extraParams, _debug, command }: SendMessageOptions = {}) {
+        if (!this.currentAssistant) {
+            throw new Error("Thread or Assistant not initialized");
+        }
+        if (!this.currentThread) {
+            await this.createThread();
+            this.emitStreamingUpdate({
+                type: "thread",
+                data: {
+                    event: "thread/create",
+                    data: {
+                        thread: this.currentThread,
+                    },
+                },
+            });
+        }
+
+        const messagesToSend = Array.isArray(input)
+            ? input
+            : [
+                  {
+                      type: "human",
+                      content: input,
+                  } as HumanMessage,
+              ];
+        const streamResponse =
+            _debug?.streamResponse ||
+            this.runs.stream(this.currentThread!.thread_id, this.currentAssistant.assistant_id, {
+                input: {
+                    ...this.graphState,
+                    ...this.extraParams,
+                    ...(extraParams || {}),
+                    messages: messagesToSend,
+                    fe_tools: this.tools.toJSON(this.currentAssistant.graph_id),
+                },
+                streamMode: ["messages", "values"],
+                streamSubgraphs: true,
+                command,
+            });
+        const streamRecord: any[] = [];
+        this.emitStreamingUpdate({
+            type: "start",
+            data: {
+                event: "start",
+            },
+        });
+        for await (const chunk of streamResponse) {
+            streamRecord.push(chunk);
+            if (chunk.event === "metadata") {
+                this.currentRun = chunk.data;
+            } else if (chunk.event === "error") {
+                this.emitStreamingUpdate({
+                    type: "error",
+                    data: chunk,
+                });
+            } else if (chunk.event === "messages/partial") {
+                for (const message of chunk.data) {
+                    this.updateStreamingMessage(message);
+                }
+                this.emitStreamingUpdate({
+                    type: "message",
+                    data: chunk,
+                });
+                continue;
+            } else if (chunk.event === "values") {
+                const data = chunk.data as { messages: Message[] };
+
+                if (data.messages) {
+                    const isResume = !!command?.resume;
+                    const isLongerThanLocal = data.messages.length >= this.graphMessages.length;
+                    // resume 情况下,长度低于前端 message 的统统不接受
+                    if (!isResume || (isResume && isLongerThanLocal)) {
+                        this.graphMessages = data.messages as RenderMessage[];
+                        this.emitStreamingUpdate({
+                            type: "value",
+                            data: chunk,
+                        });
+                    }
+                    this.graphState = chunk.data;
+                }
+                continue;
+            } else if (chunk.event.startsWith("values|")) {
+                // 这个 values 必然是子 values
+                if (chunk.data?.messages) {
+                    this.mergeSubGraphMessagesToStreamingMessages(chunk.data.messages);
+                }
+                this.graphPosition = chunk.event.split("|")[1];
+            }
+        }
+        const data = await this.runFETool();
+        if (data) streamRecord.push(...data);
+        this.emitStreamingUpdate({
+            type: "done",
+            data: {
+                event: "done",
+            },
+        });
+        this.streamingMessage = [];
+        return streamRecord;
+    }
+    /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
+    private graphPosition = "";
+    getGraphPosition() {
+        return this.graphPosition.split("|").map((i) => {
+            const [name, id] = i.split(":");
+            return {
+                id,
+                name,
+            };
+        });
+    }
+    getGraphNodeNow() {
+        const position = this.getGraphPosition();
+        return position[position.length - 1];
+    }
+    /** 子图的数据需要通过 merge 的方式重新进行合并更新 */
+    private mergeSubGraphMessagesToStreamingMessages(messages: Message[]) {
+        const map = new Map(messages.filter((i) => i.id).map((i) => [i.id!, i]));
+        this.streamingMessage.forEach((i) => {
+            if (map.has(i.id!)) {
+                const newValue = map.get(i.id!)!;
+                Object.assign(i, newValue);
+                map.delete(i.id!);
+            }
+        });
+        // 剩余的 message 一定不在 streamMessage 中
+        map.forEach((i) => {
+            if (i.type === "tool" && i.tool_call_id) {
+                this.streamingMessage.push(i as RenderMessage);
+            }
+        });
+    }
+
+    private runFETool() {
+        const data = this.streamingMessage; // 需要保证不被清理
+        const lastMessage = data[data.length - 1];
+        if (!lastMessage) return;
+        // 如果最后一条消息是前端工具消息,则调用工具
+        if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
+            const result = lastMessage.tool_calls.map((tool) => {
+                if (this.tools.getTool(tool.name!)) {
+                    const toolMessage: ToolMessage = {
+                        ...tool,
+                        tool_call_id: tool.id!,
+                        /** @ts-ignore */
+                        tool_input: JSON.stringify(tool.args),
+                        additional_kwargs: {},
+                    };
+                    // json 校验
+                    return this.callFETool(toolMessage, tool.args);
+                }
+            });
+            this.currentThread!.status = "interrupted"; // 修复某些机制下,状态不为 interrupted 与后端有差异
+            return Promise.all(result);
+        }
+    }
+    private async callFETool(message: ToolMessage, args: any) {
+        const that = this; // 防止 this 被错误解析
+        const result = await this.tools.callTool(message.name!, args, { client: that, message });
+        if (!result) {
+            return;
+        }
+        return this.resume(result);
+    }
+    extraParams: Record<string, any> = {};
+
+    /**
+     * @zh 继续被前端工具中断的流程。
+     * @en Resumes a process interrupted by a frontend tool.
+     */
+    resume(result: CallToolResult) {
+        return this.sendMessage([], {
+            command: {
+                resume: result,
+            },
+        });
+    }
+    /**
+     * @zh 标记前端工具等待已完成。
+     * @en Marks the frontend tool waiting as completed.
+     */
+    doneFEToolWaiting(id: string, result: CallToolResult) {
+        const done = this.tools.doneWaiting(id, result);
+        if (!done && this.currentThread?.status === "interrupted") {
+            this.resume(result);
+        }
+    }
+
+    /**
+     * @zh 获取当前的 Thread。
+     * @en Gets the current Thread.
+     */
+    getCurrentThread() {
+        return this.currentThread;
+    }
+
+    /**
+     * @zh 获取当前的 Assistant。
+     * @en Gets the current Assistant.
+     */
+    getCurrentAssistant() {
+        return this.currentAssistant;
+    }
+
+    /**
+     * @zh 重置客户端状态。
+     * @en Resets the client state.
+     */
+    async reset() {
+        await this.initAssistant(this.currentAssistant?.graph_id!);
+        this.currentThread = null;
+        this.graphState = {};
+        this.graphMessages = [];
+        this.streamingMessage = [];
+        this.currentRun = undefined;
+        this.tools.clearWaiting();
+        this.emitStreamingUpdate({
+            type: "value",
+            data: {
+                event: "messages/partial",
+            },
+        });
+    }
+}
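
For orientation, the sketch below shows how the 1.8.0 client above might be driven from application code. It is a minimal sketch that only relies on members visible in this diff (initAssistant, onStreamingUpdate, sendMessage, renderMessage, tokenCounter, getCurrentThread); the apiUrl value, the "agent" graph id, and the root import path are illustrative assumptions, not values taken from the package.

import { LangGraphClient } from "@langgraph-js/sdk"; // assumes the client is re-exported from the package root

async function main() {
    // Hypothetical endpoint; point this at your own LangGraph server.
    const client = new LangGraphClient({ apiUrl: "http://localhost:2024" });

    // Select an assistant by graph id ("agent" is a placeholder name).
    await client.initAssistant("agent");

    // Subscribe to streaming updates; the returned function unsubscribes.
    const unsubscribe = client.onStreamingUpdate((event) => {
        if (event.type === "message" || event.type === "value") {
            // renderMessage merges graph messages with the streaming buffer for UI rendering.
            for (const m of client.renderMessage) {
                console.log(m.type, m.name, m.spend_time, m.content);
            }
        }
    });

    // sendMessage creates a thread on demand, streams the run, and (per callFETool/resume above)
    // resumes the interrupted run when a registered frontend tool returns a result.
    await client.sendMessage("Hello!");

    console.log("thread:", client.getCurrentThread()?.thread_id);
    console.log("tokens:", client.tokenCounter);

    unsubscribe();
}

main().catch(console.error);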