@langgraph-js/sdk 3.6.0 → 3.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +29 -0
  2. package/dist/LangGraphClient.d.ts +13 -1
  3. package/dist/LangGraphClient.js +101 -77
  4. package/dist/MessageProcessor.js +24 -33
  5. package/dist/SpendTime.js +4 -9
  6. package/dist/TestKit.js +16 -15
  7. package/dist/ToolManager.js +4 -7
  8. package/dist/artifacts/index.js +1 -1
  9. package/dist/client/LanggraphServer.js +1 -1
  10. package/dist/client/LowJSServer.d.ts +3 -0
  11. package/dist/client/LowJSServer.js +80 -0
  12. package/dist/client/index.d.ts +2 -0
  13. package/dist/client/index.js +2 -0
  14. package/dist/client/utils/sse.d.ts +8 -0
  15. package/dist/client/utils/sse.js +151 -0
  16. package/dist/client/utils/stream.d.ts +15 -0
  17. package/dist/client/utils/stream.js +104 -0
  18. package/dist/index.d.ts +1 -0
  19. package/dist/index.js +1 -0
  20. package/dist/react/ChatContext.d.ts +3 -0
  21. package/dist/react/ChatContext.js +8 -3
  22. package/dist/tool/ToolUI.js +3 -2
  23. package/dist/tool/createTool.js +3 -6
  24. package/dist/tool/utils.js +3 -4
  25. package/dist/ui-store/createChatStore.js +23 -39
  26. package/dist/vue/ChatContext.d.ts +3 -0
  27. package/dist/vue/ChatContext.js +3 -2
  28. package/package.json +3 -1
  29. package/src/LangGraphClient.ts +73 -45
  30. package/src/MessageProcessor.ts +7 -9
  31. package/src/client/LanggraphServer.ts +1 -2
  32. package/src/client/LowJSServer.ts +80 -0
  33. package/src/client/index.ts +2 -0
  34. package/src/client/utils/sse.ts +176 -0
  35. package/src/client/utils/stream.ts +114 -0
  36. package/src/index.ts +1 -0
  37. package/src/react/ChatContext.ts +20 -15
  38. package/src/vue/ChatContext.ts +5 -0
  39. package/test/TestKit.test.ts +10 -2
  40. package/tsconfig.json +1 -1
package/README.md CHANGED
@@ -57,6 +57,35 @@ pnpm add @langgraph-js/sdk
  
  - ✅ Read History from LangGraph
  
+ ## Legacy Mode
+
+ Legacy mode is designed to be compatible with environments that don't support `AsyncGeneratorFunction` (such as WeChat Mini Programs). In these environments, standard async iterators may not work properly.
+
+ ### Legacy Mode Example
+
+ ```typescript
+ import { TestLangGraphChat, createChatStore, createLowerJSClient } from "@langgraph-js/sdk";
+
+ const client = await createLowerJSClient({
+     apiUrl: "http://localhost:8123",
+     defaultHeaders: {
+         Authorization: "Bearer 123",
+     },
+ });
+
+ createChatStore(
+     "graph",
+     {
+         defaultHeaders: {
+             Authorization: "Bearer 123",
+         },
+         client,
+         legacyMode: true,
+     },
+     {}
+ );
+ ```
+
  ## Advanced Usage
  
  ### Creating a Chat Store
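The Legacy Mode section added to the README above boils down to swapping pull-based iteration for push-based delivery: in a normal runtime the client iterates the run stream with `for await`, while in legacy mode every chunk is handed to a callback as it arrives (the `onCallback` wiring visible in `dist/LangGraphClient.js` further down). A minimal, self-contained TypeScript sketch of the two consumption styles — `RunChunk` and `startRun` are illustrative stand-ins, not part of the SDK's API:

```typescript
// Illustrative only: RunChunk and startRun stand in for the SDK's stream API.
interface RunChunk {
    event: string;
    data: unknown;
}

// A fake run that can deliver chunks either way.
function startRun(onCallback?: (chunk: RunChunk) => void): AsyncIterable<RunChunk> {
    const chunks: RunChunk[] = [
        { event: "metadata", data: { run_id: "run-1" } },
        { event: "messages/partial", data: [] },
    ];
    // Push style: hand every chunk to the callback (works without AsyncGeneratorFunction).
    if (onCallback) {
        for (const chunk of chunks) onCallback(chunk);
    }
    // Pull style: expose the same chunks as an async iterable.
    return {
        async *[Symbol.asyncIterator]() {
            for (const chunk of chunks) yield chunk;
        },
    };
}

async function consume(legacyMode: boolean): Promise<RunChunk[]> {
    const record: RunChunk[] = [];
    const stream = startRun(legacyMode ? (chunk) => record.push(chunk) : undefined);
    if (!legacyMode) {
        // Environments that support async generators can simply iterate.
        for await (const chunk of stream) record.push(chunk);
    }
    return record;
}
```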
package/dist/LangGraphClient.d.ts CHANGED
@@ -84,6 +84,8 @@ export interface LangGraphClientConfig
      defaultHeaders?: Record<string, string | null | undefined>;
      /** Custom client implementation; if not provided, the official Client is used */
      client: ILangGraphClient<any>;
+     /** Whether to use legacy mode. Defaults to false */
+     legacyMode?: boolean;
  }
  export interface LangGraphEvents {
      /** Stream start event */
@@ -136,11 +138,15 @@ export declare class LangGraphClient<TStateType = unknown> extends EventEmitter<
      stopController: AbortController | null;
      /** Message processor */
      private messageProcessor;
+     private legacyMode;
      constructor(config: LangGraphClientConfig);
      /** Proxies the assistants property to the internal client */
      get assistants(): {
          search(query?: {
-             graphId?: string;
+             graphId? /**
+              * The maximum number of concurrent calls that can be made.
+              * Defaults to `Infinity`, which means no limit.
+              */: string;
              metadata?: import("@langchain/langgraph-sdk").Metadata;
              limit?: number;
              offset?: number;
@@ -248,6 +254,12 @@ export declare class LangGraphClient<TStateType = unknown> extends EventEmitter<
          id: string;
          name: string;
      };
+     /**
+      * @zh 处理流式响应的单个 chunk。
+      * @en Processes a single chunk from the stream response.
+      * @returns Whether subsequent processing should be skipped (continue)
+      */
+     private processStreamChunk;
      private runFETool;
      private callFETool;
      extraParams: Record<string, any>;
@@ -2,25 +2,27 @@ import { EventEmitter } from "eventemitter3";
2
2
  import { ToolManager } from "./ToolManager.js";
3
3
  import { MessageProcessor } from "./MessageProcessor.js";
4
4
  import { revertChatTo } from "./time-travel/index.js";
5
+ import camelcaseKeys from "camelcase-keys";
5
6
  /**
6
7
  * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
7
8
  * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
8
9
  */
9
10
  export class LangGraphClient extends EventEmitter {
11
+ client;
12
+ currentAssistant = null;
13
+ currentThread = null;
14
+ tools = new ToolManager();
15
+ availableAssistants = [];
16
+ graphState = {};
17
+ currentRun;
18
+ stopController = null;
19
+ /** Message 处理器 */
20
+ messageProcessor;
21
+ legacyMode;
10
22
  constructor(config) {
11
23
  super();
12
- this.currentAssistant = null;
13
- this.currentThread = null;
14
- this.tools = new ToolManager();
15
- this.availableAssistants = [];
16
- this.graphState = {};
17
- this.stopController = null;
18
- this.messagesMetadata = {};
19
- this.humanInTheLoop = null;
20
- /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
21
- this.graphPosition = "";
22
- this.extraParams = {};
23
24
  this.client = config.client;
25
+ this.legacyMode = config.legacyMode ?? false;
24
26
  this.messageProcessor = new MessageProcessor();
25
27
  }
26
28
  /** 代理 assistants 属性到内部 client */
@@ -93,8 +95,7 @@ export class LangGraphClient extends EventEmitter {
          }
      }
      graphVisualize() {
-         var _a;
-         return this.assistants.getGraph((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.assistant_id, {
+         return this.assistants.getGraph(this.currentAssistant?.assistant_id, {
              xray: true,
          });
      }
@@ -116,11 +117,10 @@ export class LangGraphClient extends EventEmitter {
       * @en Resets the Thread data from history.
       */
      async resetThread(agent, threadId) {
-         var _a;
          await this.initAssistant(agent);
          this.currentThread = await this.threads.get(threadId);
          this.graphState = this.currentThread.values;
-         const graphMessages = ((_a = this.graphState) === null || _a === void 0 ? void 0 : _a.messages) || [];
+         const graphMessages = this.graphState?.messages || [];
          this.messageProcessor.setGraphMessages(graphMessages);
          this.emit("value", {
              event: "messages/partial",
@@ -133,7 +133,7 @@ export class LangGraphClient extends EventEmitter {
      // When restoring from history, the streaming state should be restored as well
      async resetStream() {
          const runs = await this.runs.list(this.currentThread.thread_id);
-         const runningRun = runs === null || runs === void 0 ? void 0 : runs.find((run) => run.status === "running" || run.status === "pending");
+         const runningRun = runs?.find((run) => run.status === "running" || run.status === "pending");
          if (runningRun) {
              await this.sendMessage([], { joinRunId: runningRun.run_id });
          }
@@ -154,14 +154,13 @@ export class LangGraphClient extends EventEmitter {
       */
      get tokenCounter() {
          return this.messageProcessor.getGraphMessages().reduce((acc, message) => {
-             var _a, _b, _c, _d, _e;
              if (message.usage_metadata) {
-                 acc.total_tokens += ((_a = message.usage_metadata) === null || _a === void 0 ? void 0 : _a.total_tokens) || 0;
-                 acc.input_tokens += ((_b = message.usage_metadata) === null || _b === void 0 ? void 0 : _b.input_tokens) || 0;
-                 acc.output_tokens += ((_c = message.usage_metadata) === null || _c === void 0 ? void 0 : _c.output_tokens) || 0;
+                 acc.total_tokens += message.usage_metadata?.total_tokens || 0;
+                 acc.input_tokens += message.usage_metadata?.input_tokens || 0;
+                 acc.output_tokens += message.usage_metadata?.output_tokens || 0;
              }
-             else if ((_d = message.response_metadata) === null || _d === void 0 ? void 0 : _d.usage) {
-                 const usage = (_e = message.response_metadata) === null || _e === void 0 ? void 0 : _e.usage;
+             else if (message.response_metadata?.usage) {
+                 const usage = message.response_metadata?.usage;
                  acc.total_tokens += usage.total_tokens || 0;
                  acc.input_tokens += usage.prompt_tokens || 0;
                  acc.output_tokens += usage.completion_tokens || 0;
@@ -175,21 +174,19 @@ export class LangGraphClient extends EventEmitter {
      }
      /** Lock the panel while a frontend tool is waiting for human interaction */
      isFELocking(messages) {
-         var _a;
          const lastMessage = messages[messages.length - 1];
          if (!lastMessage) {
              return false;
          }
-         const tool = this.tools.getTool(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.name);
-         return tool && tool.render && (lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.type) === "tool" && !((_a = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.additional_kwargs) === null || _a === void 0 ? void 0 : _a.done);
+         const tool = this.tools.getTool(lastMessage?.name);
+         return tool && tool.render && lastMessage?.type === "tool" && !lastMessage?.additional_kwargs?.done;
      }
      /**
       * @zh 取消当前的 Run。
       * @en Cancels the current Run.
       */
      cancelRun() {
-         var _a, _b;
-         if (((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.thread_id) && ((_b = this.currentRun) === null || _b === void 0 ? void 0 : _b.run_id)) {
+         if (this.currentThread?.thread_id && this.currentRun?.run_id) {
              this.runs.cancel(this.currentThread.thread_id, this.currentRun.run_id);
          }
      }
@@ -204,6 +201,8 @@ export class LangGraphClient extends EventEmitter {
          this.messageProcessor.setGraphMessages(state.messages);
          return state;
      }
+     messagesMetadata = {};
+     humanInTheLoop = null;
      /**
       * @zh 发送消息到 LangGraph 后端。
       * @en Sends a message to the LangGraph backend.
@@ -229,12 +228,25 @@ export class LangGraphClient extends EventEmitter {
                  content: input,
              },
          ];
+         const streamRecord = [];
+         this.emit("start", {
+             event: "start",
+         });
          const createStreamResponse = async () => {
-             if (_debug === null || _debug === void 0 ? void 0 : _debug.streamResponse) {
+             if (_debug?.streamResponse) {
                  return _debug.streamResponse;
              }
+             const onCallback = this.legacyMode
+                 ? (chunk) => {
+                     streamRecord.push(chunk);
+                     this.processStreamChunk(chunk, command);
+                 }
+                 : undefined;
              if (joinRunId) {
-                 return this.runs.joinStream(this.currentThread.thread_id, joinRunId);
+                 return this.runs.joinStream(this.currentThread.thread_id, joinRunId, {
+                     /** @ts-ignore */
+                     onCallback,
+                 });
              }
              return this.runs.stream(this.currentThread.thread_id, this.currentAssistant.assistant_id, {
                  input: {
@@ -247,51 +259,16 @@ export class LangGraphClient extends EventEmitter {
                  streamMode: ["messages", "values"],
                  streamSubgraphs: true,
                  command,
+                 /** @ts-ignore For compatibility with environments that do not support AsyncIterableFunction */
+                 onCallback,
              });
          };
          const streamResponse = await createStreamResponse();
-         const streamRecord = [];
-         this.emit("start", {
-             event: "start",
-         });
-         for await (const chunk of streamResponse) {
-             streamRecord.push(chunk);
-             if (chunk.event === "metadata") {
-                 this.currentRun = chunk.data;
-             }
-             else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
-                 this.emit("error", chunk);
-             }
-             else if (chunk.event === "messages/metadata") {
-                 Object.assign(this.messagesMetadata, chunk.data);
-                 continue;
-             }
-             else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
-                 for (const message of chunk.data) {
-                     this.messageProcessor.updateStreamingMessage(message);
-                 }
-                 this.emit("message", chunk);
-                 continue;
-             }
-             else if (chunk.event === "values") {
-                 const data = chunk.data;
-                 if (data.__interrupt__) {
-                     this.humanInTheLoop = data.__interrupt__;
-                 }
-                 else if (data.messages) {
-                     const isResume = !!(command === null || command === void 0 ? void 0 : command.resume);
-                     const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
-                     // When resuming, reject anything shorter than the messages already held on the frontend
-                     if (!isResume || (isResume && isLongerThanLocal)) {
-                         this.messageProcessor.setGraphMessages(data.messages);
-                         this.emit("value", chunk);
-                     }
-                     this.graphState = chunk.data;
-                 }
-                 continue;
-             }
-             else if (chunk.event.startsWith("values|")) {
-                 this.graphPosition = chunk.event.split("|")[1];
+         if (!this.legacyMode) {
+             // Any normal JS environment can run this, but some environments do not support AsyncGeneratorFunction (such as WeChat Mini Programs)
+             for await (const chunk of streamResponse) {
+                 streamRecord.push(chunk);
+                 this.processStreamChunk(chunk, command);
              }
          }
          const data = await this.runFETool();
@@ -304,6 +281,8 @@ export class LangGraphClient extends EventEmitter {
          this.messageProcessor.clearStreamingMessages();
          return streamRecord;
      }
+     /** Current subgraph position; it depends on the stream, so it is not reliable for stable use */
+     graphPosition = "";
      getGraphPosition() {
          return this.graphPosition.split("|").map((i) => {
              const [name, id] = i.split(":");
@@ -317,14 +296,60 @@ export class LangGraphClient extends EventEmitter {
          const position = this.getGraphPosition();
          return position[position.length - 1];
      }
+     /**
+      * @zh 处理流式响应的单个 chunk。
+      * @en Processes a single chunk from the stream response.
+      * @returns Whether subsequent processing should be skipped (continue)
+      */
+     processStreamChunk(chunk, command) {
+         if (chunk.event === "metadata") {
+             this.currentRun = chunk.data;
+         }
+         else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
+             this.emit("error", chunk);
+         }
+         else if (chunk.event === "messages/metadata") {
+             Object.assign(this.messagesMetadata, chunk.data);
+             return true;
+         }
+         else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
+             for (const message of chunk.data) {
+                 this.messageProcessor.updateStreamingMessage(message);
+             }
+             this.emit("message", chunk);
+             return true;
+         }
+         else if (chunk.event === "values") {
+             const data = chunk.data;
+             if (data.__interrupt__) {
+                 this.humanInTheLoop = camelcaseKeys(data.__interrupt__, {
+                     deep: true,
+                 });
+             }
+             else if (data.messages) {
+                 const isResume = !!command?.resume;
+                 const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
+                 // When resuming, reject anything shorter than the messages already held on the frontend
+                 if (!isResume || (isResume && isLongerThanLocal)) {
+                     this.messageProcessor.setGraphMessages(data.messages);
+                     this.emit("value", chunk);
+                 }
+                 this.graphState = chunk.data;
+             }
+             return true;
+         }
+         else if (chunk.event.startsWith("values|")) {
+             this.graphPosition = chunk.event.split("|")[1];
+         }
+         return false;
+     }
      runFETool() {
-         var _a;
          const data = this.messageProcessor.getStreamingMessages(); // must be guaranteed not to be cleared
          const lastMessage = data[data.length - 1];
          if (!lastMessage)
              return;
          // If the last message is a frontend tool message, invoke the tool
-         if (lastMessage.type === "ai" && ((_a = lastMessage.tool_calls) === null || _a === void 0 ? void 0 : _a.length)) {
+         if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
              const result = lastMessage.tool_calls.map((tool) => {
                  const toolMessage = {
                      ...tool,
@@ -348,6 +373,7 @@ export class LangGraphClient extends EventEmitter {
          }
          return this.resume(result);
      }
+     extraParams = {};
      /**
       * @zh 继续被前端工具中断的流程。
       * @en Resumes a process interrupted by a frontend tool.
@@ -364,9 +390,8 @@ export class LangGraphClient extends EventEmitter {
       * @en Marks the frontend tool waiting as completed.
       */
      doneFEToolWaiting(id, result) {
-         var _a;
          const done = this.tools.doneWaiting(id, result);
-         if (!done && ((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.status) === "interrupted") {
+         if (!done && this.currentThread?.status === "interrupted") {
              this.resume(result);
          }
      }
@@ -389,8 +414,7 @@ export class LangGraphClient extends EventEmitter {
       * @en Resets the client state.
       */
      async reset() {
-         var _a;
-         await this.initAssistant((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.graph_id);
+         await this.initAssistant(this.currentAssistant?.graph_id);
          this.currentThread = null;
          this.graphState = {};
          this.messageProcessor.setGraphMessages([]);
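One behavioural detail worth noting from the `processStreamChunk` hunk above: the `__interrupt__` payload is now passed through the newly imported `camelcase-keys` before being stored in `humanInTheLoop`, so consumers see camelCased keys. A small sketch of what that conversion does — the interrupt shape here is a hypothetical example, not the exact payload LangGraph emits:

```typescript
import camelcaseKeys from "camelcase-keys";

// Hypothetical interrupt payload with snake_case keys, as a backend might emit it.
const interrupt = [
    { value: { tool_call_id: "call_1", action_request: { action: "approve" } }, resumable: true },
];

// `deep: true` converts nested keys as well.
const normalized = camelcaseKeys(interrupt, { deep: true });

console.log(normalized[0].value.toolCallId); // "call_1"
console.log(normalized[0].value.actionRequest.action); // "approve"
```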
package/dist/MessageProcessor.js CHANGED
@@ -7,9 +7,8 @@ export class StreamingMessageType {
          return m.type === "tool";
      }
      static isToolAssistant(m) {
-         var _a, _b;
          /** @ts-ignore */
-         return m.type === "ai" && (((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) || ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length));
+         return m.type === "ai" && (m.tool_calls?.length || m.tool_call_chunks?.length);
      }
  }
  /**
@@ -17,12 +16,11 @@ export class StreamingMessageType {
   * @en The MessageProcessor class is used to uniformly handle Message-related logic and avoid duplicate processing.
   */
  export class MessageProcessor {
-     constructor() {
-         /** Streaming message cache */
-         this.streamingMessage = [];
-         /** Update messages pushed from the graph */
-         this.graphMessages = [];
-     }
+     /** Streaming message cache */
+     streamingMessage = [];
+     /** Update messages pushed from the graph */
+     graphMessages = [];
+     constructor() { }
      /**
       * @zh 获取流式消息
       * @en Get streaming messages
@@ -64,7 +62,7 @@ export class MessageProcessor {
       */
      updateStreamingMessage(message) {
          const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
-         if (!(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.id) || message.id !== lastMessage.id) {
+         if (!lastMessage?.id || message.id !== lastMessage.id) {
              this.streamingMessage.push(message);
              return;
          }
@@ -100,15 +98,14 @@ export class MessageProcessor {
       * @en Attaches additional information to messages, such as spend time, unique ID, etc.
       */
      attachInfoForMessage(messages) {
-         var _a, _b, _c;
          let lastMessage = null;
          const result = [...messages]; // create a copy to avoid mutating the original array
          for (const message of result) {
-             const createTime = ((_a = message.response_metadata) === null || _a === void 0 ? void 0 : _a.create_time) || "";
+             const createTime = message.response_metadata?.create_time || "";
              // Tools must use tool_call_id to guarantee consistency
              message.unique_id = message.tool_call_id || message.id;
-             message.spend_time = new Date(createTime).getTime() - new Date(((_b = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.response_metadata) === null || _b === void 0 ? void 0 : _b.create_time) || createTime).getTime();
-             if (!message.usage_metadata && ((_c = message.response_metadata) === null || _c === void 0 ? void 0 : _c.usage)) {
+             message.spend_time = new Date(createTime).getTime() - new Date(lastMessage?.response_metadata?.create_time || createTime).getTime();
+             if (!message.usage_metadata && message.response_metadata?.usage) {
                  const usage = message.response_metadata.usage;
                  message.usage_metadata = {
                      ...usage,
@@ -126,14 +123,13 @@ export class MessageProcessor {
       * @en Composes tool messages, associating AI tool calls with tool execution results.
       */
      composeToolMessages(messages) {
-         var _a, _b;
          const result = [];
          const assistantToolMessages = new Map();
          const toolParentMessage = new Map();
          for (const message of messages) {
              if (StreamingMessageType.isToolAssistant(message)) {
                  /** @ts-ignore only the args of tool_call_chunks are plain text */
-                 (_a = (message.tool_calls || message.tool_call_chunks)) === null || _a === void 0 ? void 0 : _a.forEach((element) => {
+                 (message.tool_calls || message.tool_call_chunks)?.forEach((element) => {
                      assistantToolMessages.set(element.id, element);
                      toolParentMessage.set(element.id, message);
                  });
@@ -145,23 +141,20 @@ export class MessageProcessor {
              const parentMessage = toolParentMessage.get(message.tool_call_id);
              if (assistantToolMessage) {
                  message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
-                 if (message.additional_kwargs) {
-                     message.additional_kwargs.done = true;
-                     message.done = true;
-                 }
-                 else {
-                     message.done = true;
-                     message.additional_kwargs = {
-                         done: true,
-                     };
-                 }
+                 const isDone = !!message.content;
+                 message.done = isDone;
+                 message.additional_kwargs = {
+                     ...(parentMessage?.additional_kwargs || {}),
+                     ...(message.additional_kwargs || {}),
+                     done: isDone,
+                 };
              }
              if (parentMessage) {
                  message.usage_metadata = parentMessage.usage_metadata;
                  message.node_name = parentMessage.name;
                  // Patch for special cases where the tool name is lost
                  if (!message.name) {
-                     message.name = (_b = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)) === null || _b === void 0 ? void 0 : _b.name;
+                     message.name = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)?.name;
                  }
              }
          }
@@ -174,7 +167,6 @@ export class MessageProcessor {
       * @en Generate messages used for streaming rendering in the UI
       */
      renderMessages(graphState, getGraphNodeNow, messagesMetadata) {
-         var _a;
          const previousMessage = new Map();
          const closedToolCallIds = new Set();
          const result = [];
@@ -196,18 +188,17 @@ export class MessageProcessor {
              // Record the message for this id and add it to the result
              previousMessage.set(message.id, m);
              /** @ts-ignore */
-             const tool_calls = ((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ? m.tool_calls : m.tool_call_chunks;
+             const tool_calls = m.tool_calls?.length ? m.tool_calls : m.tool_call_chunks;
              const new_tool_calls = tool_calls
                  .filter((i) => {
                      return !closedToolCallIds.has(i.id);
                  })
                  .map((tool, index) => {
-                     var _a, _b, _c, _d;
                      return {
                          type: "tool",
                          additional_kwargs: {},
                          /** @ts-ignore */
-                         tool_input: (_d = (_c = (_b = (_a = m.additional_kwargs) === null || _a === void 0 ? void 0 : _a.tool_calls) === null || _b === void 0 ? void 0 : _b[index]) === null || _c === void 0 ? void 0 : _c.function) === null || _d === void 0 ? void 0 : _d.arguments,
+                         tool_input: m.additional_kwargs?.tool_calls?.[index]?.function?.arguments,
                          id: tool.id,
                          name: tool.name,
                          response_metadata: {},
@@ -234,14 +225,14 @@ export class MessageProcessor {
          return this.processMessages(result, graphState, messagesMetadata);
      }
      foldTreeMessages(messages, graphState, messagesMetadata) {
-         const state_sub_messages = Object.entries((graphState === null || graphState === void 0 ? void 0 : graphState.task_store) || {}).map(([key, value]) => [key, value.messages]);
+         const state_sub_messages = Object.entries(graphState?.task_store || {}).map(([key, value]) => [key, value.messages]);
          const state_sub_messages_map = new Map(state_sub_messages);
          const nonRootMessageId = new Set();
          const parentPointer = new Map(Object.entries(messagesMetadata || {})
              .map(([childId, metadata]) => {
-             if (metadata === null || metadata === void 0 ? void 0 : metadata.parent_id) {
+             if (metadata?.parent_id) {
                  nonRootMessageId.add(childId);
-                 return [childId, metadata === null || metadata === void 0 ? void 0 : metadata.parent_id];
+                 return [childId, metadata?.parent_id];
              }
              return;
          })
package/dist/SpendTime.js CHANGED
@@ -3,9 +3,7 @@
   * @en The SpendTime class is used to calculate and record the time spent on operations.
   */
  export class SpendTime {
-     constructor() {
-         this.timeCounter = new Map();
-     }
+     timeCounter = new Map();
      /**
       * @zh 开始计时。
       * @en Starts timing.
@@ -18,8 +16,7 @@ export class SpendTime {
       * @en Ends timing.
       */
      end(key) {
-         var _a;
-         this.timeCounter.set(key, [((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[0]) || new Date(), new Date()]);
+         this.timeCounter.set(key, [this.timeCounter.get(key)?.[0] || new Date(), new Date()]);
      }
      /**
       * @zh 设置或更新指定键的耗时记录。如果键已存在,则更新结束时间;否则,开始新的计时。
@@ -38,16 +35,14 @@ export class SpendTime {
       * @en Gets the start time for the specified key.
       */
      getStartTime(key) {
-         var _a;
-         return ((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[0]) || new Date();
+         return this.timeCounter.get(key)?.[0] || new Date();
      }
      /**
       * @zh 获取指定键的结束时间。
       * @en Gets the end time for the specified key.
       */
      getEndTime(key) {
-         var _a;
-         return ((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[1]) || new Date();
+         return this.timeCounter.get(key)?.[1] || new Date();
      }
      /**
       * @zh 获取指定键的耗时(毫秒)。
package/dist/TestKit.js CHANGED
@@ -4,18 +4,17 @@ export class TestLogger {
          console.log(message);
      }
      logMessage(message) {
-         var _a, _b, _c, _d, _e, _f, _g;
          const emoji = message.type === "ai" ? "🤖" : message.type === "human" ? "👤" : "🔧";
-         const header = `${emoji} ${message.type} | ${(_a = message.name) !== null && _a !== void 0 ? _a : "null"} | ${message.id}`;
+         const header = `${emoji} ${message.type} | ${message.name ?? "null"} | ${message.id}`;
          if (message.type === "tool") {
              return console.log(`${header}
- 🔧 Input: ${(_c = (_b = message.tool_input) === null || _b === void 0 ? void 0 : _b.slice(0, 100)) !== null && _c !== void 0 ? _c : ""}
- 💬 Output: ${(_e = (_d = message.content) === null || _d === void 0 ? void 0 : _d.slice(0, 100)) !== null && _e !== void 0 ? _e : ""}
+ 🔧 Input: ${message.tool_input?.slice(0, 100) ?? ""}
+ 💬 Output: ${message.content?.slice(0, 100) ?? ""}
  `);
          }
          console.log(`---
  ${header}
- 💬 Output: ${(_g = (_f = message.content) === null || _f === void 0 ? void 0 : _f.slice(0, 100)) !== null && _g !== void 0 ? _g : ""}
+ 💬 Output: ${message.content?.slice(0, 100) ?? ""}
  `);
      }
  }
@@ -33,22 +32,23 @@ ${header}
   * ```
   */
  export class TestLangGraphChat {
+     store;
+     /** Whether debug mode is enabled */
+     debug = false;
+     /** Last message count, used to detect message changes */
+     lastLength = 0;
+     /** List of pending test tasks */
+     processFunc = [];
+     /** Custom logger */
+     logger;
      /**
       * @zh 构造函数,初始化测试环境
       * @en Constructor, initialize test environment
       */
      constructor(store, options) {
-         var _a, _b;
          this.store = store;
-         /** Whether debug mode is enabled */
-         this.debug = false;
-         /** Last message count, used to detect message changes */
-         this.lastLength = 0;
-         /** List of pending test tasks */
-         this.processFunc = [];
-         this.readited = false;
-         this.debug = (_a = options.debug) !== null && _a !== void 0 ? _a : false;
-         this.logger = (_b = options.logger) !== null && _b !== void 0 ? _b : new TestLogger();
+         this.debug = options.debug ?? false;
+         this.logger = options.logger ?? new TestLogger();
          options.tools && this.addTools(options.tools);
          const renderMessages = this.store.data.renderMessages;
          // Subscribe to message changes and automatically check task completion
@@ -126,6 +126,7 @@ export class TestLangGraphChat {
              this.logger.logMessage(item);
          }
      }
+     readited = false;
      /**
       * @zh 准备测试环境,初始化客户端连接
       * @en Prepare test environment, initialize client connection
@@ -4,11 +4,9 @@ import { createJSONDefineTool } from "./tool/createTool.js";
4
4
  * @en The ToolManager class is used to manage and execute tools.
5
5
  */
6
6
  export class ToolManager {
7
- constructor() {
8
- this.tools = new Map();
9
- // === 专门为前端设计的异步触发结构
10
- this.waitingMap = new Map();
11
- }
7
+ tools = new Map();
8
+ // === 专门为前端设计的异步触发结构
9
+ waitingMap = new Map();
12
10
  /**
13
11
  * @zh 注册一个工具。
14
12
  * @en Registers a tool.
@@ -66,9 +64,8 @@ export class ToolManager {
66
64
  * @en Calls the tool with the specified name.
67
65
  */
68
66
  async callTool(name, args, context) {
69
- var _a;
70
67
  const tool = this.getTool(name) || this.getTool("__default__");
71
- return await ((_a = tool.execute) === null || _a === void 0 ? void 0 : _a.call(tool, args, context));
68
+ return await tool.execute?.(args, context);
72
69
  }
73
70
  /**
74
71
  * @zh 将所有工具转换为 JSON 定义格式。