@langgraph-js/sdk 3.7.0 → 3.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +29 -0
  2. package/dist/History.d.ts +115 -0
  3. package/dist/History.js +226 -0
  4. package/dist/LangGraphClient.d.ts +23 -2
  5. package/dist/LangGraphClient.js +118 -80
  6. package/dist/MessageProcessor.js +18 -24
  7. package/dist/SpendTime.js +4 -9
  8. package/dist/TestKit.d.ts +1 -1
  9. package/dist/TestKit.js +16 -15
  10. package/dist/ToolManager.js +4 -7
  11. package/dist/artifacts/index.js +1 -1
  12. package/dist/client/LanggraphServer.js +1 -1
  13. package/dist/client/LowJSServer.d.ts +3 -0
  14. package/dist/client/LowJSServer.js +80 -0
  15. package/dist/client/index.d.ts +2 -0
  16. package/dist/client/index.js +2 -0
  17. package/dist/client/utils/sse.d.ts +8 -0
  18. package/dist/client/utils/sse.js +151 -0
  19. package/dist/client/utils/stream.d.ts +15 -0
  20. package/dist/client/utils/stream.js +104 -0
  21. package/dist/index.d.ts +2 -0
  22. package/dist/index.js +2 -0
  23. package/dist/react/ChatContext.d.ts +31 -20
  24. package/dist/react/ChatContext.js +10 -4
  25. package/dist/tool/ToolUI.js +3 -2
  26. package/dist/tool/createTool.js +3 -6
  27. package/dist/tool/utils.js +3 -4
  28. package/dist/ui-store/createChatStore.d.ts +33 -66
  29. package/dist/ui-store/createChatStore.js +261 -247
  30. package/dist/vue/ChatContext.d.ts +41 -21
  31. package/dist/vue/ChatContext.js +8 -2
  32. package/package.json +3 -1
  33. package/src/History.ts +294 -0
  34. package/src/LangGraphClient.ts +98 -48
  35. package/src/client/LanggraphServer.ts +1 -2
  36. package/src/client/LowJSServer.ts +80 -0
  37. package/src/client/index.ts +2 -0
  38. package/src/client/utils/sse.ts +176 -0
  39. package/src/client/utils/stream.ts +114 -0
  40. package/src/index.ts +2 -0
  41. package/src/react/ChatContext.ts +25 -16
  42. package/src/ui-store/createChatStore.ts +310 -236
  43. package/src/vue/ChatContext.ts +12 -0
  44. package/test/TestKit.test.ts +10 -2
  45. package/tsconfig.json +1 -1
package/dist/LangGraphClient.js CHANGED
@@ -2,25 +2,29 @@ import { EventEmitter } from "eventemitter3";
  import { ToolManager } from "./ToolManager.js";
  import { MessageProcessor } from "./MessageProcessor.js";
  import { revertChatTo } from "./time-travel/index.js";
+ import camelcaseKeys from "camelcase-keys";
  /**
  * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
  * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
  */
  export class LangGraphClient extends EventEmitter {
+ client;
+ currentAssistant = null;
+ currentThread = null;
+ tools = new ToolManager();
+ availableAssistants = [];
+ graphState = {};
+ currentRun;
+ stopController = null;
+ /** Message 处理器 */
+ messageProcessor;
+ legacyMode;
+ /** 当前流式状态 */
+ _status = "idle";
  constructor(config) {
  super();
- this.currentAssistant = null;
- this.currentThread = null;
- this.tools = new ToolManager();
- this.availableAssistants = [];
- this.graphState = {};
- this.stopController = null;
- this.messagesMetadata = {};
- this.humanInTheLoop = null;
- /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
- this.graphPosition = "";
- this.extraParams = {};
  this.client = config.client;
+ this.legacyMode = config.legacyMode ?? false;
  this.messageProcessor = new MessageProcessor();
  }
  /** 代理 assistants 属性到内部 client */
@@ -35,6 +39,10 @@ export class LangGraphClient extends EventEmitter {
  get runs() {
  return this.client.runs;
  }
+ /** 获取当前流式状态 */
+ get status() {
+ return this._status;
+ }
  listAssistants() {
  return this.assistants.search({
  metadata: null,
@@ -93,8 +101,7 @@ export class LangGraphClient extends EventEmitter {
  }
  }
  graphVisualize() {
- var _a;
- return this.assistants.getGraph((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.assistant_id, {
+ return this.assistants.getGraph(this.currentAssistant?.assistant_id, {
  xray: true,
  });
  }
@@ -102,10 +109,12 @@ export class LangGraphClient extends EventEmitter {
  * @zh 列出所有的 Thread。
  * @en Lists all Threads.
  */
- async listThreads() {
+ async listThreads(options = {}) {
  return this.threads.search({
- sortOrder: "desc",
- sortBy: "updated_at",
+ sortOrder: options.sortOrder || "desc",
+ sortBy: options.sortBy || "updated_at",
+ offset: options.offset || 0,
+ limit: options.limit || 10,
  });
  }
  async deleteThread(threadId) {
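
Note: listThreads now accepts optional pagination and sorting parameters, with the defaults shown in the hunk above. A minimal usage sketch, assuming an already-initialized client:

```ts
// Sketch only: the option names come from the hunk above; `client` is an
// assumed, already-initialized LangGraphClient instance (not part of this diff).
const page2 = await client.listThreads({ offset: 10, limit: 10 });
```
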
@@ -116,11 +125,10 @@ export class LangGraphClient extends EventEmitter {
  * @en Resets the Thread data from history.
  */
  async resetThread(agent, threadId) {
- var _a;
  await this.initAssistant(agent);
  this.currentThread = await this.threads.get(threadId);
  this.graphState = this.currentThread.values;
- const graphMessages = ((_a = this.graphState) === null || _a === void 0 ? void 0 : _a.messages) || [];
+ const graphMessages = this.graphState?.messages || [];
  this.messageProcessor.setGraphMessages(graphMessages);
  this.emit("value", {
  event: "messages/partial",
@@ -133,7 +141,7 @@ export class LangGraphClient extends EventEmitter {
  // 从历史中恢复时,应该恢复流式状态
  async resetStream() {
  const runs = await this.runs.list(this.currentThread.thread_id);
- const runningRun = runs === null || runs === void 0 ? void 0 : runs.find((run) => run.status === "running" || run.status === "pending");
+ const runningRun = runs?.find((run) => run.status === "running" || run.status === "pending");
  if (runningRun) {
  await this.sendMessage([], { joinRunId: runningRun.run_id });
  }
@@ -154,14 +162,13 @@ export class LangGraphClient extends EventEmitter {
  */
  get tokenCounter() {
  return this.messageProcessor.getGraphMessages().reduce((acc, message) => {
- var _a, _b, _c, _d, _e;
  if (message.usage_metadata) {
- acc.total_tokens += ((_a = message.usage_metadata) === null || _a === void 0 ? void 0 : _a.total_tokens) || 0;
- acc.input_tokens += ((_b = message.usage_metadata) === null || _b === void 0 ? void 0 : _b.input_tokens) || 0;
- acc.output_tokens += ((_c = message.usage_metadata) === null || _c === void 0 ? void 0 : _c.output_tokens) || 0;
+ acc.total_tokens += message.usage_metadata?.total_tokens || 0;
+ acc.input_tokens += message.usage_metadata?.input_tokens || 0;
+ acc.output_tokens += message.usage_metadata?.output_tokens || 0;
  }
- else if ((_d = message.response_metadata) === null || _d === void 0 ? void 0 : _d.usage) {
- const usage = (_e = message.response_metadata) === null || _e === void 0 ? void 0 : _e.usage;
+ else if (message.response_metadata?.usage) {
+ const usage = message.response_metadata?.usage;
  acc.total_tokens += usage.total_tokens || 0;
  acc.input_tokens += usage.prompt_tokens || 0;
  acc.output_tokens += usage.completion_tokens || 0;
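
The tokenCounter getter above is only refactored (native optional chaining instead of downleveled helpers); its aggregation behavior is unchanged and can be read directly:

```ts
// Field names come from the accumulator in the hunk above; `client` is an
// assumed, already-initialized LangGraphClient instance.
const { total_tokens, input_tokens, output_tokens } = client.tokenCounter;
console.log(`${total_tokens} tokens total (${input_tokens} in / ${output_tokens} out)`);
```
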
@@ -175,21 +182,19 @@ export class LangGraphClient extends EventEmitter {
  }
  /** 前端工具人机交互时,锁住面板 */
  isFELocking(messages) {
- var _a;
  const lastMessage = messages[messages.length - 1];
  if (!lastMessage) {
  return false;
  }
- const tool = this.tools.getTool(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.name);
- return tool && tool.render && (lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.type) === "tool" && !((_a = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.additional_kwargs) === null || _a === void 0 ? void 0 : _a.done);
+ const tool = this.tools.getTool(lastMessage?.name);
+ return tool && tool.render && lastMessage?.type === "tool" && !lastMessage?.additional_kwargs?.done;
  }
  /**
  * @zh 取消当前的 Run。
  * @en Cancels the current Run.
  */
  cancelRun() {
- var _a, _b;
- if (((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.thread_id) && ((_b = this.currentRun) === null || _b === void 0 ? void 0 : _b.run_id)) {
+ if (this.currentThread?.thread_id && this.currentRun?.run_id) {
  this.runs.cancel(this.currentThread.thread_id, this.currentRun.run_id);
  }
  }
@@ -204,6 +209,8 @@ export class LangGraphClient extends EventEmitter {
  this.messageProcessor.setGraphMessages(state.messages);
  return state;
  }
+ messagesMetadata = {};
+ humanInTheLoop = null;
  /**
  * @zh 发送消息到 LangGraph 后端。
  * @en Sends a message to the LangGraph backend.
@@ -229,12 +236,26 @@ export class LangGraphClient extends EventEmitter {
  content: input,
  },
  ];
+ const streamRecord = [];
+ this._status = "busy";
+ this.emit("start", {
+ event: "start",
+ });
  const createStreamResponse = async () => {
- if (_debug === null || _debug === void 0 ? void 0 : _debug.streamResponse) {
+ if (_debug?.streamResponse) {
  return _debug.streamResponse;
  }
+ const onCallback = this.legacyMode
+ ? (chunk) => {
+ streamRecord.push(chunk);
+ this.processStreamChunk(chunk, command);
+ }
+ : undefined;
  if (joinRunId) {
- return this.runs.joinStream(this.currentThread.thread_id, joinRunId);
+ return this.runs.joinStream(this.currentThread.thread_id, joinRunId, {
+ /** @ts-ignore */
+ onCallback,
+ });
  }
  return this.runs.stream(this.currentThread.thread_id, this.currentAssistant.assistant_id, {
  input: {
@@ -247,63 +268,31 @@
  streamMode: ["messages", "values"],
  streamSubgraphs: true,
  command,
+ /** @ts-ignore 为兼容不支持 AsyncIterableFunction 的环境*/
+ onCallback,
  });
  };
  const streamResponse = await createStreamResponse();
- const streamRecord = [];
- this.emit("start", {
- event: "start",
- });
- for await (const chunk of streamResponse) {
- streamRecord.push(chunk);
- if (chunk.event === "metadata") {
- this.currentRun = chunk.data;
- }
- else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
- this.emit("error", chunk);
- }
- else if (chunk.event === "messages/metadata") {
- Object.assign(this.messagesMetadata, chunk.data);
- continue;
- }
- else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
- for (const message of chunk.data) {
- this.messageProcessor.updateStreamingMessage(message);
- }
- this.emit("message", chunk);
- continue;
- }
- else if (chunk.event === "values") {
- const data = chunk.data;
- if (data.__interrupt__) {
- this.humanInTheLoop = data.__interrupt__;
- }
- else if (data.messages) {
- const isResume = !!(command === null || command === void 0 ? void 0 : command.resume);
- const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
- // resume 情况下,长度低于前端 message 的统统不接受
- if (!isResume || (isResume && isLongerThanLocal)) {
- this.messageProcessor.setGraphMessages(data.messages);
- this.emit("value", chunk);
- }
- this.graphState = chunk.data;
- }
- continue;
- }
- else if (chunk.event.startsWith("values|")) {
- this.graphPosition = chunk.event.split("|")[1];
+ if (!this.legacyMode) {
+ // 正常的 JS 环境都可以执行,但是部分环境不支持 AsyncGeneratorFunction(比如 sb 的微信小程序)
+ for await (const chunk of streamResponse) {
+ streamRecord.push(chunk);
+ this.processStreamChunk(chunk, command);
  }
  }
  const data = await this.runFETool();
  if (data)
  streamRecord.push(...data);
  this.humanInTheLoop = null;
+ this._status = "idle";
  this.emit("done", {
  event: "done",
  });
  this.messageProcessor.clearStreamingMessages();
  return streamRecord;
  }
+ /** 当前子图位置,但是依赖 stream,不太适合稳定使用*/
+ graphPosition = "";
  getGraphPosition() {
  return this.graphPosition.split("|").map((i) => {
  const [name, id] = i.split(":");
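
The hunks above carry the main behavior change in 3.8.0: chunk handling moves out of the `for await` loop into processStreamChunk, and a new legacyMode flag routes each chunk through an onCallback hook for runtimes that lack async-generator support (such as WeChat mini programs). A minimal sketch of opting in, assuming the rest of the configuration is unchanged:

```ts
// `legacyMode` comes from the constructor diff above (`config.legacyMode ?? false`).
// `serverClient` stands in for whatever value you already pass as `config.client`.
const client = new LangGraphClient({
    client: serverClient,
    legacyMode: true, // deliver chunks via onCallback instead of `for await`
});
```
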
@@ -317,14 +306,62 @@
  const position = this.getGraphPosition();
  return position[position.length - 1];
  }
+ /**
+ * @zh 处理流式响应的单个 chunk。
+ * @en Processes a single chunk from the stream response.
+ * @returns 是否需要跳过后续处理 (continue)
+ */
+ processStreamChunk(chunk, command) {
+ if (chunk.event === "metadata") {
+ this.currentRun = chunk.data;
+ }
+ else if (chunk.event === "error" || chunk.event === "Error" || chunk.event === "__stream_error__") {
+ this._status = "error";
+ this.emit("error", chunk);
+ }
+ else if (chunk.event === "messages/metadata") {
+ Object.assign(this.messagesMetadata, chunk.data);
+ return true;
+ }
+ else if (chunk.event === "messages/partial" || chunk.event === "messages/complete") {
+ for (const message of chunk.data) {
+ this.messageProcessor.updateStreamingMessage(message);
+ }
+ this.emit("message", chunk);
+ return true;
+ }
+ else if (chunk.event === "values") {
+ const data = chunk.data;
+ if (data.__interrupt__) {
+ this._status = "interrupted";
+ this.humanInTheLoop = camelcaseKeys(data.__interrupt__, {
+ deep: true,
+ });
+ }
+ else if (data.messages) {
+ const isResume = !!command?.resume;
+ const isLongerThanLocal = data.messages.length >= this.messageProcessor.getGraphMessages().length;
+ // resume 情况下,长度低于前端 message 的统统不接受
+ if (!isResume || (isResume && isLongerThanLocal)) {
+ this.messageProcessor.setGraphMessages(data.messages);
+ this.emit("value", chunk);
+ }
+ this.graphState = chunk.data;
+ }
+ return true;
+ }
+ else if (chunk.event.startsWith("values|")) {
+ this.graphPosition = chunk.event.split("|")[1];
+ }
+ return false;
+ }
  runFETool() {
- var _a;
  const data = this.messageProcessor.getStreamingMessages(); // 需要保证不被清理
  const lastMessage = data[data.length - 1];
  if (!lastMessage)
  return;
  // 如果最后一条消息是前端工具消息,则调用工具
- if (lastMessage.type === "ai" && ((_a = lastMessage.tool_calls) === null || _a === void 0 ? void 0 : _a.length)) {
+ if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
  const result = lastMessage.tool_calls.map((tool) => {
  const toolMessage = {
  ...tool,
@@ -336,6 +373,7 @@ export class LangGraphClient extends EventEmitter {
  // json 校验
  return this.callFETool(toolMessage, tool.args);
  });
+ this._status = "interrupted";
  this.currentThread.status = "interrupted"; // 修复某些机制下,状态不为 interrupted 与后端有差异
  return Promise.all(result);
  }
@@ -348,6 +386,7 @@ export class LangGraphClient extends EventEmitter {
  }
  return this.resume(result);
  }
+ extraParams = {};
  /**
  * @zh 继续被前端工具中断的流程。
  * @en Resumes a process interrupted by a frontend tool.
@@ -364,9 +403,8 @@ export class LangGraphClient extends EventEmitter {
  * @en Marks the frontend tool waiting as completed.
  */
  doneFEToolWaiting(id, result) {
- var _a;
  const done = this.tools.doneWaiting(id, result);
- if (!done && ((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.status) === "interrupted") {
+ if (!done && this.currentThread?.status === "interrupted") {
  this.resume(result);
  }
  }
@@ -389,14 +427,14 @@ export class LangGraphClient extends EventEmitter {
  * @en Resets the client state.
  */
  async reset() {
- var _a;
- await this.initAssistant((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.graph_id);
+ await this.initAssistant(this.currentAssistant?.graph_id);
  this.currentThread = null;
  this.graphState = {};
  this.messageProcessor.setGraphMessages([]);
  this.messageProcessor.clearStreamingMessages();
  this.currentRun = undefined;
  this.tools.clearWaiting();
+ this._status = "idle";
  this.emit("value", {
  event: "messages/partial",
  data: {
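
Taken together, the client now exposes a simple streaming lifecycle: status moves from "idle" to "busy" when a message is sent, to "interrupted" or "error" where applicable, and back to "idle" on completion or reset. A small sketch of observing it through the existing EventEmitter interface:

```ts
// Event names ("start", "error", "done") and the `status` getter are taken from
// the hunks above; `client` is an assumed, already-initialized instance.
client.on("start", () => console.log(client.status)); // "busy"
client.on("error", (chunk) => console.error("stream error", chunk));
client.on("done", () => console.log(client.status)); // "idle" again
```
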
package/dist/MessageProcessor.js CHANGED
@@ -7,9 +7,8 @@ export class StreamingMessageType {
  return m.type === "tool";
  }
  static isToolAssistant(m) {
- var _a, _b;
  /** @ts-ignore */
- return m.type === "ai" && (((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) || ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length));
+ return m.type === "ai" && (m.tool_calls?.length || m.tool_call_chunks?.length);
  }
  }
  /**
@@ -17,12 +16,11 @@ export class StreamingMessageType {
  * @en The MessageProcessor class is used to uniformly handle Message-related logic and avoid duplicate processing.
  */
  export class MessageProcessor {
- constructor() {
- /** 流式消息缓存 */
- this.streamingMessage = [];
- /** 图发过来的更新信息 */
- this.graphMessages = [];
- }
+ /** 流式消息缓存 */
+ streamingMessage = [];
+ /** 图发过来的更新信息 */
+ graphMessages = [];
+ constructor() { }
  /**
  * @zh 获取流式消息
  * @en Get streaming messages
@@ -64,7 +62,7 @@ export class MessageProcessor {
  */
  updateStreamingMessage(message) {
  const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
- if (!(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.id) || message.id !== lastMessage.id) {
+ if (!lastMessage?.id || message.id !== lastMessage.id) {
  this.streamingMessage.push(message);
  return;
  }
@@ -100,15 +98,14 @@ export class MessageProcessor {
  * @en Attaches additional information to messages, such as spend time, unique ID, etc.
  */
  attachInfoForMessage(messages) {
- var _a, _b, _c;
  let lastMessage = null;
  const result = [...messages]; // 创建副本避免修改原数组
  for (const message of result) {
- const createTime = ((_a = message.response_metadata) === null || _a === void 0 ? void 0 : _a.create_time) || "";
+ const createTime = message.response_metadata?.create_time || "";
  // 工具必须要使用 tool_call_id 来保证一致性
  message.unique_id = message.tool_call_id || message.id;
- message.spend_time = new Date(createTime).getTime() - new Date(((_b = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.response_metadata) === null || _b === void 0 ? void 0 : _b.create_time) || createTime).getTime();
- if (!message.usage_metadata && ((_c = message.response_metadata) === null || _c === void 0 ? void 0 : _c.usage)) {
+ message.spend_time = new Date(createTime).getTime() - new Date(lastMessage?.response_metadata?.create_time || createTime).getTime();
+ if (!message.usage_metadata && message.response_metadata?.usage) {
  const usage = message.response_metadata.usage;
  message.usage_metadata = {
  ...usage,
@@ -126,14 +123,13 @@ export class MessageProcessor {
  * @en Composes tool messages, associating AI tool calls with tool execution results.
  */
  composeToolMessages(messages) {
- var _a, _b;
  const result = [];
  const assistantToolMessages = new Map();
  const toolParentMessage = new Map();
  for (const message of messages) {
  if (StreamingMessageType.isToolAssistant(message)) {
  /** @ts-ignore 只有 tool_call_chunks 的 args 才是文本 */
- (_a = (message.tool_calls || message.tool_call_chunks)) === null || _a === void 0 ? void 0 : _a.forEach((element) => {
+ (message.tool_calls || message.tool_call_chunks)?.forEach((element) => {
  assistantToolMessages.set(element.id, element);
  toolParentMessage.set(element.id, message);
  });
@@ -148,7 +144,7 @@ export class MessageProcessor {
  const isDone = !!message.content;
  message.done = isDone;
  message.additional_kwargs = {
- ...((parentMessage === null || parentMessage === void 0 ? void 0 : parentMessage.additional_kwargs) || {}),
+ ...(parentMessage?.additional_kwargs || {}),
  ...(message.additional_kwargs || {}),
  done: isDone,
  };
@@ -158,7 +154,7 @@ export class MessageProcessor {
  message.node_name = parentMessage.name;
  // 修补特殊情况下,tool name 丢失的问题
  if (!message.name) {
- message.name = (_b = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)) === null || _b === void 0 ? void 0 : _b.name;
+ message.name = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)?.name;
  }
  }
  }
@@ -171,7 +167,6 @@ export class MessageProcessor {
  * @en Generate messages used for streaming rendering in the UI
  */
  renderMessages(graphState, getGraphNodeNow, messagesMetadata) {
- var _a;
  const previousMessage = new Map();
  const closedToolCallIds = new Set();
  const result = [];
@@ -193,18 +188,17 @@ export class MessageProcessor {
  // 记录这个 id 的消息,并添加到结果中
  previousMessage.set(message.id, m);
  /** @ts-ignore */
- const tool_calls = ((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ? m.tool_calls : m.tool_call_chunks;
+ const tool_calls = m.tool_calls?.length ? m.tool_calls : m.tool_call_chunks;
  const new_tool_calls = tool_calls
  .filter((i) => {
  return !closedToolCallIds.has(i.id);
  })
  .map((tool, index) => {
- var _a, _b, _c, _d;
  return {
  type: "tool",
  additional_kwargs: {},
  /** @ts-ignore */
- tool_input: (_d = (_c = (_b = (_a = m.additional_kwargs) === null || _a === void 0 ? void 0 : _a.tool_calls) === null || _b === void 0 ? void 0 : _b[index]) === null || _c === void 0 ? void 0 : _c.function) === null || _d === void 0 ? void 0 : _d.arguments,
+ tool_input: m.additional_kwargs?.tool_calls?.[index]?.function?.arguments,
  id: tool.id,
  name: tool.name,
  response_metadata: {},
@@ -231,14 +225,14 @@ export class MessageProcessor {
  return this.processMessages(result, graphState, messagesMetadata);
  }
  foldTreeMessages(messages, graphState, messagesMetadata) {
- const state_sub_messages = Object.entries((graphState === null || graphState === void 0 ? void 0 : graphState.task_store) || {}).map(([key, value]) => [key, value.messages]);
+ const state_sub_messages = Object.entries(graphState?.task_store || {}).map(([key, value]) => [key, value.messages]);
  const state_sub_messages_map = new Map(state_sub_messages);
  const nonRootMessageId = new Set();
  const parentPointer = new Map(Object.entries(messagesMetadata || {})
  .map(([childId, metadata]) => {
- if (metadata === null || metadata === void 0 ? void 0 : metadata.parent_id) {
+ if (metadata?.parent_id) {
  nonRootMessageId.add(childId);
- return [childId, metadata === null || metadata === void 0 ? void 0 : metadata.parent_id];
+ return [childId, metadata?.parent_id];
  }
  return;
  })
package/dist/SpendTime.js CHANGED
@@ -3,9 +3,7 @@
  * @en The SpendTime class is used to calculate and record the time spent on operations.
  */
  export class SpendTime {
- constructor() {
- this.timeCounter = new Map();
- }
+ timeCounter = new Map();
  /**
  * @zh 开始计时。
  * @en Starts timing.
@@ -18,8 +16,7 @@ export class SpendTime {
  * @en Ends timing.
  */
  end(key) {
- var _a;
- this.timeCounter.set(key, [((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[0]) || new Date(), new Date()]);
+ this.timeCounter.set(key, [this.timeCounter.get(key)?.[0] || new Date(), new Date()]);
  }
  /**
  * @zh 设置或更新指定键的耗时记录。如果键已存在,则更新结束时间;否则,开始新的计时。
@@ -38,16 +35,14 @@ export class SpendTime {
  * @en Gets the start time for the specified key.
  */
  getStartTime(key) {
- var _a;
- return ((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[0]) || new Date();
+ return this.timeCounter.get(key)?.[0] || new Date();
  }
  /**
  * @zh 获取指定键的结束时间。
  * @en Gets the end time for the specified key.
  */
  getEndTime(key) {
- var _a;
- return ((_a = this.timeCounter.get(key)) === null || _a === void 0 ? void 0 : _a[1]) || new Date();
+ return this.timeCounter.get(key)?.[1] || new Date();
  }
  /**
  * @zh 获取指定键的耗时(毫秒)。
package/dist/TestKit.d.ts CHANGED
@@ -89,7 +89,7 @@ export declare class TestLangGraphChat {
  * @zh 准备测试环境,初始化客户端连接
  * @en Prepare test environment, initialize client connection
  */
- ready(): Promise<import("./LangGraphClient.js").LangGraphClient<unknown>> | undefined;
+ ready(): Promise<import("./History.js").History> | undefined;
  /**
  * @zh 模拟人类输入消息并等待测试任务完成,这是测试的核心方法
  * @en Simulate human input and wait for test tasks to complete, this is the core test method
package/dist/TestKit.js CHANGED
@@ -4,18 +4,17 @@ export class TestLogger {
  console.log(message);
  }
  logMessage(message) {
- var _a, _b, _c, _d, _e, _f, _g;
  const emoji = message.type === "ai" ? "🤖" : message.type === "human" ? "👤" : "🔧";
- const header = `${emoji} ${message.type} | ${(_a = message.name) !== null && _a !== void 0 ? _a : "null"} | ${message.id}`;
+ const header = `${emoji} ${message.type} | ${message.name ?? "null"} | ${message.id}`;
  if (message.type === "tool") {
  return console.log(`${header}
- 🔧 Input: ${(_c = (_b = message.tool_input) === null || _b === void 0 ? void 0 : _b.slice(0, 100)) !== null && _c !== void 0 ? _c : ""}
- 💬 Output: ${(_e = (_d = message.content) === null || _d === void 0 ? void 0 : _d.slice(0, 100)) !== null && _e !== void 0 ? _e : ""}
+ 🔧 Input: ${message.tool_input?.slice(0, 100) ?? ""}
+ 💬 Output: ${message.content?.slice(0, 100) ?? ""}
  `);
  }
  console.log(`---
  ${header}
- 💬 Output: ${(_g = (_f = message.content) === null || _f === void 0 ? void 0 : _f.slice(0, 100)) !== null && _g !== void 0 ? _g : ""}
+ 💬 Output: ${message.content?.slice(0, 100) ?? ""}
  `);
  }
  }
@@ -33,22 +32,23 @@ ${header}
  * ```
  */
  export class TestLangGraphChat {
+ store;
+ /** 是否开启调试模式 */
+ debug = false;
+ /** 上次消息数量,用于检测消息变化 */
+ lastLength = 0;
+ /** 待处理的测试任务列表 */
+ processFunc = [];
+ /** 自定义日志器 */
+ logger;
  /**
  * @zh 构造函数,初始化测试环境
  * @en Constructor, initialize test environment
  */
  constructor(store, options) {
- var _a, _b;
  this.store = store;
- /** 是否开启调试模式 */
- this.debug = false;
- /** 上次消息数量,用于检测消息变化 */
- this.lastLength = 0;
- /** 待处理的测试任务列表 */
- this.processFunc = [];
- this.readited = false;
- this.debug = (_a = options.debug) !== null && _a !== void 0 ? _a : false;
- this.logger = (_b = options.logger) !== null && _b !== void 0 ? _b : new TestLogger();
+ this.debug = options.debug ?? false;
+ this.logger = options.logger ?? new TestLogger();
  options.tools && this.addTools(options.tools);
  const renderMessages = this.store.data.renderMessages;
  // 订阅消息变化,自动检查任务完成状态
@@ -126,6 +126,7 @@ export class TestLangGraphChat {
  this.logger.logMessage(item);
  }
  }
+ readited = false;
  /**
  * @zh 准备测试环境,初始化客户端连接
  * @en Prepare test environment, initialize client connection
package/dist/ToolManager.js CHANGED
@@ -4,11 +4,9 @@ import { createJSONDefineTool } from "./tool/createTool.js";
  * @en The ToolManager class is used to manage and execute tools.
  */
  export class ToolManager {
- constructor() {
- this.tools = new Map();
- // === 专门为前端设计的异步触发结构
- this.waitingMap = new Map();
- }
+ tools = new Map();
+ // === 专门为前端设计的异步触发结构
+ waitingMap = new Map();
  /**
  * @zh 注册一个工具。
  * @en Registers a tool.
@@ -66,9 +64,8 @@ export class ToolManager {
  * @en Calls the tool with the specified name.
  */
  async callTool(name, args, context) {
- var _a;
  const tool = this.getTool(name) || this.getTool("__default__");
- return await ((_a = tool.execute) === null || _a === void 0 ? void 0 : _a.call(tool, args, context));
+ return await tool.execute?.(args, context);
  }
  /**
  * @zh 将所有工具转换为 JSON 定义格式。
package/dist/artifacts/index.js CHANGED
@@ -78,7 +78,7 @@ export const useArtifacts = (renderMessages, client) => {
  });
  const debouncedSetCurrentArtifactById = (id, tool_id) => {
  const current = currentArtifactId.get();
- if ((current === null || current === void 0 ? void 0 : current[0]) === id && (current === null || current === void 0 ? void 0 : current[1]) === tool_id) {
+ if (current?.[0] === id && current?.[1] === tool_id) {
  return;
  }
  showArtifact.set(true);
package/dist/client/LanggraphServer.js CHANGED
@@ -1,4 +1,4 @@
+ import { Client } from "@langchain/langgraph-sdk";
  export const createLangGraphServerClient = async (config) => {
- const { Client } = await import("@langchain/langgraph-sdk");
  return new Client(config);
  };
package/dist/client/LowJSServer.d.ts ADDED
@@ -0,0 +1,3 @@
+ import { LangGraphClientConfig } from "../LangGraphClient.js";
+ import { ILangGraphClient } from "@langgraph-js/pure-graph/dist/types.js";
+ export declare const createLowerJSClient: (config: Omit<LangGraphClientConfig, "client">) => ILangGraphClient;
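
The new LowJSServer declaration adds a factory for building a client without the standard LangGraph SDK transport. Only the signature above appears in this diff; the sketch below additionally assumes the factory is re-exported from the package root (dist/index.d.ts and dist/client/index.d.ts each gain two lines in this release) and that the remaining config fields are optional:

```ts
// Assumptions are marked: the root re-export and the empty config object are not
// confirmed by this diff; only the createLowerJSClient signature above is.
import { createLowerJSClient, LangGraphClient } from "@langgraph-js/sdk";

const lowClient = createLowerJSClient({}); // config: Omit<LangGraphClientConfig, "client">
const client = new LangGraphClient({ client: lowClient });
```
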