@langgraph-js/sdk 1.5.3 → 1.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -75,7 +75,7 @@ export declare class StreamingMessageType {
     static isToolAssistant(m: Message): any;
 }
 type StreamingUpdateEvent = {
-    type: "message" | "value" | "update" | "error" | "thread" | "done";
+    type: "message" | "value" | "update" | "error" | "thread" | "done" | "start";
     data: any;
 };
 type StreamingUpdateCallback = (event: StreamingUpdateEvent) => void;
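
The new "start" member of the StreamingUpdateEvent union means an onStreamingUpdate listener can react the moment a run is kicked off, before any chunks arrive. A minimal consumer sketch (not part of the published diff), assuming a LangGraphClient instance named client has already been constructed and that onStreamingUpdate accepts the StreamingUpdateCallback declared above:

client.onStreamingUpdate((event) => {
    if (event.type === "start") {
        // emitted right after the stream request is issued; per this diff the payload is { event: "start" }
        console.log("run started");
    }
    if (event.type === "done") {
        // emitted once the stream has been fully consumed
        console.log("run finished");
    }
});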
@@ -279,6 +279,7 @@ export class LangGraphClient extends Client {
                     message.usage_metadata = parentMessage.usage_metadata;
                     message.node_name = parentMessage.name;
                 }
+                message.id = message.tool_call_id;
             }
             result.push(message);
         }
@@ -368,6 +369,12 @@ export class LangGraphClient extends Client {
             command,
         });
         const streamRecord = [];
+        this.emitStreamingUpdate({
+            type: "start",
+            data: {
+                event: "start",
+            },
+        });
         for await (const chunk of streamResponse) {
             streamRecord.push(chunk);
             if (chunk.event === "metadata") {
@@ -403,7 +410,6 @@ export class LangGraphClient extends Client {
                 });
             }
             this.graphState = chunk.data;
-            this.streamingMessage = [];
         }
         continue;
     }
@@ -423,6 +429,7 @@ export class LangGraphClient extends Client {
                 event: "done",
             },
         });
+        this.streamingMessage = [];
         return streamRecord;
     }
     /** Subgraph data needs to be re-merged and updated via a merge */
@@ -444,8 +451,10 @@ export class LangGraphClient extends Client {
     }
     runFETool() {
         var _a;
-        const data = this.graphMessages;
+        const data = this.streamingMessage; // must be guaranteed not to be cleared
         const lastMessage = data[data.length - 1];
+        if (!lastMessage)
+            return;
         // if the last message is a frontend tool message, invoke the tool
         if (lastMessage.type === "ai" && ((_a = lastMessage.tool_calls) === null || _a === void 0 ? void 0 : _a.length)) {
             const result = lastMessage.tool_calls.map((tool) => {
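
Because streamingMessage is now only cleared after the "done" event (see the -423/+429 hunk above) rather than on every values chunk, runFETool reads the still-populated streaming buffer instead of graphMessages, and the new early return protects it when that buffer is empty. A standalone TypeScript sketch of the same guard, with the Message shape reduced to the fields visible in this diff (everything else is an assumption):

// hypothetical helper mirroring the guard added to runFETool
interface MinimalMessage {
    type: string;
    tool_calls?: { name: string; args?: unknown }[];
}

function lastFrontendToolCalls(messages: MinimalMessage[]) {
    const lastMessage = messages[messages.length - 1];
    if (!lastMessage) return undefined; // the buffer may legitimately be empty
    if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
        return lastMessage.tool_calls; // frontend tool calls to execute
    }
    return undefined;
}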
@@ -102,6 +102,9 @@ export const createChatStore = (initClientName, config, context = {}) => {
     // await newClient.createThread();
     inChatError.set(null);
     newClient.onStreamingUpdate((event) => {
+        if (event.type === "start") {
+            loading.set(true);
+        }
         if (event.type === "thread" || event.type === "done") {
             // console.log(event.data);
             // when creating a new flow, default to __start__
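
With the "start" branch in place, createChatStore turns its loading flag on as soon as streaming begins rather than waiting for the first message. Application code that talks to the client directly could mirror the same pattern; a sketch, where setLoading is a hypothetical UI-state setter and not part of the SDK:

client.onStreamingUpdate((event) => {
    if (event.type === "start") setLoading(true);                            // stream opened
    if (event.type === "done" || event.type === "error") setLoading(false);  // stream finished or failed
});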
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@langgraph-js/sdk",
-    "version": "1.5.3",
+    "version": "1.5.5",
     "description": "The UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces",
     "main": "dist/index.js",
     "type": "module",
@@ -84,7 +84,7 @@ export class StreamingMessageType {
 }
 
 type StreamingUpdateEvent = {
-    type: "message" | "value" | "update" | "error" | "thread" | "done";
+    type: "message" | "value" | "update" | "error" | "thread" | "done" | "start";
     data: any;
 };
@@ -357,6 +357,7 @@ export class LangGraphClient extends Client {
                     message.usage_metadata = parentMessage.usage_metadata;
                     message.node_name = parentMessage.name;
                 }
+                message.id = message.tool_call_id;
             }
             result.push(message);
         }
@@ -456,6 +457,12 @@ export class LangGraphClient extends Client {
             command,
         });
         const streamRecord: any[] = [];
+        this.emitStreamingUpdate({
+            type: "start",
+            data: {
+                event: "start",
+            },
+        });
         for await (const chunk of streamResponse) {
             streamRecord.push(chunk);
             if (chunk.event === "metadata") {
@@ -489,7 +496,6 @@ export class LangGraphClient extends Client {
                 });
             }
             this.graphState = chunk.data;
-            this.streamingMessage = [];
         }
         continue;
     } else if (chunk.event.startsWith("values|")) {
@@ -507,6 +513,7 @@ export class LangGraphClient extends Client {
                 event: "done",
             },
         });
+        this.streamingMessage = [];
         return streamRecord;
     }
     /** Subgraph data needs to be re-merged and updated via a merge */
@@ -528,8 +535,9 @@ export class LangGraphClient extends Client {
     }
 
     private runFETool() {
-        const data = this.graphMessages;
+        const data = this.streamingMessage; // must be guaranteed not to be cleared
         const lastMessage = data[data.length - 1];
+        if (!lastMessage) return;
         // if the last message is a frontend tool message, invoke the tool
         if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
            const result = lastMessage.tool_calls.map((tool) => {
@@ -108,6 +108,9 @@ export const createChatStore = (
     // await newClient.createThread();
     inChatError.set(null);
     newClient.onStreamingUpdate((event) => {
+        if (event.type === "start") {
+            loading.set(true);
+        }
         if (event.type === "thread" || event.type === "done") {
             // console.log(event.data);
             // when creating a new flow, default to __start__