@langgraph-js/sdk 1.12.0 → 2.0.0

package/dist/MessageProcessor.js ADDED
@@ -0,0 +1,324 @@
+ /**
+ * @zh StreamingMessageType 类用于判断消息的类型。
+ * @en The StreamingMessageType class is used to determine the type of a message.
+ */
+ export class StreamingMessageType {
+ static isTool(m) {
+ return m.type === "tool";
+ }
+ static isToolAssistant(m) {
+ var _a, _b;
+ /** @ts-ignore */
+ return m.type === "ai" && (((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) || ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length));
+ }
+ }
+ /**
+ * @zh MessageProcessor 类用于统一处理 Message 相关的逻辑,避免重复处理。
+ * @en The MessageProcessor class is used to uniformly handle Message-related logic and avoid duplicate processing.
+ */
+ export class MessageProcessor {
+ constructor(subAgentsKey = "task_store") {
+ /** Streaming message cache */
+ this.streamingMessage = [];
+ /** Update messages sent from the graph */
+ this.graphMessages = [];
+ this.subAgentsKey = subAgentsKey;
+ }
+ /**
+ * @zh 获取流式消息
+ * @en Get streaming messages
+ */
+ getStreamingMessages() {
+ return [...this.streamingMessage];
+ }
+ /**
+ * @zh 设置流式消息
+ * @en Set streaming messages
+ */
+ setStreamingMessages(messages) {
+ this.streamingMessage = messages;
+ }
+ /**
+ * @zh 清空流式消息
+ * @en Clear streaming messages
+ */
+ clearStreamingMessages() {
+ this.streamingMessage = [];
+ }
+ /**
+ * @zh 获取图消息
+ * @en Get graph messages
+ */
+ getGraphMessages() {
+ return [...this.graphMessages];
+ }
+ /**
+ * @zh 设置图消息
+ * @en Set graph messages
+ */
+ setGraphMessages(messages) {
+ this.graphMessages = messages;
+ }
+ /**
+ * @zh 更新流式消息
+ * @en Update streaming message
+ */
+ updateStreamingMessage(message) {
+ const lastMessage = this.streamingMessage[this.streamingMessage.length - 1];
+ if (!(lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.id) || message.id !== lastMessage.id) {
+ this.streamingMessage.push(message);
+ return;
+ }
+ this.streamingMessage[this.streamingMessage.length - 1] = message;
+ }
+ /**
+ * @zh 将 graphMessages 和 streamingMessage 合并,并返回新的消息数组
+ * @en Combine graphMessages and streamingMessage and return a new message array
+ */
+ combineGraphMessagesWithStreamingMessages() {
+ const idMap = new Map(this.streamingMessage.map((i) => [i.id, i]));
+ return [
+ ...this.graphMessages.map((i) => {
+ if (idMap.has(i.id)) {
+ const newValue = idMap.get(i.id);
+ idMap.delete(i.id);
+ return newValue;
+ }
+ return i;
+ }),
+ ...idMap.values(),
+ ];
+ }
+ /**
+ * @zh 子图的数据需要通过 merge 的方式重新进行合并更新
+ * @en Subgraph data needs to be merged and updated through merge method
+ */
+ mergeSubGraphMessagesToStreamingMessages(messages) {
+ const map = new Map(messages.filter((i) => i.id).map((i) => [i.id, i]));
+ this.streamingMessage.forEach((i) => {
+ if (map.has(i.id)) {
+ const newValue = map.get(i.id);
+ Object.assign(i, newValue);
+ map.delete(i.id);
+ }
+ });
+ // The remaining messages are definitely not in streamingMessage
+ map.forEach((i) => {
+ if (i.type === "tool" && i.tool_call_id) {
+ this.streamingMessage.push(i);
+ }
+ });
+ }
+ /**
+ * @zh 克隆消息对象
+ * @en Clone message object
+ */
+ cloneMessage(message) {
+ return JSON.parse(JSON.stringify(message));
+ }
+ /**
+ * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
+ * @en Attaches additional information to messages, such as spend time, unique ID, etc.
+ */
+ attachInfoForMessage(messages) {
+ var _a, _b, _c;
+ let lastMessage = null;
+ const result = [...messages]; // create a copy to avoid mutating the original array
+ for (const message of result) {
+ const createTime = ((_a = message.response_metadata) === null || _a === void 0 ? void 0 : _a.create_time) || "";
+ // Tools must use tool_call_id to guarantee consistency
+ message.unique_id = message.tool_call_id || message.id;
+ message.spend_time = new Date(createTime).getTime() - new Date(((_b = lastMessage === null || lastMessage === void 0 ? void 0 : lastMessage.response_metadata) === null || _b === void 0 ? void 0 : _b.create_time) || createTime).getTime();
+ if (!message.usage_metadata && ((_c = message.response_metadata) === null || _c === void 0 ? void 0 : _c.usage)) {
+ const usage = message.response_metadata.usage;
+ message.usage_metadata = {
+ ...usage,
+ input_tokens: usage.prompt_tokens,
+ output_tokens: usage.completion_tokens,
+ total_tokens: usage.total_tokens,
+ };
+ }
+ lastMessage = message;
+ }
+ return result;
+ }
+ /**
+ * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
+ * @en Composes tool messages, associating AI tool calls with tool execution results.
+ */
+ composeToolMessages(messages) {
+ var _a, _b;
+ const result = [];
+ const assistantToolMessages = new Map();
+ const toolParentMessage = new Map();
+ for (const message of messages) {
+ if (StreamingMessageType.isToolAssistant(message)) {
+ /** @ts-ignore Only the args of tool_call_chunks are plain text */
+ (_a = (message.tool_calls || message.tool_call_chunks)) === null || _a === void 0 ? void 0 : _a.forEach((element) => {
+ assistantToolMessages.set(element.id, element);
+ toolParentMessage.set(element.id, message);
+ });
+ if (!message.content)
+ continue;
+ }
+ if (StreamingMessageType.isTool(message) && !message.tool_input) {
+ const assistantToolMessage = assistantToolMessages.get(message.tool_call_id);
+ const parentMessage = toolParentMessage.get(message.tool_call_id);
+ if (assistantToolMessage) {
+ message.tool_input = typeof assistantToolMessage.args !== "string" ? JSON.stringify(assistantToolMessage.args) : assistantToolMessage.args;
+ if (message.additional_kwargs) {
+ message.additional_kwargs.done = true;
+ message.done = true;
+ }
+ else {
+ message.done = true;
+ message.additional_kwargs = {
+ done: true,
+ };
+ }
+ }
+ if (parentMessage) {
+ message.usage_metadata = parentMessage.usage_metadata;
+ message.node_name = parentMessage.name;
+ // Patch the edge case where the tool name is lost
+ if (!message.name) {
+ message.name = (_b = parentMessage.tool_calls.find((i) => i.id === message.tool_call_id)) === null || _b === void 0 ? void 0 : _b.name;
+ }
+ }
+ }
+ result.push(message);
+ }
+ return result;
+ }
+ /**
+ * @zh 转换 subAgent 消息为工具的子消息
+ * @en Convert subAgent messages to tool sub-messages
+ */
+ convertSubAgentMessages(messages, graphState) {
+ const origin_task_store = graphState[this.subAgentsKey];
+ if (!origin_task_store)
+ return messages;
+ const task_store = JSON.parse(JSON.stringify(origin_task_store));
+ /** Collect the ids of subAgent messages, used to tag data during streaming */
+ messages
+ .filter((i) => {
+ var _a;
+ return (_a = i.node_name) === null || _a === void 0 ? void 0 : _a.startsWith("subagent_");
+ })
+ .forEach((i) => {
+ const tool_call_id = i.node_name.replace("subagent_", "");
+ const store = task_store[tool_call_id];
+ if (store) {
+ // Deduplicate by id
+ const exists = store.messages.some((msg) => msg.id === i.id);
+ if (!exists) {
+ store.messages.push(i);
+ }
+ }
+ else {
+ task_store[tool_call_id] = {
+ messages: [i],
+ };
+ }
+ });
+ const ignoreIds = new Set();
+ Object.values(task_store).forEach((task) => {
+ task.messages.forEach((message) => {
+ ignoreIds.add(message.id);
+ });
+ });
+ const result = [];
+ for (const message of messages) {
+ if (message.type === "tool" && message.tool_call_id) {
+ const task = task_store[message.tool_call_id];
+ if (task) {
+ // Recursively process sub-messages while avoiding duplicate processing
+ message.sub_agent_messages = this.processMessages(task.messages);
+ }
+ }
+ if (message.id && ignoreIds.has(message.id))
+ continue;
+ result.push(message);
+ }
+ return result;
+ }
+ /**
+ * @zh 生成用于 UI 中的流式渲染的消息
+ * @en Generate messages used for streaming rendering in the UI
+ */
+ renderMessages(graphState, getGraphNodeNow) {
+ var _a;
+ const previousMessage = new Map();
+ const closedToolCallIds = new Set();
+ const result = [];
+ const inputMessages = this.combineGraphMessagesWithStreamingMessages();
+ // Iterate from back to front, which guarantees the latest messages come first
+ for (let i = inputMessages.length - 1; i >= 0; i--) {
+ const message = this.cloneMessage(inputMessages[i]);
+ if (!message.id) {
+ result.unshift(message);
+ continue;
+ }
+ if (message.type === "ai") {
+ /** @ts-ignore */
+ if (!message.name)
+ message.name = getGraphNodeNow().name;
+ }
+ if (StreamingMessageType.isToolAssistant(message)) {
+ const m = message;
+ // Record the message for this id and add it to the result
+ previousMessage.set(message.id, m);
+ /** @ts-ignore */
+ const tool_calls = ((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) ? m.tool_calls : m.tool_call_chunks;
+ const new_tool_calls = tool_calls
+ .filter((i) => {
+ return !closedToolCallIds.has(i.id);
+ })
+ .map((tool, index) => {
+ var _a, _b, _c, _d;
+ return {
+ type: "tool",
+ additional_kwargs: {},
+ /** @ts-ignore */
+ tool_input: (_d = (_c = (_b = (_a = m.additional_kwargs) === null || _a === void 0 ? void 0 : _a.tool_calls) === null || _b === void 0 ? void 0 : _b[index]) === null || _c === void 0 ? void 0 : _c.function) === null || _d === void 0 ? void 0 : _d.arguments,
+ id: tool.id,
+ name: tool.name,
+ response_metadata: {},
+ tool_call_id: tool.id,
+ content: "",
+ };
+ });
+ for (const tool of new_tool_calls) {
+ if (!previousMessage.has(tool.id)) {
+ result.unshift(tool);
+ previousMessage.set(tool.id, tool);
+ }
+ }
+ result.unshift(m);
+ }
+ else {
+ if (message.type === "tool" && message.tool_call_id) {
+ closedToolCallIds.add(message.tool_call_id);
+ }
+ previousMessage.set(message.id, message);
+ result.unshift(message);
+ }
+ }
+ return this.processMessages(result, graphState);
+ }
+ /**
+ * @zh 统一的消息处理入口,按顺序执行所有处理步骤
+ * @en Unified message processing entry point, executing all processing steps in order
+ */
+ processMessages(messages, graphState) {
+ // 1. Compose tool messages
+ const composedMessages = this.composeToolMessages(messages);
+ // 2. Attach extra info
+ const messagesWithInfo = this.attachInfoForMessage(composedMessages);
+ // 3. Convert sub-agent messages (if graphState is provided)
+ if (graphState) {
+ return this.convertSubAgentMessages(messagesWithInfo, graphState);
+ }
+ return messagesWithInfo;
+ }
+ }
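
The new MessageProcessor module (re-exported from the package root via the index.js change below) centralizes message handling: streaming chunks and graph-state messages live in separate buffers, get merged by id, and then flow through composeToolMessages → attachInfoForMessage → convertSubAgentMessages. A minimal usage sketch follows; the message literals are simplified placeholders rather than full @langchain/langgraph-sdk Message objects:

```ts
import { MessageProcessor } from "@langgraph-js/sdk";

const processor = new MessageProcessor();

// Messages already persisted in graph state.
processor.setGraphMessages([{ id: "m1", type: "human", content: "hi" }] as any);

// Streaming chunks for one AI message: same id, so the last buffer entry is replaced in place.
processor.updateStreamingMessage({ id: "m2", type: "ai", content: "He" } as any);
processor.updateStreamingMessage({ id: "m2", type: "ai", content: "Hello!" } as any);

// Merge both buffers (graph messages keep their position, streaming-only messages are appended),
// then run the unified pipeline: composeToolMessages → attachInfoForMessage → convertSubAgentMessages.
const merged = processor.combineGraphMessagesWithStreamingMessages();
const rendered = processor.processMessages(merged);
console.log(rendered.map((m: any) => [m.unique_id, m.content]));
```
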
package/dist/TestKit.d.ts CHANGED
@@ -1,5 +1,5 @@
  import { RenderMessage } from "./LangGraphClient.js";
- import { Message } from "@langchain/langgraph-sdk";
+ import type { Message } from "@langchain/langgraph-sdk";
  import { CallToolResult, UnionTool } from "./tool/createTool.js";
  import { createChatStore } from "./ui-store/createChatStore.js";
  /**
@@ -89,7 +89,7 @@ export declare class TestLangGraphChat {
  * @zh 准备测试环境,初始化客户端连接
  * @en Prepare test environment, initialize client connection
  */
- ready(): Promise<import("./LangGraphClient.js").LangGraphClient> | undefined;
+ ready(): Promise<import("./LangGraphClient.js").LangGraphClient<unknown, unknown>> | undefined;
  /**
  * @zh 模拟人类输入消息并等待测试任务完成,这是测试的核心方法
  * @en Simulate human input and wait for test tasks to complete, this is the core test method
package/dist/client/LanggraphServer.d.ts ADDED
@@ -0,0 +1,2 @@
+ import { ILangGraphClient } from "../types.js";
+ export declare const createLangGraphServerClient: () => Promise<ILangGraphClient>;
package/dist/client/LanggraphServer.js ADDED
@@ -0,0 +1,4 @@
+ export const createLangGraphServerClient = async () => {
+ const { Client } = await import("@langchain/langgraph-sdk");
+ return new Client();
+ };
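
createLangGraphServerClient is the new default client factory: it imports @langchain/langgraph-sdk lazily and returns a plain Client. As the createChatStore change further below shows, it is only invoked when no config.client is supplied, so a caller can inject any ILangGraphClient-compatible object instead. A hedged sketch of that injection (the store name, server URL, and any remaining config fields are illustrative assumptions):

```ts
import { Client } from "@langchain/langgraph-sdk";
import { createChatStore } from "@langgraph-js/sdk";

// Point the official SDK client at a self-hosted LangGraph server.
const customClient = new Client({ apiUrl: "http://localhost:2024" });

// When `client` is omitted, initClient falls back to createLangGraphServerClient(),
// which lazily constructs `new Client()` as shown above.
const store = createChatStore("my-agent", { client: customClient } as any);
```
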
package/dist/index.d.ts CHANGED
@@ -1,4 +1,6 @@
  export * from "./LangGraphClient.js";
+ export * from "./MessageProcessor.js";
+ export * from "./types.js";
  export * from "./tool/index.js";
  export * from "@langchain/langgraph-sdk";
  export * from "./ui-store/index.js";
package/dist/index.js CHANGED
@@ -1,4 +1,6 @@
  export * from "./LangGraphClient.js";
+ export * from "./MessageProcessor.js";
+ export * from "./types.js";
  export * from "./tool/index.js";
  export * from "@langchain/langgraph-sdk";
  export * from "./ui-store/index.js";
package/dist/types.d.ts ADDED
@@ -0,0 +1,130 @@
+ import { Thread, Assistant, Run, StreamMode, Command, Metadata, AssistantGraph, OnConflictBehavior, ThreadStatus, ValuesStreamEvent, UpdatesStreamEvent, DebugStreamEvent, MessagesStreamEvent, MessagesTupleStreamEvent, CustomStreamEvent, EventsStreamEvent, ErrorStreamEvent, MetadataStreamEvent, FeedbackStreamEvent, Config, Checkpoint } from "@langchain/langgraph-sdk";
+ import { StreamEvent } from "@langchain/core/tracers/log_stream";
+ export type AssistantSortBy = "assistant_id" | "graph_id" | "name" | "created_at" | "updated_at";
+ export type ThreadSortBy = "thread_id" | "status" | "created_at" | "updated_at";
+ export type SortOrder = "asc" | "desc";
+ export type RunStatus = "pending" | "running" | "error" | "success" | "timeout" | "interrupted";
+ export type MultitaskStrategy = "reject" | "interrupt" | "rollback" | "enqueue";
+ export type DisconnectMode = "cancel" | "continue";
+ export type OnCompletionBehavior = "complete" | "continue";
+ export type CancelAction = "interrupt" | "rollback";
+ export type TypedAsyncGenerator<TStreamMode extends StreamMode | StreamMode[] = [], TSubgraphs extends boolean = false, TStateType = unknown, TUpdateType = TStateType, TCustomType = unknown> = AsyncGenerator<{
+ values: ValuesStreamEvent<TStateType>;
+ updates: UpdatesStreamEvent<TUpdateType>;
+ custom: CustomStreamEvent<TCustomType>;
+ debug: DebugStreamEvent;
+ messages: MessagesStreamEvent;
+ "messages-tuple": MessagesTupleStreamEvent;
+ events: EventsStreamEvent;
+ }[TStreamMode extends StreamMode[] ? TStreamMode[number] : TStreamMode] | ErrorStreamEvent | MetadataStreamEvent | FeedbackStreamEvent>;
+ /**
+ * Interface definition compatible with the LangGraph SDK, allowing non-invasive extension
+ */
+ export interface ILangGraphClient<TStateType = unknown, TUpdateType = TStateType> {
+ assistants: {
+ search(query?: {
+ graphId?: string;
+ metadata?: Metadata;
+ limit?: number;
+ offset?: number;
+ sortBy?: AssistantSortBy;
+ sortOrder?: SortOrder;
+ }): Promise<Assistant[]>;
+ getGraph(assistantId: string, options?: {
+ xray?: boolean | number;
+ }): Promise<AssistantGraph>;
+ };
+ threads: {
+ create<ValuesType = TStateType>(payload?: {
+ metadata?: Metadata;
+ threadId?: string;
+ ifExists?: OnConflictBehavior;
+ graphId?: string;
+ supersteps?: Array<{
+ updates: Array<{
+ values: unknown;
+ command?: Command;
+ asNode: string;
+ }>;
+ }>;
+ }): Promise<Thread<ValuesType>>;
+ search<ValuesType = TStateType>(query?: {
+ metadata?: Metadata;
+ limit?: number;
+ offset?: number;
+ status?: ThreadStatus;
+ sortBy?: ThreadSortBy;
+ sortOrder?: SortOrder;
+ }): Promise<Thread<ValuesType>[]>;
+ get<ValuesType = TStateType>(threadId: string): Promise<Thread<ValuesType>>;
+ delete(threadId: string): Promise<void>;
+ };
+ runs: {
+ list(threadId: string, options?: {
+ limit?: number;
+ offset?: number;
+ status?: RunStatus;
+ }): Promise<Run[]>;
+ stream<TStreamMode extends StreamMode | StreamMode[] = StreamMode, TSubgraphs extends boolean = false>(threadId: null, assistantId: string, payload?: {
+ input?: Record<string, unknown> | null;
+ metadata?: Metadata;
+ config?: Config;
+ checkpointId?: string;
+ checkpoint?: Omit<Checkpoint, "thread_id">;
+ checkpointDuring?: boolean;
+ interruptBefore?: "*" | string[];
+ interruptAfter?: "*" | string[];
+ signal?: AbortController["signal"];
+ webhook?: string;
+ onDisconnect?: DisconnectMode;
+ afterSeconds?: number;
+ ifNotExists?: "create" | "reject";
+ command?: Command;
+ onRunCreated?: (params: {
+ run_id: string;
+ thread_id?: string;
+ }) => void;
+ streamMode?: TStreamMode;
+ streamSubgraphs?: TSubgraphs;
+ streamResumable?: boolean;
+ feedbackKeys?: string[];
+ }): TypedAsyncGenerator<TStreamMode, TSubgraphs, TStateType, TUpdateType>;
+ stream<TStreamMode extends StreamMode | StreamMode[] = StreamMode, TSubgraphs extends boolean = false>(threadId: string, assistantId: string, payload?: {
+ input?: Record<string, unknown> | null;
+ metadata?: Metadata;
+ config?: Config;
+ checkpointId?: string;
+ checkpoint?: Omit<Checkpoint, "thread_id">;
+ checkpointDuring?: boolean;
+ interruptBefore?: "*" | string[];
+ interruptAfter?: "*" | string[];
+ multitaskStrategy?: MultitaskStrategy;
+ onCompletion?: OnCompletionBehavior;
+ signal?: AbortController["signal"];
+ webhook?: string;
+ onDisconnect?: DisconnectMode;
+ afterSeconds?: number;
+ ifNotExists?: "create" | "reject";
+ command?: Command;
+ onRunCreated?: (params: {
+ run_id: string;
+ thread_id?: string;
+ }) => void;
+ streamMode?: TStreamMode;
+ streamSubgraphs?: TSubgraphs;
+ streamResumable?: boolean;
+ feedbackKeys?: string[];
+ }): TypedAsyncGenerator<TStreamMode, TSubgraphs, TStateType, TUpdateType>;
+ joinStream(threadId: string | undefined | null, runId: string, options?: {
+ signal?: AbortSignal;
+ cancelOnDisconnect?: boolean;
+ lastEventId?: string;
+ streamMode?: StreamMode | StreamMode[];
+ } | AbortSignal): AsyncGenerator<{
+ id?: string;
+ event: StreamEvent;
+ data: any;
+ }>;
+ cancel(threadId: string, runId: string, wait?: boolean, action?: CancelAction): Promise<void>;
+ };
+ }
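
ILangGraphClient mirrors the slice of the official SDK surface the UI layer actually uses (assistants.search/getGraph, thread CRUD, runs.stream/joinStream/cancel), so any compatible implementation can stand in for the official Client. A sketch of consuming a client purely through this interface (the assistant id and input shape are placeholders):

```ts
import type { ILangGraphClient } from "@langgraph-js/sdk";

async function runOnce(client: ILangGraphClient, assistantId: string) {
    const thread = await client.threads.create();
    // "values" is one of the StreamMode options accepted by runs.stream.
    const stream = client.runs.stream(thread.thread_id, assistantId, {
        input: { messages: [{ type: "human", content: "hello" }] },
        streamMode: "values",
    });
    for await (const chunk of stream) {
        console.log(chunk.event, chunk.data);
    }
}
```
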
package/dist/types.js ADDED
@@ -0,0 +1 @@
+ export {};
package/dist/ui-store/createChatStore.d.ts CHANGED
@@ -31,7 +31,7 @@ export declare const createChatStore: (initClientName: string, config: LangGraph
  onInit?: (client: LangGraphClient) => void;
  }) => {
  data: {
- client: import("nanostores").PreinitializedWritableAtom<LangGraphClient | null> & object;
+ client: import("nanostores").PreinitializedWritableAtom<LangGraphClient<unknown, unknown> | null> & object;
  renderMessages: import("nanostores").PreinitializedWritableAtom<RenderMessage[]> & object;
  userInput: import("nanostores").PreinitializedWritableAtom<string> & object;
  loading: import("nanostores").PreinitializedWritableAtom<boolean> & object;
@@ -52,7 +52,7 @@ export declare const createChatStore: (initClientName: string, config: LangGraph
  refreshTools: () => Promise<void>;
  setTools(new_tools: UnionTool<any>[]): void;
  isFELocking(): boolean | undefined;
- initClient: () => Promise<LangGraphClient>;
+ initClient: () => Promise<LangGraphClient<unknown, unknown>>;
  sendMessage: (message?: Message[], extraData?: SendMessageOptions) => Promise<void>;
  stopGeneration: () => void;
  toggleToolCollapse: (toolId: string) => void;
@@ -84,7 +84,7 @@ export declare const createChatStore: (initClientName: string, config: LangGraph
  */
  toHistoryChat(thread: Thread<{
  messages: Message[];
- }>): Promise<Thread<import("@langchain/langgraph-sdk").DefaultValues> | undefined>;
+ }>): Promise<Thread<unknown> | undefined>;
  /**
  * @zh 删除指定的历史聊天会话。
  * @en Deletes the specified historical chat session.
package/dist/ui-store/createChatStore.js CHANGED
@@ -2,6 +2,7 @@ import { atom } from "nanostores";
  import { LangGraphClient } from "../LangGraphClient.js";
  import { debounce } from "ts-debounce";
  import { ToolRenderData } from "../tool/ToolUI.js";
+ import { createLangGraphServerClient } from "../client/LanggraphServer.js";
  /**
  * @zh 格式化日期对象为时间字符串。
  * @en Formats a Date object into a time string.
@@ -102,35 +103,50 @@ export const createChatStore = (initClientName, config, context = {}) => {
  * @en Initializes the LangGraph client.
  */
  async function initClient() {
- var _a;
- const newClient = new LangGraphClient(config);
+ var _a, _b;
+ const newClient = new LangGraphClient({
+ ...config,
+ client: (_a = config.client) !== null && _a !== void 0 ? _a : (await createLangGraphServerClient()),
+ });
  await newClient.initAssistant(currentAgent.get());
  currentAgent.set(newClient.getCurrentAssistant().graph_id);
  // No longer created here; sendMessage creates the thread automatically
  // await newClient.createThread();
  inChatError.set(null);
- newClient.onStreamingUpdate((event) => {
+ // Listen for the stream start event
+ newClient.on("start", () => {
+ loading.set(true);
+ });
+ // Listen for thread creation and stream completion events
+ newClient.on("thread", () => {
+ var _a;
+ currentChatId.set(((_a = newClient.getCurrentThread()) === null || _a === void 0 ? void 0 : _a.thread_id) || null);
+ // When a new flow is created, default to __start__
+ currentNodeName.set("__start__");
+ // When a new session is created, the history panel needs to refresh automatically
+ refreshHistoryList();
+ });
+ newClient.on("done", () => {
+ loading.set(false);
+ updateUI(newClient);
+ });
+ // Listen for error events
+ newClient.on("error", (event) => {
+ loading.set(false);
+ inChatError.set(event.data);
+ });
+ // Listen for message and value update events
+ newClient.on("message", () => {
+ var _a;
+ currentChatId.set(((_a = newClient.getCurrentThread()) === null || _a === void 0 ? void 0 : _a.thread_id) || null);
+ updateUI(newClient);
+ });
+ newClient.on("value", () => {
  var _a;
  currentChatId.set(((_a = newClient.getCurrentThread()) === null || _a === void 0 ? void 0 : _a.thread_id) || null);
- if (event.type === "start")
- loading.set(true);
- if (event.type === "thread" || event.type === "done") {
- // console.log(event.data);
- // When a new flow is created, default to __start__
- currentNodeName.set("__start__");
- if (event.type === "done")
- loading.set(false);
- // When a new session is created, the history panel needs to refresh automatically
- return refreshHistoryList();
- }
- if (event.type === "error") {
- loading.set(false);
- inChatError.set(event.data);
- }
- // console.log(newClient.renderMessage);
  updateUI(newClient);
  });
- (_a = context.onInit) === null || _a === void 0 ? void 0 : _a.call(context, newClient);
+ (_b = context.onInit) === null || _b === void 0 ? void 0 : _b.call(context, newClient);
  newClient.graphState = {};
  client.set(newClient);
  if (showGraph.get())
@@ -299,7 +315,7 @@ export const createChatStore = (initClientName, config, context = {}) => {
  */
  async deleteHistoryChat(thread) {
  var _a;
- await ((_a = client.get()) === null || _a === void 0 ? void 0 : _a.threads.delete(thread.thread_id));
+ await ((_a = client.get()) === null || _a === void 0 ? void 0 : _a.deleteThread(thread.thread_id));
  await refreshHistoryList();
  },
  getToolUIRender,
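
The 1.x onStreamingUpdate(event) callback is replaced by discrete events backed by the new eventemitter3 dependency (start, thread, message, value, done, error), and thread deletion now goes through client.deleteThread(threadId) instead of client.threads.delete(threadId). A migration sketch (constructor options other than client are omitted and assumed):

```ts
import { LangGraphClient } from "@langgraph-js/sdk";
import { Client } from "@langchain/langgraph-sdk";

const client = new LangGraphClient({ client: new Client() } as any);

// 1.x: client.onStreamingUpdate((event) => { if (event.type === "done") { /* ... */ } });
// 2.0: register one listener per event name.
client.on("start", () => console.log("run started"));
client.on("thread", () => console.log("thread created"));
client.on("message", () => console.log("streaming message updated"));
client.on("error", (event: { data: unknown }) => console.error(event.data));
client.on("done", () => console.log("run finished"));
```
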
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langgraph-js/sdk",
- "version": "1.12.0",
+ "version": "2.0.0",
  "description": "The UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces",
  "main": "dist/index.js",
  "type": "module",
@@ -30,6 +30,7 @@
  },
  "dependencies": {
  "@langchain/langgraph-sdk": "^0.0.77",
+ "eventemitter3": "^5.0.1",
  "jsonrepair": "^3.12.0",
  "nanostores": "^1.0.1",
  "ts-debounce": "^4.0.0",