@langgraph-js/sdk 1.1.6 → 1.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,163 +1,163 @@
1
- # @langgraph-js/sdk
2
-
3
- ![npm version](https://img.shields.io/npm/v/@langgraph-js/sdk)
4
- ![license](https://img.shields.io/npm/l/@langgraph-js/sdk)
5
-
6
- > The missing UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces
7
-
8
- ## Why @langgraph-js/sdk?
9
-
10
- Building AI agent applications is complex, especially when you need to bridge the gap between LangGraph agents and interactive user interfaces. This SDK solves the critical challenges of frontend integration:
11
-
12
- - **Provides a complete UI integration layer** - no more complex custom code to handle tools, streaming, and state management
13
- - **Simplifies human-in-the-loop interactions** - easily incorporate user feedback within agent workflows
14
- - **Handles edge cases automatically** - interruptions, errors, token management and more
15
- - **Offers a rich set of UI components** - ready-to-use elements to display agent interactions
16
-
17
- [DOCS](https://langgraph-js.netlify.app)
18
-
19
- ## Installation
20
-
21
- ```bash
22
- # Using npm
23
- npm install @langgraph-js/sdk
24
-
25
- # Using yarn
26
- yarn add @langgraph-js/sdk
27
-
28
- # Using pnpm
29
- pnpm add @langgraph-js/sdk
30
- ```
31
-
32
- ## Key Features
33
-
34
- ### Generative UI
35
-
36
- - ✅ Custom Tool Messages
37
- - ✅ Token Counter
38
- - ✅ Stop Graph Progress
39
- - ✅ Interrupt Handling
40
- - ✅ Error Handling
41
- - ✅ Spend Time Tracking
42
- - ✅ Time Persistence
43
-
44
- ### Frontend Actions
45
-
46
- - ✅ Definition of Union Tools
47
- - ✅ Frontend Functions As Tools
48
- - ✅ Human-in-the-Loop Interaction
49
- - ✅ Interrupt Mode
50
-
51
- ### Authorization
52
-
53
- - ✅ Cookie-Based Authentication
54
- - ✅ Custom Token Authentication
55
-
56
- ### Persistence
57
-
58
- - ✅ Read History from LangGraph
59
-
60
- ## Advanced Usage
61
-
62
- ### Creating a Chat Store
63
-
64
- You can easily create a reactive store for your LangGraph client:
65
-
66
- ```typescript
67
- import { createChatStore } from "@langgraph-js/sdk";
68
-
69
- export const globalChatStore = createChatStore(
70
- "agent",
71
- {
72
- // Custom LangGraph backend interaction
73
- apiUrl: "http://localhost:8123",
74
- // Custom headers for authentication
75
- defaultHeaders: JSON.parse(localStorage.getItem("code") || "{}"),
76
- callerOptions: {
77
- // Example for including cookies
78
- // fetch(url: string, options: RequestInit) {
79
- // options.credentials = "include";
80
- // return fetch(url, options);
81
- // },
82
- },
83
- },
84
- {
85
- onInit(client) {
86
- client.tools.bindTools([]);
87
- },
88
- }
89
- );
90
- ```
91
-
92
- ### React Integration
93
-
94
- First, install the nanostores React integration:
95
-
96
- ```bash
97
- pnpm i @nanostores/react
98
- ```
99
-
100
- Then create a context provider for your chat:
101
-
102
- ```tsx
103
- import React, { createContext, useContext, useEffect } from "react";
104
- import { globalChatStore } from "../store"; // Import your store
105
- import { UnionStore, useUnionStore } from "@langgraph-js/sdk";
106
- import { useStore } from "@nanostores/react";
107
-
108
- type ChatContextType = UnionStore<typeof globalChatStore>;
109
-
110
- const ChatContext = createContext<ChatContextType | undefined>(undefined);
111
-
112
- export const useChat = () => {
113
- const context = useContext(ChatContext);
114
- if (!context) {
115
- throw new Error("useChat must be used within a ChatProvider");
116
- }
117
- return context;
118
- };
119
-
120
- export const ChatProvider = ({ children }) => {
121
- // Use store to ensure React gets reactive state updates
122
- const store = useUnionStore(globalChatStore, useStore);
123
-
124
- useEffect(() => {
125
- // Initialize client
126
- store.initClient().then(() => {
127
- // Initialize conversation history
128
- store.refreshHistoryList();
129
- });
130
- }, [store.currentAgent]);
131
-
132
- return <ChatContext.Provider value={store}>{children}</ChatContext.Provider>;
133
- };
134
- ```
135
-
136
- Use it in your components:
137
-
138
- ```tsx
139
- export const MyChat = () => {
140
- return (
141
- <ChatProvider>
142
- <ChatComp></ChatComp>
143
- </ChatProvider>
144
- );
145
- };
146
-
147
- function ChatComp() {
148
- const chat = useChat();
149
- // Use chat store methods and state here
150
- }
151
- ```
152
-
153
- ## Documentation
154
-
155
- For complete documentation, visit our [official docs](https://langgraph-js.netlify.app).
156
-
157
- ## Contributing
158
-
159
- Contributions are welcome! Please feel free to submit a Pull Request.
160
-
161
- ## License
162
-
163
- This project is licensed under the Apache-2.0 License.
1
+ # @langgraph-js/sdk
2
+
3
+ ![npm version](https://img.shields.io/npm/v/@langgraph-js/sdk)
4
+ ![license](https://img.shields.io/npm/l/@langgraph-js/sdk)
5
+
6
+ > The missing UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces
7
+
8
+ ## Why @langgraph-js/sdk?
9
+
10
+ Building AI agent applications is complex, especially when you need to bridge the gap between LangGraph agents and interactive user interfaces. This SDK solves the critical challenges of frontend integration:
11
+
12
+ - **Provides a complete UI integration layer** - no more complex custom code to handle tools, streaming, and state management
13
+ - **Simplifies human-in-the-loop interactions** - easily incorporate user feedback within agent workflows
14
+ - **Handles edge cases automatically** - interruptions, errors, token management and more
15
+ - **Offers a rich set of UI components** - ready-to-use elements to display agent interactions
16
+
17
+ [DOCS](https://langgraph-js.netlify.app)
18
+
19
+ ## Installation
20
+
21
+ ```bash
22
+ # Using npm
23
+ npm install @langgraph-js/sdk
24
+
25
+ # Using yarn
26
+ yarn add @langgraph-js/sdk
27
+
28
+ # Using pnpm
29
+ pnpm add @langgraph-js/sdk
30
+ ```
31
+
32
+ ## Key Features
33
+
34
+ ### Generative UI
35
+
36
+ - ✅ Custom Tool Messages
37
+ - ✅ Token Counter
38
+ - ✅ Stop Graph Progress
39
+ - ✅ Interrupt Handling
40
+ - ✅ Error Handling
41
+ - ✅ Spend Time Tracking
42
+ - ✅ Time Persistence
43
+
44
+ ### Frontend Actions
45
+
46
+ - ✅ Definition of Union Tools
47
+ - ✅ Frontend Functions As Tools
48
+ - ✅ Human-in-the-Loop Interaction
49
+ - ✅ Interrupt Mode
50
+
51
+ ### Authorization
52
+
53
+ - ✅ Cookie-Based Authentication
54
+ - ✅ Custom Token Authentication
55
+
56
+ ### Persistence
57
+
58
+ - ✅ Read History from LangGraph
59
+
60
+ ## Advanced Usage
61
+
62
+ ### Creating a Chat Store
63
+
64
+ You can easily create a reactive store for your LangGraph client:
65
+
66
+ ```typescript
67
+ import { createChatStore } from "@langgraph-js/sdk";
68
+
69
+ export const globalChatStore = createChatStore(
70
+ "agent",
71
+ {
72
+ // Custom LangGraph backend interaction
73
+ apiUrl: "http://localhost:8123",
74
+ // Custom headers for authentication
75
+ defaultHeaders: JSON.parse(localStorage.getItem("code") || "{}"),
76
+ callerOptions: {
77
+ // Example for including cookies
78
+ // fetch(url: string, options: RequestInit) {
79
+ // options.credentials = "include";
80
+ // return fetch(url, options);
81
+ // },
82
+ },
83
+ },
84
+ {
85
+ onInit(client) {
86
+ client.tools.bindTools([]);
87
+ },
88
+ }
89
+ );
90
+ ```
91
+
92
+ ### React Integration
93
+
94
+ First, install the nanostores React integration:
95
+
96
+ ```bash
97
+ pnpm i @nanostores/react
98
+ ```
99
+
100
+ Then create a context provider for your chat:
101
+
102
+ ```tsx
103
+ import React, { createContext, useContext, useEffect } from "react";
104
+ import { globalChatStore } from "../store"; // Import your store
105
+ import { UnionStore, useUnionStore } from "@langgraph-js/sdk";
106
+ import { useStore } from "@nanostores/react";
107
+
108
+ type ChatContextType = UnionStore<typeof globalChatStore>;
109
+
110
+ const ChatContext = createContext<ChatContextType | undefined>(undefined);
111
+
112
+ export const useChat = () => {
113
+ const context = useContext(ChatContext);
114
+ if (!context) {
115
+ throw new Error("useChat must be used within a ChatProvider");
116
+ }
117
+ return context;
118
+ };
119
+
120
+ export const ChatProvider = ({ children }) => {
121
+ // Use store to ensure React gets reactive state updates
122
+ const store = useUnionStore(globalChatStore, useStore);
123
+
124
+ useEffect(() => {
125
+ // Initialize client
126
+ store.initClient().then(() => {
127
+ // Initialize conversation history
128
+ store.refreshHistoryList();
129
+ });
130
+ }, [store.currentAgent]);
131
+
132
+ return <ChatContext.Provider value={store}>{children}</ChatContext.Provider>;
133
+ };
134
+ ```
135
+
136
+ Use it in your components:
137
+
138
+ ```tsx
139
+ export const MyChat = () => {
140
+ return (
141
+ <ChatProvider>
142
+ <ChatComp></ChatComp>
143
+ </ChatProvider>
144
+ );
145
+ };
146
+
147
+ function ChatComp() {
148
+ const chat = useChat();
149
+ // Use chat store methods and state here
150
+ }
151
+ ```
152
+
153
+ ## Documentation
154
+
155
+ For complete documentation, visit our [official docs](https://langgraph-js.netlify.app).
156
+
157
+ ## Contributing
158
+
159
+ Contributions are welcome! Please feel free to submit a Pull Request.
160
+
161
+ ## License
162
+
163
+ This project is licensed under the Apache-2.0 License.
@@ -1,7 +1,25 @@
1
1
  import { Client, Thread, Message, Assistant, HumanMessage, ToolMessage, Command } from "@langchain/langgraph-sdk";
2
- import { ToolManager } from "./ToolManager";
3
- import { CallToolResult } from "./tool";
4
- import { AsyncCallerParams } from "@langchain/langgraph-sdk/dist/utils/async_caller";
2
+ import { ToolManager } from "./ToolManager.js";
3
+ import { CallToolResult } from "./tool/createTool.js";
4
+ interface AsyncCallerParams {
5
+ /**
6
+ * The maximum number of concurrent calls that can be made.
7
+ * Defaults to `Infinity`, which means no limit.
8
+ */
9
+ maxConcurrency?: number;
10
+ /**
11
+ * The maximum number of retries that can be made for a single call,
12
+ * with an exponential backoff between each attempt. Defaults to 6.
13
+ */
14
+ maxRetries?: number;
15
+ onFailedResponseHook?: any;
16
+ /**
17
+ * Specify a custom fetch implementation.
18
+ *
19
+ * By default we expect the `fetch` is available in the global scope.
20
+ */
21
+ fetch?: typeof fetch | ((...args: any[]) => any);
22
+ }
5
23
  export type RenderMessage = Message & {
6
24
  /** 工具入参 ,聚合而来*/
7
25
  tool_input?: string;
@@ -40,6 +58,10 @@ export interface LangGraphClientConfig {
40
58
  timeoutMs?: number;
41
59
  defaultHeaders?: Record<string, string | null | undefined>;
42
60
  }
61
+ /**
62
+ * @zh StreamingMessageType 类用于判断消息的类型。
63
+ * @en The StreamingMessageType class is used to determine the type of a message.
64
+ */
43
65
  export declare class StreamingMessageType {
44
66
  static isUser(m: Message): m is HumanMessage;
45
67
  static isTool(m: Message): m is ToolMessage;
@@ -51,6 +73,10 @@ type StreamingUpdateEvent = {
51
73
  data: any;
52
74
  };
53
75
  type StreamingUpdateCallback = (event: StreamingUpdateEvent) => void;
76
+ /**
77
+ * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
78
+ * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
79
+ */
54
80
  export declare class LangGraphClient extends Client {
55
81
  private currentAssistant;
56
82
  private currentThread;
@@ -60,43 +86,103 @@ export declare class LangGraphClient extends Client {
60
86
  constructor(config: LangGraphClientConfig);
61
87
  availableAssistants: Assistant[];
62
88
  private listAssistants;
89
+ /**
90
+ * @zh 初始化 Assistant。
91
+ * @en Initializes the Assistant.
92
+ */
63
93
  initAssistant(agentName: string): Promise<void>;
94
+ /**
95
+ * @zh 创建一个新的 Thread。
96
+ * @en Creates a new Thread.
97
+ */
64
98
  createThread({ threadId, }?: {
65
99
  threadId?: string;
66
100
  }): Promise<Thread<import("@langchain/langgraph-sdk").DefaultValues>>;
101
+ /**
102
+ * @zh 列出所有的 Thread。
103
+ * @en Lists all Threads.
104
+ */
67
105
  listThreads<T>(): Promise<Thread<T>[]>;
68
- /** 从历史中恢复数据 */
106
+ /**
107
+ * @zh 从历史中恢复 Thread 数据。
108
+ * @en Restores the Thread data from history.
109
+ */
69
110
  resetThread(agent: string, threadId: string): Promise<void>;
70
111
  streamingMessage: RenderMessage[];
71
112
  /** 图发过来的更新信息 */
72
113
  graphMessages: RenderMessage[];
73
114
  cloneMessage(message: Message): Message;
74
115
  private replaceMessageWithValuesMessage;
75
- /** 用于 UI 中的流式渲染中的消息 */
116
+ /**
117
+ * @zh 用于 UI 中的流式渲染中的消息。
118
+ * @en Messages used for streaming rendering in the UI.
119
+ */
76
120
  get renderMessage(): RenderMessage[];
77
- attachInfoForMessage(result: RenderMessage[]): RenderMessage[];
78
- composeToolMessages(messages: RenderMessage[]): RenderMessage[];
121
+ /**
122
+ * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
123
+ * @en Attaches additional information to messages, such as spend time, unique ID, etc.
124
+ */
125
+ private attachInfoForMessage;
126
+ /**
127
+ * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
128
+ * @en Composes tool messages, associating AI tool calls with tool execution results.
129
+ */
130
+ private composeToolMessages;
131
+ /**
132
+ * @zh 获取 Token 计数器信息。
133
+ * @en Gets the Token counter information.
134
+ */
79
135
  get tokenCounter(): {
80
136
  total_tokens: number;
81
137
  input_tokens: number;
82
138
  output_tokens: number;
83
139
  };
140
+ /**
141
+ * @zh 注册流式更新的回调函数。
142
+ * @en Registers a callback function for streaming updates.
143
+ */
84
144
  onStreamingUpdate(callback: StreamingUpdateCallback): () => void;
85
145
  private emitStreamingUpdate;
86
146
  graphState: any;
87
147
  currentRun?: {
88
148
  run_id: string;
89
149
  };
150
+ /**
151
+ * @zh 取消当前的 Run。
152
+ * @en Cancels the current Run.
153
+ */
90
154
  cancelRun(): void;
155
+ /**
156
+ * @zh 发送消息到 LangGraph 后端。
157
+ * @en Sends a message to the LangGraph backend.
158
+ */
91
159
  sendMessage(input: string | Message[], { extraParams, _debug, command }?: SendMessageOptions): Promise<any[]>;
92
160
  private runFETool;
93
161
  private callFETool;
94
- /** 恢复消息,当中断流时使用 */
162
+ /**
163
+ * @zh 继续被前端工具中断的流程。
164
+ * @en Resumes a process interrupted by a frontend tool.
165
+ */
95
166
  resume(result: CallToolResult): Promise<any[]>;
96
- /** 完成工具等待 */
167
+ /**
168
+ * @zh 标记前端工具等待已完成。
169
+ * @en Marks the frontend tool waiting as completed.
170
+ */
97
171
  doneFEToolWaiting(id: string, result: CallToolResult): void;
172
+ /**
173
+ * @zh 获取当前的 Thread。
174
+ * @en Gets the current Thread.
175
+ */
98
176
  getCurrentThread(): Thread<import("@langchain/langgraph-sdk").DefaultValues> | null;
177
+ /**
178
+ * @zh 获取当前的 Assistant。
179
+ * @en Gets the current Assistant.
180
+ */
99
181
  getCurrentAssistant(): Assistant | null;
182
+ /**
183
+ * @zh 重置客户端状态。
184
+ * @en Resets the client state.
185
+ */
100
186
  reset(): Promise<void>;
101
187
  }
102
188
  export {};
@@ -1,5 +1,9 @@
1
1
  import { Client } from "@langchain/langgraph-sdk";
2
- import { ToolManager } from "./ToolManager";
2
+ import { ToolManager } from "./ToolManager.js";
3
+ /**
4
+ * @zh StreamingMessageType 类用于判断消息的类型。
5
+ * @en The StreamingMessageType class is used to determine the type of a message.
6
+ */
3
7
  export class StreamingMessageType {
4
8
  static isUser(m) {
5
9
  return m.type === "human";
@@ -16,6 +20,10 @@ export class StreamingMessageType {
16
20
  return m.type === "ai" && (((_a = m.tool_calls) === null || _a === void 0 ? void 0 : _a.length) || ((_b = m.tool_call_chunks) === null || _b === void 0 ? void 0 : _b.length));
17
21
  }
18
22
  }
23
+ /**
24
+ * @zh LangGraphClient 类是与 LangGraph 后端交互的主要客户端。
25
+ * @en The LangGraphClient class is the main client for interacting with the LangGraph backend.
26
+ */
19
27
  export class LangGraphClient extends Client {
20
28
  constructor(config) {
21
29
  super(config);
@@ -37,6 +45,10 @@ export class LangGraphClient extends Client {
37
45
  limit: 100,
38
46
  });
39
47
  }
48
+ /**
49
+ * @zh 初始化 Assistant。
50
+ * @en Initializes the Assistant.
51
+ */
40
52
  async initAssistant(agentName) {
41
53
  try {
42
54
  const assistants = await this.listAssistants();
@@ -56,6 +68,10 @@ export class LangGraphClient extends Client {
56
68
  throw error;
57
69
  }
58
70
  }
71
+ /**
72
+ * @zh 创建一个新的 Thread。
73
+ * @en Creates a new Thread.
74
+ */
59
75
  async createThread({ threadId, } = {}) {
60
76
  try {
61
77
  this.currentThread = await this.threads.create({
@@ -68,12 +84,19 @@ export class LangGraphClient extends Client {
68
84
  throw error;
69
85
  }
70
86
  }
87
+ /**
88
+ * @zh 列出所有的 Thread。
89
+ * @en Lists all Threads.
90
+ */
71
91
  async listThreads() {
72
92
  return this.threads.search({
73
93
  sortOrder: "desc",
74
94
  });
75
95
  }
76
- /** 从历史中恢复数据 */
96
+ /**
97
+ * @zh 从历史中恢复 Thread 数据。
98
+ * @en Restores the Thread data from history.
99
+ */
77
100
  async resetThread(agent, threadId) {
78
101
  await this.initAssistant(agent);
79
102
  this.currentThread = await this.threads.get(threadId);
@@ -104,7 +127,10 @@ export class LangGraphClient extends Client {
104
127
  }
105
128
  return message;
106
129
  }
107
- /** 用于 UI 中的流式渲染中的消息 */
130
+ /**
131
+ * @zh 用于 UI 中的流式渲染中的消息。
132
+ * @en Messages used for streaming rendering in the UI.
133
+ */
108
134
  get renderMessage() {
109
135
  var _a;
110
136
  const previousMessage = new Map();
@@ -157,6 +183,10 @@ export class LangGraphClient extends Client {
157
183
  }
158
184
  return this.attachInfoForMessage(this.composeToolMessages(result));
159
185
  }
186
+ /**
187
+ * @zh 为消息附加额外的信息,如耗时、唯一 ID 等。
188
+ * @en Attaches additional information to messages, such as time spent, unique ID, etc.
189
+ */
160
190
  attachInfoForMessage(result) {
161
191
  var _a, _b, _c;
162
192
  let lastMessage = null;
@@ -182,6 +212,10 @@ export class LangGraphClient extends Client {
182
212
  }
183
213
  return result;
184
214
  }
215
+ /**
216
+ * @zh 组合工具消息,将 AI 的工具调用和工具的执行结果关联起来。
217
+ * @en Composes tool messages, associating AI tool calls with tool execution results.
218
+ */
185
219
  composeToolMessages(messages) {
186
220
  var _a;
187
221
  const result = [];
@@ -219,6 +253,10 @@ export class LangGraphClient extends Client {
219
253
  }
220
254
  return result;
221
255
  }
256
+ /**
257
+ * @zh 获取 Token 计数器信息。
258
+ * @en Gets the Token counter information.
259
+ */
222
260
  get tokenCounter() {
223
261
  return this.graphMessages.reduce((acc, message) => {
224
262
  var _a, _b, _c, _d, _e;
@@ -240,6 +278,10 @@ export class LangGraphClient extends Client {
240
278
  output_tokens: 0,
241
279
  });
242
280
  }
281
+ /**
282
+ * @zh 注册流式更新的回调函数。
283
+ * @en Registers a callback function for streaming updates.
284
+ */
243
285
  onStreamingUpdate(callback) {
244
286
  this.streamingCallbacks.add(callback);
245
287
  return () => {
@@ -249,12 +291,20 @@ export class LangGraphClient extends Client {
249
291
  emitStreamingUpdate(event) {
250
292
  this.streamingCallbacks.forEach((callback) => callback(event));
251
293
  }
294
+ /**
295
+ * @zh 取消当前的 Run。
296
+ * @en Cancels the current Run.
297
+ */
252
298
  cancelRun() {
253
299
  var _a, _b;
254
300
  if (((_a = this.currentThread) === null || _a === void 0 ? void 0 : _a.thread_id) && ((_b = this.currentRun) === null || _b === void 0 ? void 0 : _b.run_id)) {
255
301
  this.runs.cancel(this.currentThread.thread_id, this.currentRun.run_id);
256
302
  }
257
303
  }
304
+ /**
305
+ * @zh 发送消息到 LangGraph 后端。
306
+ * @en Sends a message to the LangGraph backend.
307
+ */
258
308
  async sendMessage(input, { extraParams, _debug, command } = {}) {
259
309
  if (!this.currentAssistant) {
260
310
  throw new Error("Thread or Assistant not initialized");
@@ -366,7 +416,10 @@ export class LangGraphClient extends Client {
366
416
  const result = await this.tools.callTool(message.name, args, { client: that, message });
367
417
  return this.resume(result);
368
418
  }
369
- /** 恢复消息,当中断流时使用 */
419
+ /**
420
+ * @zh 继续被前端工具中断的流程。
421
+ * @en Resumes a process interrupted by a frontend tool.
422
+ */
370
423
  resume(result) {
371
424
  return this.sendMessage([], {
372
425
  command: {
@@ -374,7 +427,10 @@ export class LangGraphClient extends Client {
374
427
  },
375
428
  });
376
429
  }
377
- /** 完成工具等待 */
430
+ /**
431
+ * @zh 标记前端工具等待已完成。
432
+ * @en Marks the frontend tool waiting as completed.
433
+ */
378
434
  doneFEToolWaiting(id, result) {
379
435
  var _a;
380
436
  const done = this.tools.doneWaiting(id, result);
@@ -382,12 +438,24 @@ export class LangGraphClient extends Client {
382
438
  this.resume(result);
383
439
  }
384
440
  }
441
+ /**
442
+ * @zh 获取当前的 Thread。
443
+ * @en Gets the current Thread.
444
+ */
385
445
  getCurrentThread() {
386
446
  return this.currentThread;
387
447
  }
448
+ /**
449
+ * @zh 获取当前的 Assistant。
450
+ * @en Gets the current Assistant.
451
+ */
388
452
  getCurrentAssistant() {
389
453
  return this.currentAssistant;
390
454
  }
455
+ /**
456
+ * @zh 重置客户端状态。
457
+ * @en Resets the client state.
458
+ */
391
459
  async reset() {
392
460
  var _a;
393
461
  await this.initAssistant((_a = this.currentAssistant) === null || _a === void 0 ? void 0 : _a.name);