@langgraph-js/sdk 1.0.0 → 1.1.0

package/.env ADDED
File without changes
package/README.md ADDED
@@ -0,0 +1,163 @@
+ # @langgraph-js/sdk
+
+ ![npm version](https://img.shields.io/npm/v/@langgraph-js/sdk)
+ ![license](https://img.shields.io/npm/l/@langgraph-js/sdk)
+
+ > The missing UI SDK for LangGraph - seamlessly integrate your AI agents with frontend interfaces
+
+ ## Why @langgraph-js/sdk?
+
+ Building AI agent applications is complex, especially when you need to bridge the gap between LangGraph agents and interactive user interfaces. This SDK solves the critical challenges of frontend integration:
+
+ - **Provides a complete UI integration layer** - no more complex custom code to handle tools, streaming, and state management
+ - **Simplifies human-in-the-loop interactions** - easily incorporate user feedback within agent workflows
+ - **Handles edge cases automatically** - interruptions, errors, token management and more
+ - **Offers a rich set of UI components** - ready-to-use elements to display agent interactions
+
+ [DOCS](https://langgraph-js.netlify.app)
+
+ ## Installation
+
+ ```bash
+ # Using npm
+ npm install @langgraph-js/sdk
+
+ # Using yarn
+ yarn add @langgraph-js/sdk
+
+ # Using pnpm
+ pnpm add @langgraph-js/sdk
+ ```
+
+ ## Key Features
+
+ ### Generative UI
+
+ - ✅ Custom Tool Messages
+ - ✅ Token Counter
+ - ✅ Stop Graph Progress
+ - ✅ Interrupt Handling
+ - ✅ Error Handling
+ - ✅ Spend Time Tracking
+ - ✅ Time Persistence
+
+ ### Frontend Actions
+
+ - ✅ Definition of Union Tools
+ - ✅ Frontend Functions As Tools
+ - ✅ Human-in-the-Loop Interaction
+ - ✅ Interrupt Mode
+
+ ### Authorization
+
+ - ✅ Cookie-Based Authentication
+ - ✅ Custom Token Authentication
+
+ ### Persistence
+
+ - ✅ Read History from LangGraph
+
+ ## Advanced Usage
+
+ ### Creating a Chat Store
+
+ You can easily create a reactive store for your LangGraph client:
+
+ ```typescript
+ import { createChatStore } from "@langgraph-js/sdk";
+
+ export const globalChatStore = createChatStore(
+     "agent",
+     {
+         // Custom LangGraph backend interaction
+         apiUrl: "http://localhost:8123",
+         // Custom headers for authentication
+         defaultHeaders: JSON.parse(localStorage.getItem("code") || "{}"),
+         callerOptions: {
+             // Example for including cookies
+             // fetch(url: string, options: RequestInit) {
+             //     options.credentials = "include";
+             //     return fetch(url, options);
+             // },
+         },
+     },
+     {
+         onInit(client) {
+             client.tools.bindTools([]);
+         },
+     }
+ );
+ ```
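The `onInit` hook shown above is also where frontend tools can be registered. The sketch below is a hypothetical illustration based on the `ToolManager` included later in this diff: `bindTools` registers tools by `name`, and `callTool` invokes each tool's `execute(args, context)`. Field names other than `name` and `execute` (such as `description`) are assumptions, not confirmed API:

```typescript
import { createChatStore } from "@langgraph-js/sdk";

// Hypothetical frontend tool: lets the agent read the current page URL.
const getPageUrlTool = {
    name: "get_page_url", // ToolManager keys tools by name
    description: "Return the URL of the page the user is viewing", // assumed field
    async execute() {
        // Invoked by ToolManager.callTool(name, args, context)
        return window.location.href;
    },
};

export const storeWithTools = createChatStore(
    "agent",
    { apiUrl: "http://localhost:8123" },
    {
        onInit(client) {
            client.tools.bindTools([getPageUrlTool]); // same hook as in the example above
        },
    }
);
```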
+
+ ### React Integration
+
+ First, install the nanostores React integration:
+
+ ```bash
+ pnpm i @nanostores/react
+ ```
+
+ Then create a context provider for your chat:
+
+ ```tsx
+ import React, { createContext, useContext, useEffect } from "react";
+ import { globalChatStore } from "../store"; // Import your store
+ import { UnionStore, useUnionStore } from "@langgraph-js/sdk";
+ import { useStore } from "@nanostores/react";
+
+ type ChatContextType = UnionStore<typeof globalChatStore>;
+
+ const ChatContext = createContext<ChatContextType | undefined>(undefined);
+
+ export const useChat = () => {
+     const context = useContext(ChatContext);
+     if (!context) {
+         throw new Error("useChat must be used within a ChatProvider");
+     }
+     return context;
+ };
+
+ export const ChatProvider = ({ children }) => {
+     // Use store to ensure React gets reactive state updates
+     const store = useUnionStore(globalChatStore, useStore);
+
+     useEffect(() => {
+         // Initialize client
+         store.initClient().then(() => {
+             // Initialize conversation history
+             store.refreshHistoryList();
+         });
+     }, [store.currentAgent]);
+
+     return <ChatContext.Provider value={store}>{children}</ChatContext.Provider>;
+ };
+ ```
+
+ Use it in your components:
+
+ ```tsx
+ export const MyChat = () => {
+     return (
+         <ChatProvider>
+             <ChatComp></ChatComp>
+         </ChatProvider>
+     );
+ };
+
+ function ChatComp() {
+     const chat = useChat();
+     // Use chat store methods and state here
+ }
+ ```
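`ChatComp` is left as a stub above. As a rough sketch only (it assumes the union store exposes the client's `sendMessage` and `renderMessage`, which are defined on `LangGraphClient` later in this diff; the actual store surface lives in the SDK's `ui-store` module and is not shown here), a component might look like:

```tsx
function ChatCompSketch() {
    const chat = useChat();
    // Hypothetical usage; check the ui-store typings for the real property names.
    return (
        <div>
            {chat.renderMessage.map((m) => (
                <pre key={m.unique_id}>{JSON.stringify(m.content)}</pre>
            ))}
            <button onClick={() => chat.sendMessage("Hello!")}>Send</button>
        </div>
    );
}
```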
+
+ ## Documentation
+
+ For complete documentation, visit our [official docs](https://langgraph-js.netlify.app).
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
+
+ ## License
+
+ This project is licensed under the Apache-2.0 License.
package/dist/LangGraphClient.js ADDED
@@ -0,0 +1,392 @@
+ import { Client } from "@langchain/langgraph-sdk";
+ import { ToolManager } from "./ToolManager";
+ export class StreamingMessageType {
+     static isUser(m) {
+         return m.type === "human";
+     }
+     static isTool(m) {
+         return m.type === "tool";
+     }
+     static isAssistant(m) {
+         return m.type === "ai" && !this.isToolAssistant(m);
+     }
+     static isToolAssistant(m) {
+         /** @ts-ignore */
+         return m.type === "ai" && (m.tool_calls?.length || m.tool_call_chunks?.length);
+     }
+ }
+ export class LangGraphClient extends Client {
+     currentAssistant = null;
+     currentThread = null;
+     streamingCallbacks = new Set();
+     tools = new ToolManager();
+     stopController = null;
+     constructor(config) {
+         super(config);
+     }
+     availableAssistants = [];
+     listAssistants() {
+         return this.assistants.search({
+             metadata: null,
+             offset: 0,
+             limit: 100,
+         });
+     }
+     async initAssistant(agentName) {
+         try {
+             const assistants = await this.listAssistants();
+             this.availableAssistants = assistants;
+             if (assistants.length > 0) {
+                 this.currentAssistant = assistants.find((assistant) => assistant.name === agentName) || null;
+                 if (!this.currentAssistant) {
+                     throw new Error("Agent not found");
+                 }
+             }
+             else {
+                 throw new Error("No assistants found");
+             }
+         }
+         catch (error) {
+             console.error("Failed to initialize LangGraphClient:", error);
+             throw error;
+         }
+     }
+     async createThread({ threadId, } = {}) {
+         try {
+             this.currentThread = await this.threads.create({
+                 threadId,
+             });
+             return this.currentThread;
+         }
+         catch (error) {
+             console.error("Failed to create new thread:", error);
+             throw error;
+         }
+     }
+     async listThreads() {
+         return this.threads.search({
+             sortOrder: "desc",
+         });
+     }
+     /** Restore data from history */
+     async resetThread(agent, threadId) {
+         await this.initAssistant(agent);
+         this.currentThread = await this.threads.get(threadId);
+         this.graphState = this.currentThread.values;
+         this.graphMessages = this.graphState.messages;
+         this.emitStreamingUpdate({
+             type: "value",
+             data: {
+                 event: "messages/partial",
+                 data: {
+                     messages: this.graphMessages,
+                 },
+             },
+         });
+     }
+     streamingMessage = [];
+     /** Update messages sent from the graph */
+     graphMessages = [];
+     cloneMessage(message) {
+         return JSON.parse(JSON.stringify(message));
+     }
+     replaceMessageWithValuesMessage(message, isTool = false) {
+         const key = (isTool ? "tool_call_id" : "id");
+         const valuesMessage = this.graphMessages.find((i) => i[key] === message[key]);
+         if (valuesMessage) {
+             return {
+                 ...valuesMessage,
+                 /** @ts-ignore */
+                 tool_input: message.tool_input,
+             };
+         }
+         return message;
+     }
+     /** Messages used for streaming rendering in the UI */
+     get renderMessage() {
+         const previousMessage = new Map();
+         const result = [];
+         const inputMessages = [...this.graphMessages, ...this.streamingMessage];
+         // Iterate from the end so the most recent message takes precedence
+         for (let i = inputMessages.length - 1; i >= 0; i--) {
+             const message = this.cloneMessage(inputMessages[i]);
+             if (!message.id) {
+                 result.unshift(message);
+                 continue;
+             }
+             // Skip messages whose id has already been processed
+             if (previousMessage.has(message.id)) {
+                 continue;
+             }
+             if (StreamingMessageType.isToolAssistant(message)) {
+                 const m = this.replaceMessageWithValuesMessage(message);
+                 // Record the message for this id and add it to the result
+                 previousMessage.set(message.id, m);
+                 /** @ts-ignore */
+                 const tool_calls = m.tool_calls?.length ? m.tool_calls : m.tool_call_chunks;
+                 const new_tool_calls = tool_calls.map((tool, index) => {
+                     return this.replaceMessageWithValuesMessage({
+                         type: "tool",
+                         additional_kwargs: {},
+                         /** @ts-ignore */
+                         tool_input: m.additional_kwargs?.tool_calls[index].function.arguments,
+                         id: tool.id,
+                         name: tool.name,
+                         response_metadata: {},
+                         tool_call_id: tool.id,
+                     }, true);
+                 });
+                 for (const tool of new_tool_calls) {
+                     if (!previousMessage.has(tool.id)) {
+                         result.unshift(tool);
+                         previousMessage.set(tool.id, tool);
+                     }
+                 }
+                 result.unshift(m);
+             }
+             else {
+                 // Record the message for this id and add it to the result
+                 const m = this.replaceMessageWithValuesMessage(message);
+                 previousMessage.set(message.id, m);
+                 result.unshift(m);
+             }
+         }
+         return this.attachInfoForMessage(this.composeToolMessages(result));
+     }
+     attachInfoForMessage(result) {
+         let lastMessage = null;
+         for (const message of result) {
+             const createTime = message.response_metadata?.create_time || "";
+             // Use the content length as part of the render id so a length change forces a re-render
+             message.unique_id = message.id + JSON.stringify(message.content).length;
+             message.spend_time = new Date(createTime).getTime() - new Date(lastMessage?.response_metadata?.create_time || createTime).getTime();
+             if (!message.usage_metadata && message.response_metadata?.usage) {
+                 const usage = message.response_metadata.usage;
+                 message.usage_metadata = {
+                     input_tokens: usage.prompt_tokens,
+                     output_tokens: usage.completion_tokens,
+                     total_tokens: usage.total_tokens,
+                 };
+             }
+             lastMessage = message;
+         }
+         return result;
+     }
+     composeToolMessages(messages) {
+         const result = [];
+         const assistantToolMessages = new Map();
+         const toolParentMessage = new Map();
+         for (const message of messages) {
+             if (StreamingMessageType.isToolAssistant(message)) {
+                 /** @ts-ignore Only the args of tool_call_chunks are plain text */
+                 message.tool_call_chunks?.forEach((element) => {
+                     assistantToolMessages.set(element.id, element);
+                     toolParentMessage.set(element.id, message);
+                 });
+                 if (!message.content)
+                     continue;
+             }
+             if (StreamingMessageType.isTool(message) && !message.tool_input) {
+                 const assistantToolMessage = assistantToolMessages.get(message.tool_call_id);
+                 const parentMessage = toolParentMessage.get(message.tool_call_id);
+                 if (assistantToolMessage) {
+                     message.tool_input = assistantToolMessage.args;
+                     if (message.additional_kwargs) {
+                         message.additional_kwargs.done = true;
+                     }
+                     else {
+                         message.additional_kwargs = {
+                             done: true,
+                         };
+                     }
+                 }
+                 if (parentMessage) {
+                     message.usage_metadata = parentMessage.usage_metadata;
+                 }
+             }
+             result.push(message);
+         }
+         return result;
+     }
+     get tokenCounter() {
+         return this.graphMessages.reduce((acc, message) => {
+             if (message.usage_metadata) {
+                 acc.total_tokens += message.usage_metadata?.total_tokens || 0;
+                 acc.input_tokens += message.usage_metadata?.input_tokens || 0;
+                 acc.output_tokens += message.usage_metadata?.output_tokens || 0;
+             }
+             else if (message.response_metadata?.usage) {
+                 const usage = message.response_metadata?.usage;
+                 acc.total_tokens += usage.total_tokens || 0;
+                 acc.input_tokens += usage.prompt_tokens || 0;
+                 acc.output_tokens += usage.completion_tokens || 0;
+             }
+             return acc;
+         }, {
+             total_tokens: 0,
+             input_tokens: 0,
+             output_tokens: 0,
+         });
+     }
+     onStreamingUpdate(callback) {
+         this.streamingCallbacks.add(callback);
+         return () => {
+             this.streamingCallbacks.delete(callback);
+         };
+     }
+     emitStreamingUpdate(event) {
+         this.streamingCallbacks.forEach((callback) => callback(event));
+     }
+     graphState = {};
+     currentRun;
+     cancelRun() {
+         if (this.currentThread?.thread_id && this.currentRun?.run_id) {
+             this.runs.cancel(this.currentThread.thread_id, this.currentRun.run_id);
+         }
+     }
+     async sendMessage(input, { extraParams, _debug, command } = {}) {
+         if (!this.currentAssistant) {
+             throw new Error("Thread or Assistant not initialized");
+         }
+         if (!this.currentThread) {
+             await this.createThread();
+             this.emitStreamingUpdate({
+                 type: "thread",
+                 data: {
+                     event: "thread/create",
+                     data: {
+                         thread: this.currentThread,
+                     },
+                 },
+             });
+         }
+         const messagesToSend = Array.isArray(input)
+             ? input
+             : [
+                 {
+                     type: "human",
+                     content: input,
+                 },
+             ];
+         const streamResponse = _debug?.streamResponse ||
+             this.runs.stream(this.currentThread.thread_id, this.currentAssistant.assistant_id, {
+                 input: { ...this.graphState, ...(extraParams || {}), messages: messagesToSend, fe_tools: this.tools.toJSON() },
+                 streamMode: ["messages", "values"],
+                 streamSubgraphs: true,
+                 command,
+             });
+         const streamRecord = [];
+         for await (const chunk of streamResponse) {
+             streamRecord.push(chunk);
+             if (chunk.event === "metadata") {
+                 this.currentRun = chunk.data;
+             }
+             else if (chunk.event === "error") {
+                 this.emitStreamingUpdate({
+                     type: "error",
+                     data: chunk,
+                 });
+             }
+             else if (chunk.event === "messages/partial") {
+                 for (const message of chunk.data) {
+                     this.streamingMessage.push(message);
+                 }
+                 this.emitStreamingUpdate({
+                     type: "message",
+                     data: chunk,
+                 });
+                 continue;
+             }
+             else if (chunk.event.startsWith("values")) {
+                 const data = chunk.data;
+                 if (data.messages) {
+                     const isResume = !!command?.resume;
+                     const isLongerThanLocal = data.messages.length >= this.graphMessages.length;
+                     // When resuming, reject any snapshot with fewer messages than the local list
+                     if (!isResume || (isResume && isLongerThanLocal)) {
+                         this.graphMessages = data.messages;
+                         this.emitStreamingUpdate({
+                             type: "value",
+                             data: chunk,
+                         });
+                     }
+                 }
+                 this.graphState = chunk.data;
+                 this.streamingMessage = [];
+                 continue;
+             }
+         }
+         this.streamingMessage = [];
+         const data = await this.runFETool();
+         if (data)
+             streamRecord.push(...data);
+         this.emitStreamingUpdate({
+             type: "done",
+             data: {
+                 event: "done",
+             },
+         });
+         return streamRecord;
+     }
+     runFETool() {
+         const data = this.graphMessages;
+         const lastMessage = data[data.length - 1];
+         // If the last message is an assistant message with tool calls, invoke the matching frontend tools
+         if (lastMessage.type === "ai" && lastMessage.tool_calls?.length) {
+             const result = lastMessage.tool_calls.map((tool) => {
+                 if (this.tools.getTool(tool.name)) {
+                     const toolMessage = {
+                         ...tool,
+                         tool_call_id: tool.id,
+                         /** @ts-ignore */
+                         tool_input: JSON.stringify(tool.args),
+                         additional_kwargs: {},
+                     };
+                     // JSON validation
+                     return this.callFETool(toolMessage, tool.args);
+                 }
+             });
+             return Promise.all(result);
+         }
+     }
+     async callFETool(message, args) {
+         const that = this; // keep a stable reference so `this` is not rebound
+         const result = await this.tools.callTool(message.name, args, { client: that, message });
+         return this.resume(result);
+     }
+     /** Resume the run; used when the stream has been interrupted */
+     resume(result) {
+         return this.sendMessage([], {
+             command: {
+                 resume: result,
+             },
+         });
+     }
+     /** Complete a pending frontend tool wait */
+     doneFEToolWaiting(id, result) {
+         const done = this.tools.doneWaiting(id, result);
+         if (!done && this.currentThread?.status === "interrupted") {
+             this.resume(result);
+         }
+     }
+     getCurrentThread() {
+         return this.currentThread;
+     }
+     getCurrentAssistant() {
+         return this.currentAssistant;
+     }
+     async reset() {
+         await this.initAssistant(this.currentAssistant?.name);
+         this.currentThread = null;
+         this.graphState = {};
+         this.graphMessages = [];
+         this.streamingMessage = [];
+         this.currentRun = undefined;
+         this.emitStreamingUpdate({
+             type: "value",
+             data: {
+                 event: "messages/partial",
+             },
+         });
+     }
+ }
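For reference, the class above can also be driven directly, without the `ui-store` layer. A minimal sketch using only methods defined in the class (the `apiUrl` value is a placeholder; the config object is forwarded to the base `Client` from `@langchain/langgraph-sdk`):

```typescript
import { LangGraphClient } from "@langgraph-js/sdk";

const client = new LangGraphClient({ apiUrl: "http://localhost:8123" });

async function runOnce() {
    await client.initAssistant("agent"); // resolves the assistant by name
    const unsubscribe = client.onStreamingUpdate((event) => {
        if (event.type === "message") {
            // Partial messages stream in here; client.renderMessage merges
            // streaming and value messages for rendering.
        }
    });
    await client.sendMessage("Hello!"); // creates a thread on first use
    console.log(client.tokenCounter);   // { total_tokens, input_tokens, output_tokens }
    unsubscribe();
}
```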
@@ -0,0 +1,27 @@
+ export class SpendTime {
+     timeCounter = new Map();
+     start(key) {
+         this.timeCounter.set(key, [new Date()]);
+     }
+     end(key) {
+         this.timeCounter.set(key, [this.timeCounter.get(key)?.[0] || new Date(), new Date()]);
+     }
+     setSpendTime(key) {
+         if (this.timeCounter.has(key)) {
+             this.end(key);
+         }
+         else {
+             this.start(key);
+         }
+     }
+     getStartTime(key) {
+         return this.timeCounter.get(key)?.[0] || new Date();
+     }
+     getEndTime(key) {
+         return this.timeCounter.get(key)?.[1] || new Date();
+     }
+     getSpendTime(key) {
+         const [start, end = new Date()] = this.timeCounter.get(key) || [new Date(), new Date()];
+         return end.getTime() - start.getTime();
+     }
+ }
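A short usage sketch of the timer above; every call shown exists on the class, and the key string is arbitrary. `SpendTime` is not directly listed in `dist/index.js`, so how it is exported (if at all) is not shown in this diff:

```typescript
// Assuming SpendTime is reachable from your build; otherwise it is used internally by the SDK.
const timer = new SpendTime();

timer.setSpendTime("run-1"); // first call records the start time
// ... do some work ...
timer.setSpendTime("run-1"); // second call records the end time
console.log(timer.getSpendTime("run-1")); // elapsed milliseconds
```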
package/dist/ToolManager.js ADDED
@@ -0,0 +1,91 @@
+ import { createJSONDefineTool } from "./tool/createTool";
+ export class ToolManager {
+     tools = new Map();
+     /**
+      * Register a tool
+      * @param tool The tool to register
+      */
+     bindTool(tool) {
+         if (this.tools.has(tool.name)) {
+             throw new Error(`Tool with name ${tool.name} already exists`);
+         }
+         this.tools.set(tool.name, tool);
+     }
+     /**
+      * Register multiple tools
+      * @param tools The tools to register
+      */
+     bindTools(tools) {
+         tools.forEach((tool) => this.bindTool(tool));
+     }
+     /**
+      * Get all registered tools
+      * @returns Array of tools
+      */
+     getAllTools() {
+         return Array.from(this.tools.values());
+     }
+     /**
+      * Get a tool by name
+      * @param name Tool name
+      * @returns The tool instance, or undefined
+      */
+     getTool(name) {
+         return this.tools.get(name);
+     }
+     /**
+      * Remove a tool by name
+      * @param name Tool name
+      * @returns Whether the tool was removed
+      */
+     removeTool(name) {
+         return this.tools.delete(name);
+     }
+     /**
+      * Remove all tools
+      */
+     clearTools() {
+         this.tools.clear();
+     }
+     async callTool(name, args, context) {
+         const tool = this.getTool(name);
+         if (!tool) {
+             throw new Error(`Tool with name ${name} not found`);
+         }
+         return await tool.execute(args, context);
+     }
+     toJSON() {
+         return Array.from(this.tools.values()).map((i) => createJSONDefineTool(i));
+     }
+     // === Async completion mechanism designed for the frontend
+     waitingMap = new Map();
+     doneWaiting(id, value) {
+         if (this.waitingMap.has(id)) {
+             this.waitingMap.get(id)(value);
+             this.waitingMap.delete(id);
+             return true;
+         }
+         else {
+             console.warn(`Waiting for tool ${id} not found`);
+             return false;
+         }
+     }
+     waitForDone(id) {
+         if (this.waitingMap.has(id)) {
+             return this.waitingMap.get(id);
+         }
+         const promise = new Promise((resolve, reject) => {
+             this.waitingMap.set(id, resolve);
+         });
+         return promise;
+     }
+     /** Wait for user input
+      * @example
+      * // Resume the chat stream
+      * client.tools.doneWaiting(message.id!, (e.target as any).value);
+      */
+     static waitForUIDone(_, context) {
+         // console.log(context.message);
+         return context.client.tools.waitForDone(context.message.id);
+     }
+ }
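`waitForUIDone` above is the hook for human-in-the-loop tools: used as a tool's `execute`, it parks the tool call on a promise keyed by the message id, and `LangGraphClient.doneFEToolWaiting(id, result)` (or `tools.doneWaiting`) resolves it later from the UI. A hedged sketch, assuming a plain `{ name, execute }` object is an acceptable tool for `bindTools`:

```typescript
import { ToolManager } from "@langgraph-js/sdk";

// Hypothetical human-in-the-loop tool: the agent asks the user to confirm an action.
const askUserTool = {
    name: "ask_user",
    // Parks the call until the UI resolves it with doneFEToolWaiting(message.id, answer).
    execute: ToolManager.waitForUIDone,
};

// Registered wherever the client is created, e.g. in createChatStore's onInit:
//   client.tools.bindTools([askUserTool]);
//
// Later, from a form or button handler in the UI:
//   client.doneFEToolWaiting(message.id, userAnswer);
```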
package/dist/index.js ADDED
@@ -0,0 +1,5 @@
+ export * from "./LangGraphClient";
+ export * from "./tool";
+ export * from "@langchain/langgraph-sdk";
+ export * from "./ui-store";
+ export * from "./ToolManager";
@@ -0,0 +1 @@
+ export {};