@ai-sdk/langchain 0.0.0-0219f568-20260113124214

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright 2023 Vercel, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,241 @@
+ # AI SDK - LangChain Adapter
+
+ The **[AI SDK](https://ai-sdk.dev)** LangChain adapter provides seamless integration between [LangChain](https://langchain.com/) and the AI SDK, enabling you to use LangChain agents and graphs with AI SDK UI components.
+
+ ## Installation
+
+ ```bash
+ npm install @ai-sdk/langchain @langchain/core
+ ```
+
+ > **Note:** `@langchain/core` is a required peer dependency.
+
+ ## Features
+
+ - Convert AI SDK `UIMessage` to LangChain `BaseMessage` format
+ - Transform LangChain/LangGraph streams to AI SDK `UIMessageStream`
+ - `ChatTransport` implementation for LangSmith deployments
+ - Full support for text, tool calls, and tool results
+ - Custom data streaming with typed events (`data-{type}`)
+
+ ## Usage
+
+ ### Converting Messages
+
+ Use `toBaseMessages` to convert AI SDK messages to LangChain format:
+
+ ```ts
+ import { toBaseMessages } from '@ai-sdk/langchain';
+
+ // Convert UI messages to LangChain format
+ const langchainMessages = await toBaseMessages(uiMessages);
+
+ // Use with any LangChain model
+ const response = await model.invoke(langchainMessages);
+ ```
+
+ ### Streaming from LangGraph
+
+ Use `toUIMessageStream` to convert LangGraph streams to AI SDK format:
+
+ ```ts
+ import { toBaseMessages, toUIMessageStream } from '@ai-sdk/langchain';
+ import { createUIMessageStreamResponse } from 'ai';
+
+ // Convert messages and stream from a LangGraph graph
+ const langchainMessages = await toBaseMessages(uiMessages);
+
+ const langchainStream = await graph.stream(
+   { messages: langchainMessages },
+   { streamMode: ['values', 'messages'] },
+ );
+
+ // Convert to UI message stream response
+ return createUIMessageStreamResponse({
+   stream: toUIMessageStream(langchainStream),
+ });
+ ```
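+
+ Putting it together, a minimal Next.js route handler could look like the sketch below (the `graph` import is a placeholder for your own compiled LangGraph graph):
+
+ ```ts
+ // app/api/chat/route.ts (sketch)
+ import { createUIMessageStreamResponse, type UIMessage } from 'ai';
+ import { toBaseMessages, toUIMessageStream } from '@ai-sdk/langchain';
+ import { graph } from '@/lib/graph'; // placeholder: your compiled LangGraph graph
+
+ export async function POST(req: Request) {
+   const { messages }: { messages: UIMessage[] } = await req.json();
+
+   // Convert UI messages and stream the graph run back as UI message chunks
+   const langchainMessages = await toBaseMessages(messages);
+   const stream = await graph.stream(
+     { messages: langchainMessages },
+     { streamMode: ['values', 'messages'] },
+   );
+
+   return createUIMessageStreamResponse({
+     stream: toUIMessageStream(stream),
+   });
+ }
+ ```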
+
+ ### Streaming with `streamEvents`
+
+ You can also use `toUIMessageStream` with `streamEvents()` for more granular event handling:
+
+ ```ts
+ import { toBaseMessages, toUIMessageStream } from '@ai-sdk/langchain';
+ import { createUIMessageStreamResponse } from 'ai';
+
+ // Using streamEvents with an agent
+ const langchainMessages = await toBaseMessages(uiMessages);
+ const streamEvents = agent.streamEvents(
+   { messages: langchainMessages },
+   { version: 'v2' },
+ );
+
+ // Convert to UI message stream response
+ return createUIMessageStreamResponse({
+   stream: toUIMessageStream(streamEvents),
+ });
+ ```
+
+ The adapter automatically detects the stream type and handles:
+
+ - `on_chat_model_stream` events for text streaming
+ - `on_tool_start` and `on_tool_end` events for tool calls
+ - Reasoning content from `contentBlocks`
+
+ ### Custom Data Streaming
+
+ LangChain tools can emit custom data events using `config.writer()`. The adapter converts these to typed `data-{type}` parts:
+
+ ```ts
+ import { tool, type ToolRuntime } from 'langchain';
+ import { z } from 'zod';
+
+ const analyzeDataTool = tool(
+   async ({ query }, config: ToolRuntime) => {
+     // Emit progress updates - becomes 'data-progress' in the UI
+     config.writer?.({
+       type: 'progress',
+       id: 'analysis-1', // Include 'id' to persist in message.parts
+       step: 'fetching',
+       message: 'Fetching data...',
+       progress: 50,
+     });
+
+     // ... perform analysis ...
+
+     // Emit status update - becomes 'data-status' in the UI
+     config.writer?.({
+       type: 'status',
+       id: 'analysis-1-status',
+       status: 'complete',
+       message: 'Analysis finished',
+     });
+
+     return 'Analysis complete';
+   },
+   {
+     name: 'analyze_data',
+     description: 'Analyze data with progress updates',
+     schema: z.object({ query: z.string() }),
+   },
+ );
+ ```
+
+ Enable the `custom` stream mode to receive these events:
+
+ ```ts
+ const stream = await graph.stream(
+   { messages: langchainMessages },
+   { streamMode: ['values', 'messages', 'custom'] },
+ );
+ ```
+
+ **Custom data behavior:**
+
+ - Data with an `id` field is **persistent** (added to `message.parts` for rendering)
+ - Data without an `id` is **transient** (only delivered via the `onData` callback)
+ - The `type` field determines the event name: `{ type: 'progress' }` → `data-progress`
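+
+ On the client, these parts can be consumed with the AI SDK `useChat` hook. A rough sketch (the component and transport names are placeholders, and the part shapes follow the `analyze_data` tool above):
+
+ ```tsx
+ 'use client';
+
+ import { useChat } from '@ai-sdk/react';
+ import type { ChatTransport, UIMessage } from 'ai';
+
+ export function AnalysisChat({ transport }: { transport: ChatTransport<UIMessage> }) {
+   const { messages } = useChat({
+     transport,
+     // Transient data (emitted without an `id`) only arrives here.
+     onData: dataPart => {
+       if (dataPart.type === 'data-status') {
+         console.log('status:', dataPart.data);
+       }
+     },
+   });
+
+   return (
+     <div>
+       {messages.map(m => (
+         <div key={m.id}>
+           {m.parts
+             .filter(part => part.type === 'data-progress')
+             // Persistent data (emitted with an `id`) stays in message.parts.
+             .map((part, i) => (
+               <div key={i}>{JSON.stringify(part.data)}</div>
+             ))}
+         </div>
+       ))}
+     </div>
+   );
+ }
+ ```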
+
+ ### LangSmith Deployment Transport
+
+ Use `LangSmithDeploymentTransport` with the AI SDK `useChat` hook to connect directly to a LangGraph deployment from the browser:
+
+ ```tsx
+ import { useChat } from '@ai-sdk/react';
+ import { LangSmithDeploymentTransport } from '@ai-sdk/langchain';
+ import { useMemo, useState } from 'react';
+
+ function Chat() {
+   const transport = useMemo(
+     () =>
+       new LangSmithDeploymentTransport({
+         url: 'https://your-deployment.us.langgraph.app',
+         apiKey: process.env.LANGSMITH_API_KEY,
+       }),
+     [],
+   );
+
+   const [input, setInput] = useState('');
+   const { messages, sendMessage } = useChat({ transport });
+
+   return (
+     <div>
+       {messages.map(m => (
+         <div key={m.id}>
+           {m.parts
+             .filter(part => part.type === 'text')
+             .map(part => part.text)
+             .join('')}
+         </div>
+       ))}
+       <form
+         onSubmit={e => {
+           e.preventDefault();
+           sendMessage({ text: input });
+           setInput('');
+         }}
+       >
+         <input value={input} onChange={e => setInput(e.target.value)} />
+         <button type="submit">Send</button>
+       </form>
+     </div>
+   );
+ }
+ ```
+
+ ## API Reference
+
+ ### `toBaseMessages(messages)`
+
+ Converts AI SDK `UIMessage` objects to LangChain `BaseMessage` objects.
+
+ **Parameters:**
+
+ - `messages`: `UIMessage[]` - Array of AI SDK UI messages
+
+ **Returns:** `Promise<BaseMessage[]>`
+
+ ### `convertModelMessages(modelMessages)`
+
+ Converts AI SDK `ModelMessage` objects to LangChain `BaseMessage` objects.
+
+ **Parameters:**
+
+ - `modelMessages`: `ModelMessage[]` - Array of model messages
+
+ **Returns:** `BaseMessage[]`
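+
+ A short sketch of how this pairs with the AI SDK's `convertToModelMessages` helper (variable names are placeholders):
+
+ ```ts
+ import { convertToModelMessages } from 'ai';
+ import { convertModelMessages } from '@ai-sdk/langchain';
+
+ // UI messages -> ModelMessages -> LangChain BaseMessages
+ const modelMessages = convertToModelMessages(uiMessages);
+ const langchainMessages = convertModelMessages(modelMessages);
+ const response = await model.invoke(langchainMessages);
+ ```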
+
+ ### `toUIMessageStream(stream)`
+
+ Converts a LangChain/LangGraph stream to an AI SDK `UIMessageStream`.
+
+ **Parameters:**
+
+ - `stream`: `AsyncIterable | ReadableStream` - A stream from LangChain `model.stream()`, LangGraph `graph.stream()`, or `streamEvents()`
+
+ **Returns:** `ReadableStream<UIMessageChunk>`
+
+ **Supported stream types:**
+
+ - **Model streams** - Direct `AIMessageChunk` streams from `model.stream()`
+ - **LangGraph streams** - Streams with `streamMode: ['values', 'messages']`
+ - **streamEvents** - Event streams from `agent.streamEvents()` or `model.streamEvents()`
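+
+ For example, a direct model stream can be adapted without a graph (a sketch assuming `@langchain/openai`):
+
+ ```ts
+ import { ChatOpenAI } from '@langchain/openai';
+ import { createUIMessageStreamResponse } from 'ai';
+ import { toBaseMessages, toUIMessageStream } from '@ai-sdk/langchain';
+
+ const model = new ChatOpenAI({ model: 'gpt-4o-mini' });
+ const stream = await model.stream(await toBaseMessages(uiMessages));
+
+ return createUIMessageStreamResponse({
+   stream: toUIMessageStream(stream),
+ });
+ ```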
+
+ **Supported LangGraph stream events:**
+
+ - `messages` - Streaming message chunks (text, tool calls)
+ - `values` - State updates that finalize pending message chunks
+ - `custom` - Custom data events (emitted as `data-{type}` chunks)
+
+ **Supported streamEvents events:**
+
+ - `on_chat_model_stream` - Token streaming from chat models
+ - `on_tool_start` - Tool execution start
+ - `on_tool_end` - Tool execution end with output
+
+ ### `LangSmithDeploymentTransport`
+
+ A `ChatTransport` implementation for LangSmith/LangGraph deployments.
+
+ **Constructor Parameters:**
+
+ - `options`: `LangSmithDeploymentTransportOptions` - Configuration for the RemoteGraph connection
+   - `url`: `string` - LangSmith deployment URL or local server URL
+   - `apiKey?`: `string` - API key for authentication (optional for local development)
+   - `graphId?`: `string` - The ID of the graph to connect to (defaults to `'agent'`)
+
+ **Implements:** `ChatTransport`
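+
+ For local development, the transport can also point at a locally running LangGraph server (a sketch; the URL below is a placeholder for your local server address):
+
+ ```ts
+ import { LangSmithDeploymentTransport } from '@ai-sdk/langchain';
+
+ // No API key is needed for local development.
+ const transport = new LangSmithDeploymentTransport({
+   url: 'http://localhost:2024', // placeholder: local LangGraph server URL
+   graphId: 'agent', // optional; 'agent' is the default
+ });
+ ```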
+
+ ## Documentation
+
+ Please check out the [AI SDK documentation](https://ai-sdk.dev) for more information.
@@ -0,0 +1,133 @@
+ import { BaseMessage, AIMessageChunk } from '@langchain/core/messages';
+ import { UIMessage, UIMessageChunk, ModelMessage, ChatTransport, ChatRequestOptions } from 'ai';
+ import { RemoteGraph, RemoteGraphParams } from '@langchain/langgraph/remote';
+
+ /**
+ * Configuration options and helper callback methods for stream lifecycle events.
+ */
+ interface StreamCallbacks {
+   /** `onStart`: Called once when the stream is initialized. */
+   onStart?: () => Promise<void> | void;
+   /** `onFinal`: Called once when the stream is closed with the final completion message. */
+   onFinal?: (completion: string) => Promise<void> | void;
+   /** `onToken`: Called for each tokenized message. */
+   onToken?: (token: string) => Promise<void> | void;
+   /** `onText`: Called for each text chunk. */
+   onText?: (text: string) => Promise<void> | void;
+ }
+
+ /**
+ * Converts AI SDK UIMessages to LangChain BaseMessage objects.
+ *
+ * This function transforms the AI SDK's message format into LangChain's message
+ * format, enabling seamless integration between the two frameworks.
+ *
+ * @param messages - Array of AI SDK UIMessage objects to convert.
+ * @returns Promise resolving to an array of LangChain BaseMessage objects.
+ *
+ * @example
+ * ```ts
+ * import { toBaseMessages } from '@ai-sdk/langchain';
+ *
+ * const langchainMessages = await toBaseMessages(uiMessages);
+ *
+ * // Use with LangChain
+ * const response = await model.invoke(langchainMessages);
+ * ```
+ */
+ declare function toBaseMessages(messages: UIMessage[]): Promise<BaseMessage[]>;
+ /**
+ * Converts ModelMessages to LangChain BaseMessage objects.
+ *
+ * @param modelMessages - Array of ModelMessage objects from convertToModelMessages.
+ * @returns Array of LangChain BaseMessage objects.
+ */
+ declare function convertModelMessages(modelMessages: ModelMessage[]): BaseMessage[];
+ /**
+ * Converts a LangChain stream to an AI SDK UIMessageStream.
+ *
+ * This function automatically detects the stream type and handles:
+ * - Direct model streams (AsyncIterable from `model.stream()`)
+ * - LangGraph streams (ReadableStream with `streamMode: ['values', 'messages']`)
+ * - streamEvents streams (from `agent.streamEvents()` or `model.streamEvents()`)
+ *
+ * @param stream - A stream from LangChain model.stream(), graph.stream(), or streamEvents().
+ * @param callbacks - Optional callbacks for stream lifecycle events.
+ * @returns A ReadableStream of UIMessageChunk objects.
+ *
+ * @example
+ * ```ts
+ * // With a direct model stream
+ * const model = new ChatOpenAI({ model: 'gpt-4o-mini' });
+ * const stream = await model.stream(messages);
+ * return createUIMessageStreamResponse({
+ *   stream: toUIMessageStream(stream),
+ * });
+ *
+ * // With a LangGraph stream
+ * const graphStream = await graph.stream(
+ *   { messages },
+ *   { streamMode: ['values', 'messages'] }
+ * );
+ * return createUIMessageStreamResponse({
+ *   stream: toUIMessageStream(graphStream),
+ * });
+ *
+ * // With streamEvents
+ * const streamEvents = agent.streamEvents(
+ *   { messages },
+ *   { version: "v2" }
+ * );
+ * return createUIMessageStreamResponse({
+ *   stream: toUIMessageStream(streamEvents),
+ * });
+ * ```
+ */
+ declare function toUIMessageStream(stream: AsyncIterable<AIMessageChunk> | ReadableStream, callbacks?: StreamCallbacks): ReadableStream<UIMessageChunk>;
+
+ /**
+ * Options for configuring a LangSmith deployment transport.
+ * Extends RemoteGraphParams but makes graphId optional (defaults to 'agent').
+ */
+ type LangSmithDeploymentTransportOptions = Omit<RemoteGraphParams, 'graphId'> & {
+   /**
+    * The ID of the graph to connect to.
+    * @default 'agent'
+    */
+   graphId?: string;
+ };
+ /**
+ * A ChatTransport implementation for LangSmith/LangGraph deployments.
+ *
+ * This transport enables seamless integration between the AI SDK's useChat hook
+ * and LangSmith deployed LangGraph agents.
+ *
+ * @example
+ * ```ts
+ * import { LangSmithDeploymentTransport } from '@ai-sdk/langchain';
+ *
+ * // Use with useChat
+ * const { messages, sendMessage } = useChat({
+ *   transport: new LangSmithDeploymentTransport({
+ *     url: 'https://your-deployment.us.langgraph.app',
+ *     apiKey: 'my-api-key',
+ *   }),
+ * });
+ * ```
+ */
+ declare class LangSmithDeploymentTransport<UI_MESSAGE extends UIMessage> implements ChatTransport<UI_MESSAGE> {
+   protected graph: RemoteGraph;
+   constructor(options: LangSmithDeploymentTransportOptions);
+   sendMessages(options: {
+     trigger: 'submit-message' | 'regenerate-message';
+     chatId: string;
+     messageId: string | undefined;
+     messages: UI_MESSAGE[];
+     abortSignal: AbortSignal | undefined;
+   } & ChatRequestOptions): Promise<ReadableStream<UIMessageChunk>>;
+   reconnectToStream(_options: {
+     chatId: string;
+   } & ChatRequestOptions): Promise<ReadableStream<UIMessageChunk> | null>;
+ }
+
+ export { LangSmithDeploymentTransport, type LangSmithDeploymentTransportOptions, type StreamCallbacks, convertModelMessages, toBaseMessages, toUIMessageStream };