mcp-use 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/README.md +196 -0
  2. package/dist/examples/ai_sdk_example.d.ts +23 -0
  3. package/dist/examples/ai_sdk_example.d.ts.map +1 -0
  4. package/dist/examples/ai_sdk_example.js +213 -0
  5. package/dist/examples/stream_example.d.ts +12 -0
  6. package/dist/examples/stream_example.d.ts.map +1 -0
  7. package/dist/examples/stream_example.js +198 -0
  8. package/dist/index.d.ts +2 -0
  9. package/dist/index.d.ts.map +1 -1
  10. package/dist/index.js +2 -0
  11. package/dist/src/agents/mcp_agent.d.ts +9 -3
  12. package/dist/src/agents/mcp_agent.d.ts.map +1 -1
  13. package/dist/src/agents/mcp_agent.js +175 -42
  14. package/dist/src/agents/utils/ai_sdk.d.ts +22 -0
  15. package/dist/src/agents/utils/ai_sdk.d.ts.map +1 -0
  16. package/dist/src/agents/utils/ai_sdk.js +62 -0
  17. package/dist/src/agents/utils/index.d.ts +2 -0
  18. package/dist/src/agents/utils/index.d.ts.map +1 -0
  19. package/dist/src/agents/utils/index.js +1 -0
  20. package/dist/tests/ai_sdk_compatibility.test.d.ts +13 -0
  21. package/dist/tests/ai_sdk_compatibility.test.d.ts.map +1 -0
  22. package/dist/tests/ai_sdk_compatibility.test.js +214 -0
  23. package/dist/tests/stream_events.test.d.ts +2 -0
  24. package/dist/tests/stream_events.test.d.ts.map +1 -0
  25. package/dist/tests/stream_events.test.js +306 -0
  26. package/dist/tests/stream_events_simple.test.d.ts +7 -0
  27. package/dist/tests/stream_events_simple.test.d.ts.map +1 -0
  28. package/dist/tests/stream_events_simple.test.js +179 -0
  29. package/dist/vitest.config.d.ts +3 -0
  30. package/dist/vitest.config.d.ts.map +1 -0
  31. package/dist/vitest.config.js +21 -0
  32. package/package.json +14 -4
package/README.md CHANGED
@@ -101,6 +101,197 @@ main().catch(console.error)
101
101
 
102
102
  ---
103
103
 
104
+ ## šŸ”§ API Methods
105
+
106
+ ### MCPAgent Methods
107
+
108
+ The `MCPAgent` class provides several methods for executing queries with different output formats:
109
+
110
+ #### `run(query: string, maxSteps?: number): Promise<string>`
111
+
112
+ Executes a query and returns the final result as a string.
113
+
114
+ ```ts
115
+ const result = await agent.run('What tools are available?')
116
+ console.log(result)
117
+ ```
118
+
119
+ #### `stream(query: string, maxSteps?: number): AsyncGenerator<AgentStep, string, void>`
120
+
121
+ Yields intermediate steps during execution, providing visibility into the agent's reasoning process.
122
+
123
+ ```ts
124
+ const stream = agent.stream('Search for restaurants in Tokyo')
125
+ for await (const step of stream) {
126
+ console.log(`Tool: ${step.action.tool}, Input: ${step.action.toolInput}`)
127
+ console.log(`Result: ${step.observation}`)
128
+ }
129
+ ```
130
+
131
+ #### `streamEvents(query: string, maxSteps?: number): AsyncGenerator<StreamEvent, void, void>`
132
+
133
+ Yields fine-grained LangChain StreamEvent objects, enabling token-by-token streaming and detailed event tracking.
134
+
135
+ ```ts
136
+ const eventStream = agent.streamEvents('What is the weather today?')
137
+ for await (const event of eventStream) {
138
+ // Handle different event types
139
+ switch (event.event) {
140
+ case 'on_chat_model_stream':
141
+ // Token-by-token streaming from the LLM
142
+ if (event.data?.chunk?.content) {
143
+ process.stdout.write(event.data.chunk.content)
144
+ }
145
+ break
146
+ case 'on_tool_start':
147
+ console.log(`\nTool started: ${event.name}`)
148
+ break
149
+ case 'on_tool_end':
150
+ console.log(`Tool completed: ${event.name}`)
151
+ break
152
+ }
153
+ }
154
+ ```
155
+
156
+ ### Key Differences
157
+
158
+ - **`run()`**: Best for simple queries where you only need the final result
159
+ - **`stream()`**: Best for debugging and understanding the agent's tool usage
160
+ - **`streamEvents()`**: Best for real-time UI updates with token-level streaming
161
+
162
+ ## šŸ”„ AI SDK Integration
163
+
164
+ The library provides built-in utilities for integrating with [Vercel AI SDK](https://sdk.vercel.ai/), making it easy to build streaming UIs with React hooks like `useCompletion` and `useChat`.
165
+
166
+ ### Installation
167
+
168
+ ```bash
169
+ npm install ai @langchain/anthropic
170
+ ```
171
+
172
+ ### Basic Usage
173
+
174
+ ```ts
175
+ import { ChatAnthropic } from '@langchain/anthropic'
176
+ import { LangChainAdapter } from 'ai'
177
+ import { createReadableStreamFromGenerator, MCPAgent, MCPClient, streamEventsToAISDK } from 'mcp-use'
178
+
179
+ async function createApiHandler() {
180
+ const config = {
181
+ mcpServers: {
182
+ everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
183
+ }
184
+ }
185
+
186
+ const client = new MCPClient(config)
187
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
188
+ const agent = new MCPAgent({ llm, client, maxSteps: 5 })
189
+
190
+ return async (request: { prompt: string }) => {
191
+ const streamEvents = agent.streamEvents(request.prompt)
192
+ const aiSDKStream = streamEventsToAISDK(streamEvents)
193
+ const readableStream = createReadableStreamFromGenerator(aiSDKStream)
194
+
195
+ return LangChainAdapter.toDataStreamResponse(readableStream)
196
+ }
197
+ }
198
+ ```
199
+
200
+ ### Enhanced Usage with Tool Visibility
201
+
202
+ ```ts
203
+ import { streamEventsToAISDKWithTools } from 'mcp-use'
204
+
205
+ async function createEnhancedApiHandler() {
206
+ const config = {
207
+ mcpServers: {
208
+ everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
209
+ }
210
+ }
211
+
212
+ const client = new MCPClient(config)
213
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
214
+ const agent = new MCPAgent({ llm, client, maxSteps: 8 })
215
+
216
+ return async (request: { prompt: string }) => {
217
+ const streamEvents = agent.streamEvents(request.prompt)
218
+ // Enhanced stream includes tool usage notifications
219
+ const enhancedStream = streamEventsToAISDKWithTools(streamEvents)
220
+ const readableStream = createReadableStreamFromGenerator(enhancedStream)
221
+
222
+ return LangChainAdapter.toDataStreamResponse(readableStream)
223
+ }
224
+ }
225
+ ```
226
+
227
+ ### Next.js API Route Example
228
+
229
+ ```ts
230
+ // pages/api/chat.ts or app/api/chat/route.ts
231
+ import { ChatAnthropic } from '@langchain/anthropic'
232
+ import { LangChainAdapter } from 'ai'
233
+ import { createReadableStreamFromGenerator, MCPAgent, MCPClient, streamEventsToAISDK } from 'mcp-use'
234
+
235
+ export async function POST(req: Request) {
236
+ const { prompt } = await req.json()
237
+
238
+ const config = {
239
+ mcpServers: {
240
+ everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] }
241
+ }
242
+ }
243
+
244
+ const client = new MCPClient(config)
245
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514' })
246
+ const agent = new MCPAgent({ llm, client, maxSteps: 10 })
247
+
248
+ try {
249
+ const streamEvents = agent.streamEvents(prompt)
250
+ const aiSDKStream = streamEventsToAISDK(streamEvents)
251
+ const readableStream = createReadableStreamFromGenerator(aiSDKStream)
252
+
253
+ return LangChainAdapter.toDataStreamResponse(readableStream)
254
+ }
255
+ finally {
256
+ await client.closeAllSessions()
257
+ }
258
+ }
259
+ ```
260
+
261
+ ### Frontend Integration
262
+
263
+ ```tsx
264
+ // components/Chat.tsx
265
+ import { useCompletion } from 'ai/react'
266
+
267
+ export function Chat() {
268
+ const { completion, input, handleInputChange, handleSubmit } = useCompletion({
269
+ api: '/api/chat',
270
+ })
271
+
272
+ return (
273
+ <div>
274
+ <div>{completion}</div>
275
+ <form onSubmit={handleSubmit}>
276
+ <input
277
+ value={input}
278
+ onChange={handleInputChange}
279
+ placeholder="Ask me anything..."
280
+ />
281
+ </form>
282
+ </div>
283
+ )
284
+ }
285
+ ```
286
+
287
+ ### Available AI SDK Utilities
288
+
289
+ - **`streamEventsToAISDK()`**: Converts streamEvents to basic text stream
290
+ - **`streamEventsToAISDKWithTools()`**: Enhanced stream with tool usage notifications
291
+ - **`createReadableStreamFromGenerator()`**: Converts async generator to ReadableStream
292
+
293
+ ---
294
+
104
295
  ## šŸ“‚ Configuration File
105
296
 
106
297
  You can store servers in a JSON file:
@@ -140,6 +331,9 @@ npm install
140
331
  npm run example:airbnb # Search accommodations with Airbnb
141
332
  npm run example:browser # Browser automation with Playwright
142
333
  npm run example:chat # Interactive chat with memory
334
+ npm run example:stream # Demonstrate streaming methods (stream & streamEvents)
335
+ npm run example:stream_events # Comprehensive streamEvents() examples
336
+ npm run example:ai_sdk # AI SDK integration with streaming
143
337
  npm run example:filesystem # File system operations
144
338
  npm run example:http # HTTP server connection
145
339
  npm run example:everything # Test MCP functionalities
@@ -153,6 +347,8 @@ npm run example:multi # Multiple servers in one session
153
347
  - **Multi-Server**: Combine multiple MCP servers (Airbnb + Browser) in a single task
154
348
  - **Sandboxed Execution**: Run MCP servers in isolated E2B containers
155
349
  - **OAuth Flows**: Authenticate with services like Linear using OAuth2
350
+ - **Streaming Methods**: Demonstrate both step-by-step and token-level streaming
351
+ - **AI SDK Integration**: Build streaming UIs with Vercel AI SDK and React hooks
156
352
 
157
353
  See the [examples README](./examples/README.md) for detailed documentation and prerequisites.
158
354
 
@@ -0,0 +1,23 @@
1
+ /**
2
+ * AI SDK Integration Example
3
+ *
4
+ * This example demonstrates how to use MCPAgent's streamEvents() method
5
+ * with Vercel AI SDK's LangChainAdapter for building streaming UIs.
6
+ *
7
+ * This pattern is useful for:
8
+ * - Next.js API routes with useCompletion/useChat hooks
9
+ * - Real-time streaming applications
10
+ * - Building chat interfaces with token-by-token updates
11
+ */
12
+ import type { StreamEvent } from '../index.js';
13
+ declare function streamEventsToAISDK(streamEvents: AsyncGenerator<StreamEvent, void, void>): AsyncGenerator<string, void, void>;
14
+ declare function createReadableStreamFromGenerator(generator: AsyncGenerator<string, void, void>): ReadableStream<string>;
15
+ declare function streamEventsToAISDKWithTools(streamEvents: AsyncGenerator<StreamEvent, void, void>): AsyncGenerator<string, void, void>;
16
+ declare function createApiHandler(): Promise<(request: {
17
+ prompt: string;
18
+ }) => Promise<Response>>;
19
+ declare function createEnhancedApiHandler(): Promise<(request: {
20
+ prompt: string;
21
+ }) => Promise<Response>>;
22
+ export { createApiHandler, createEnhancedApiHandler, createReadableStreamFromGenerator, streamEventsToAISDK, streamEventsToAISDKWithTools, };
23
+ //# sourceMappingURL=ai_sdk_example.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ai_sdk_example.d.ts","sourceRoot":"","sources":["../../examples/ai_sdk_example.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAU9C,iBAAgB,mBAAmB,CACjC,YAAY,EAAE,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,CAAC,GACpD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAUpC;AAGD,iBAAS,iCAAiC,CACxC,SAAS,EAAE,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,GAC5C,cAAc,CAAC,MAAM,CAAC,CAcxB;AAGD,iBAAgB,4BAA4B,CAC1C,YAAY,EAAE,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,CAAC,GACpD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAqBpC;AAGD,iBAAe,gBAAgB,sBAwBM;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,wBAsBtD;AAGD,iBAAe,wBAAwB,sBAuBM;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,wBAkB9D;AAoFD,OAAO,EACL,gBAAgB,EAChB,wBAAwB,EACxB,iCAAiC,EACjC,mBAAmB,EACnB,4BAA4B,GAC7B,CAAA"}
@@ -0,0 +1,213 @@
1
+ /**
2
+ * AI SDK Integration Example
3
+ *
4
+ * This example demonstrates how to use MCPAgent's streamEvents() method
5
+ * with Vercel AI SDK's LangChainAdapter for building streaming UIs.
6
+ *
7
+ * This pattern is useful for:
8
+ * - Next.js API routes with useCompletion/useChat hooks
9
+ * - Real-time streaming applications
10
+ * - Building chat interfaces with token-by-token updates
11
+ */
12
+ import { ChatAnthropic } from '@langchain/anthropic';
13
+ import { LangChainAdapter } from 'ai';
14
+ import { config } from 'dotenv';
15
+ import { MCPAgent, MCPClient } from '../index.js';
16
+ // Load environment variables
17
+ config();
18
+ // Utility function to convert streamEvents to AI SDK compatible stream
19
+ async function* streamEventsToAISDK(streamEvents) {
20
+ for await (const event of streamEvents) {
21
+ // Only yield the actual content tokens from chat model streams
22
+ if (event.event === 'on_chat_model_stream' && event.data?.chunk?.text) {
23
+ const textContent = event.data.chunk.text;
24
+ if (typeof textContent === 'string' && textContent.length > 0) {
25
+ yield textContent;
26
+ }
27
+ }
28
+ }
29
+ }
30
+ // Convert async generator to ReadableStream for AI SDK compatibility
31
+ function createReadableStreamFromGenerator(generator) {
32
+ return new ReadableStream({
33
+ async start(controller) {
34
+ try {
35
+ for await (const chunk of generator) {
36
+ controller.enqueue(chunk);
37
+ }
38
+ controller.close();
39
+ }
40
+ catch (error) {
41
+ controller.error(error);
42
+ }
43
+ },
44
+ });
45
+ }
46
+ // Enhanced adapter that includes tool information
47
+ async function* streamEventsToAISDKWithTools(streamEvents) {
48
+ for await (const event of streamEvents) {
49
+ switch (event.event) {
50
+ case 'on_chat_model_stream':
51
+ if (event.data?.chunk?.text) {
52
+ const textContent = event.data.chunk.text;
53
+ if (typeof textContent === 'string' && textContent.length > 0) {
54
+ yield textContent;
55
+ }
56
+ }
57
+ break;
58
+ case 'on_tool_start':
59
+ yield `\nšŸ”§ Using tool: ${event.name}\n`;
60
+ break;
61
+ case 'on_tool_end':
62
+ yield `\nāœ… Tool completed: ${event.name}\n`;
63
+ break;
64
+ }
65
+ }
66
+ }
67
+ // Example: Basic AI SDK API route handler
68
+ async function createApiHandler() {
69
+ const everythingServer = {
70
+ mcpServers: {
71
+ everything: {
72
+ command: 'npx',
73
+ args: ['-y', '@modelcontextprotocol/server-everything'],
74
+ },
75
+ },
76
+ };
77
+ const client = new MCPClient(everythingServer);
78
+ const llm = new ChatAnthropic({
79
+ model: 'claude-sonnet-4-20250514',
80
+ temperature: 0.1,
81
+ });
82
+ const agent = new MCPAgent({
83
+ llm,
84
+ client,
85
+ maxSteps: 5,
86
+ verbose: false,
87
+ });
88
+ // Simulate an API route handler
89
+ const apiHandler = async (request) => {
90
+ try {
91
+ // Get streamEvents from MCPAgent
92
+ const streamEvents = agent.streamEvents(request.prompt);
93
+ // Convert to AI SDK compatible format
94
+ const aiSDKStream = streamEventsToAISDK(streamEvents);
95
+ const readableStream = createReadableStreamFromGenerator(aiSDKStream);
96
+ // Use LangChainAdapter to create a Response compatible with AI SDK
97
+ return LangChainAdapter.toDataStreamResponse(readableStream);
98
+ }
99
+ catch (error) {
100
+ console.error('Error in API handler:', error);
101
+ throw error;
102
+ }
103
+ finally {
104
+ await client.closeAllSessions();
105
+ }
106
+ };
107
+ return apiHandler;
108
+ }
109
+ // Example: Enhanced API handler with tool visibility
110
+ async function createEnhancedApiHandler() {
111
+ const everythingServer = {
112
+ mcpServers: {
113
+ everything: {
114
+ command: 'npx',
115
+ args: ['-y', '@modelcontextprotocol/server-everything'],
116
+ },
117
+ },
118
+ };
119
+ const client = new MCPClient(everythingServer);
120
+ const llm = new ChatAnthropic({
121
+ model: 'claude-sonnet-4-20250514',
122
+ temperature: 0.1,
123
+ });
124
+ const agent = new MCPAgent({
125
+ llm,
126
+ client,
127
+ maxSteps: 8,
128
+ verbose: false,
129
+ });
130
+ const enhancedApiHandler = async (request) => {
131
+ try {
132
+ const streamEvents = agent.streamEvents(request.prompt);
133
+ const enhancedStream = streamEventsToAISDKWithTools(streamEvents);
134
+ const readableStream = createReadableStreamFromGenerator(enhancedStream);
135
+ return LangChainAdapter.toDataStreamResponse(readableStream);
136
+ }
137
+ catch (error) {
138
+ console.error('Error in enhanced API handler:', error);
139
+ throw error;
140
+ }
141
+ finally {
142
+ await client.closeAllSessions();
143
+ }
144
+ };
145
+ return enhancedApiHandler;
146
+ }
147
+ // Example: Simulated Next.js API route
148
+ async function simulateNextJSApiRoute() {
149
+ console.log('šŸš€ Simulating Next.js API Route with AI SDK Integration\n');
150
+ const apiHandler = await createApiHandler();
151
+ // Simulate a request
152
+ const request = {
153
+ prompt: 'What\'s the current time? Also, list the files in the current directory.',
154
+ };
155
+ console.log(`šŸ“ Request: ${request.prompt}\n`);
156
+ console.log('šŸ“” Streaming response:\n');
157
+ try {
158
+ const response = await apiHandler(request);
159
+ if (response.body) {
160
+ const reader = response.body.getReader();
161
+ const decoder = new TextDecoder();
162
+ while (true) {
163
+ const { done, value } = await reader.read();
164
+ if (done)
165
+ break;
166
+ const chunk = decoder.decode(value);
167
+ process.stdout.write(chunk);
168
+ }
169
+ }
170
+ }
171
+ catch (error) {
172
+ console.error('āŒ Error:', error);
173
+ }
174
+ console.log('\n\nāœ… API Route simulation complete');
175
+ }
176
+ // Example: Enhanced streaming with tool visibility
177
+ async function simulateEnhancedStreaming() {
178
+ console.log('\n\nšŸš€ Enhanced Streaming with Tool Visibility\n');
179
+ const enhancedHandler = await createEnhancedApiHandler();
180
+ const request = {
181
+ prompt: 'Check the current time and create a file with a timestamp. Then tell me what tools you used.',
182
+ };
183
+ console.log(`šŸ“ Request: ${request.prompt}\n`);
184
+ console.log('šŸ“” Enhanced streaming response:\n');
185
+ try {
186
+ const response = await enhancedHandler(request);
187
+ if (response.body) {
188
+ const reader = response.body.getReader();
189
+ const decoder = new TextDecoder();
190
+ while (true) {
191
+ const { done, value } = await reader.read();
192
+ if (done)
193
+ break;
194
+ const chunk = decoder.decode(value);
195
+ process.stdout.write(chunk);
196
+ }
197
+ }
198
+ }
199
+ catch (error) {
200
+ console.error('āŒ Error:', error);
201
+ }
202
+ console.log('\n\nāœ… Enhanced streaming complete');
203
+ }
204
+ // Run all examples
205
+ async function runAllExamples() {
206
+ await simulateNextJSApiRoute();
207
+ await simulateEnhancedStreaming();
208
+ }
209
+ // Export utilities for reuse
210
+ export { createApiHandler, createEnhancedApiHandler, createReadableStreamFromGenerator, streamEventsToAISDK, streamEventsToAISDKWithTools, };
211
+ if (import.meta.url === `file://${process.argv[1]}`) {
212
+ runAllExamples().catch(console.error);
213
+ }
@@ -0,0 +1,12 @@
1
+ /**
2
+ * This example demonstrates how to use the stream method of MCPAgent to get
3
+ * intermediate steps and observe the agent's reasoning process in real-time.
4
+ *
5
+ * The stream method returns an AsyncGenerator that yields AgentStep objects
6
+ * for each intermediate step, and finally returns the complete result.
7
+ *
8
+ * This example also demonstrates the streamEvents method which yields
9
+ * LangChain StreamEvent objects for more granular, token-level streaming.
10
+ */
11
+ export {};
12
+ //# sourceMappingURL=stream_example.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"stream_example.d.ts","sourceRoot":"","sources":["../../examples/stream_example.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG"}
@@ -0,0 +1,198 @@
1
+ /**
2
+ * This example demonstrates how to use the stream method of MCPAgent to get
3
+ * intermediate steps and observe the agent's reasoning process in real-time.
4
+ *
5
+ * The stream method returns an AsyncGenerator that yields AgentStep objects
6
+ * for each intermediate step, and finally returns the complete result.
7
+ *
8
+ * This example also demonstrates the streamEvents method which yields
9
+ * LangChain StreamEvent objects for more granular, token-level streaming.
10
+ */
11
+ import { ChatAnthropic } from '@langchain/anthropic';
12
+ import { config } from 'dotenv';
13
+ import { MCPAgent, MCPClient } from '../index.js';
14
+ // Load environment variables from .env file
15
+ config();
16
+ const everythingServer = {
17
+ mcpServers: { everything: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-everything'] } },
18
+ };
19
+ async function streamingExample() {
20
+ console.log('šŸš€ Starting streaming example...\n');
21
+ // Initialize MCP client and agent
22
+ const client = new MCPClient(everythingServer);
23
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514', temperature: 0 });
24
+ const agent = new MCPAgent({
25
+ llm,
26
+ client,
27
+ maxSteps: 10,
28
+ verbose: true,
29
+ });
30
+ const query = `Please help me understand what capabilities you have:
31
+ 1. List all available tools
32
+ 2. Try using a few different tools to demonstrate their functionality
33
+ 3. Show me what resources and prompts are available
34
+ 4. Create a simple example to showcase your abilities`;
35
+ console.log(`šŸ“ Query: ${query}\n`);
36
+ console.log('šŸ”„ Starting to stream agent steps...\n');
37
+ try {
38
+ // Use the stream method to get intermediate steps
39
+ const stream = agent.stream(query);
40
+ let stepNumber = 1;
41
+ // Iterate through the async generator to get intermediate steps
42
+ for await (const step of stream) {
43
+ console.log(`\n--- Step ${stepNumber} ---`);
44
+ console.log(`šŸ”§ Tool: ${step.action.tool}`);
45
+ console.log(`šŸ“„ Input: ${JSON.stringify(step.action.toolInput, null, 2)}`);
46
+ console.log(`šŸ“¤ Output: ${step.observation}`);
47
+ console.log('---\n');
48
+ stepNumber++;
49
+ }
50
+ // The final result is the return value when the generator is done
51
+ // Note: In the loop above, we don't get the final result directly
52
+ // To get it, we need to manually handle the generator
53
+ }
54
+ catch (error) {
55
+ console.error('āŒ Error during streaming:', error);
56
+ }
57
+ console.log('\nšŸŽ‰ Streaming complete!');
58
+ }
59
+ async function streamingExampleWithFinalResult() {
60
+ console.log('\n\nšŸš€ Starting streaming example with final result capture...\n');
61
+ // Initialize MCP client and agent
62
+ const client = new MCPClient(everythingServer);
63
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514', temperature: 0 });
64
+ const agent = new MCPAgent({
65
+ llm,
66
+ client,
67
+ maxSteps: 8,
68
+ verbose: false, // Less verbose for cleaner output
69
+ });
70
+ const query = `What tools do you have access to? Please test 2-3 of them to show me what they can do.`;
71
+ console.log(`šŸ“ Query: ${query}\n`);
72
+ console.log('šŸ”„ Processing with intermediate steps...\n');
73
+ try {
74
+ // Create the stream generator
75
+ const stream = agent.stream(query);
76
+ let stepNumber = 1;
77
+ let result = '';
78
+ // Manually iterate through the generator to capture both steps and final result
79
+ while (true) {
80
+ const { done, value } = await stream.next();
81
+ if (done) {
82
+ // Generator is complete, value contains the final result
83
+ result = value;
84
+ break;
85
+ }
86
+ else {
87
+ // value is an AgentStep
88
+ console.log(`\nšŸ”§ Step ${stepNumber}: ${value.action.tool}`);
89
+ console.log(` Input: ${JSON.stringify(value.action.toolInput)}`);
90
+ console.log(` Result: ${value.observation.slice(0, 100)}${value.observation.length > 100 ? '...' : ''}`);
91
+ stepNumber++;
92
+ }
93
+ }
94
+ console.log(`\n${'='.repeat(50)}`);
95
+ console.log('šŸŽÆ FINAL RESULT:');
96
+ console.log('='.repeat(50));
97
+ console.log(result);
98
+ }
99
+ catch (error) {
100
+ console.error('āŒ Error during streaming:', error);
101
+ }
102
+ finally {
103
+ // Clean up
104
+ await client.closeAllSessions();
105
+ }
106
+ console.log('\nāœ… Example complete!');
107
+ }
108
+ async function streamEventsExample() {
109
+ console.log('\n\nšŸš€ Starting streamEvents example (token-level streaming)...\n');
110
+ // Initialize MCP client and agent
111
+ const client = new MCPClient(everythingServer);
112
+ const llm = new ChatAnthropic({ model: 'claude-sonnet-4-20250514', temperature: 0 });
113
+ const agent = new MCPAgent({
114
+ llm,
115
+ client,
116
+ maxSteps: 5,
117
+ verbose: false,
118
+ });
119
+ const query = `What's the current time and date? Also create a simple text file with today's date.`;
120
+ console.log(`šŸ“ Query: ${query}\n`);
121
+ console.log('šŸ”„ Streaming fine-grained events...\n');
122
+ try {
123
+ // Use streamEvents for token-level streaming
124
+ const eventStream = agent.streamEvents(query);
125
+ let eventCount = 0;
126
+ let currentToolCall = null;
127
+ for await (const event of eventStream) {
128
+ eventCount++;
129
+ // Log different types of events
130
+ switch (event.event) {
131
+ case 'on_chain_start':
132
+ if (event.name === 'AgentExecutor') {
133
+ console.log('šŸ Agent execution started');
134
+ }
135
+ break;
136
+ case 'on_tool_start':
137
+ currentToolCall = event.name;
138
+ console.log(`\nšŸ”§ Tool started: ${event.name}`);
139
+ if (event.data?.input) {
140
+ console.log(` Input: ${JSON.stringify(event.data.input)}`);
141
+ }
142
+ break;
143
+ case 'on_tool_end':
144
+ if (event.name === currentToolCall) {
145
+ console.log(`āœ… Tool completed: ${event.name}`);
146
+ if (event.data?.output) {
147
+ const output = typeof event.data.output === 'string'
148
+ ? event.data.output
149
+ : JSON.stringify(event.data.output);
150
+ console.log(` Output: ${output.slice(0, 100)}${output.length > 100 ? '...' : ''}`);
151
+ }
152
+ currentToolCall = null;
153
+ }
154
+ break;
155
+ case 'on_chat_model_stream':
156
+ // This shows token-by-token streaming from the LLM
157
+ if (event.data?.chunk?.text) {
158
+ const textContent = event.data.chunk.text;
159
+ if (typeof textContent === 'string' && textContent.length > 0) {
160
+ process.stdout.write(textContent);
161
+ }
162
+ }
163
+ break;
164
+ case 'on_chain_end':
165
+ if (event.name === 'AgentExecutor') {
166
+ console.log('\n\nšŸ Agent execution completed');
167
+ }
168
+ break;
169
+ // You can handle many more event types:
170
+ // - on_llm_start, on_llm_end
171
+ // - on_parser_start, on_parser_end
172
+ // - on_retriever_start, on_retriever_end
173
+ // - etc.
174
+ }
175
+ // Limit output for demo purposes
176
+ if (eventCount > 200) {
177
+ console.log('\n... (truncated for demo)');
178
+ break;
179
+ }
180
+ }
181
+ console.log(`\n\nšŸ“Š Total events emitted: ${eventCount}`);
182
+ }
183
+ catch (error) {
184
+ console.error('āŒ Error during event streaming:', error);
185
+ }
186
+ finally {
187
+ await client.closeAllSessions();
188
+ }
189
+ console.log('\nāœ… StreamEvents example complete!');
190
+ }
191
+ // Run all examples
192
+ async function runAllExamples() {
193
+ await streamingExample();
194
+ await streamingExampleWithFinalResult();
195
+ await streamEventsExample();
196
+ }
197
+ // Run the examples
198
+ runAllExamples().catch(console.error);
package/dist/index.d.ts CHANGED
@@ -8,8 +8,10 @@ import { WebSocketConnector } from './src/connectors/websocket.js';
8
8
  import { Logger, logger } from './src/logging.js';
9
9
  import { MCPSession } from './src/session.js';
10
10
  export { BaseAdapter, LangChainAdapter } from './src/adapters/index.js';
11
+ export * from './src/agents/utils/index.js';
11
12
  export { ServerManager } from './src/managers/server_manager.js';
12
13
  export * from './src/managers/tools/index.js';
13
14
  export { setTelemetrySource, Telemetry } from './src/telemetry/index.js';
15
+ export type { StreamEvent } from '@langchain/core/tracers/log_stream';
14
16
  export { BaseConnector, HttpConnector, loadConfigFile, Logger, logger, MCPAgent, MCPClient, MCPSession, StdioConnector, WebSocketConnector };
15
17
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AACpD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAA;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAA;AACxD,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAA;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAA;AAElE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAA;AACjD,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAA;AAE7C,OAAO,EAAE,WAAW,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAA;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAA;AAChE,cAAc,+BAA+B,CAAA;AAG7C,OAAO,EAAE,kBAAkB,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAA;AAExE,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,cAAc,EAAE,kBAAkB,EAAE,CAAA"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAA;AACpD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAA;AAC3C,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAA;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAA;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAA;AACxD,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAA;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAA;AAElE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAA;AACjD,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAA;AAE7C,OAAO,EAAE,WAAW,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAA;AAEvE,cAAc,6BAA6B,CAAA;AAC3C,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAA;AAEhE,cAAc,+BAA+B,CAAA;AAG7C,OAAO,EAAE,kBAAkB,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAA;AAGxE,YAAY,EAAE,WAAW,EAAE,MAAM,oCAAoC,CAAA;AAErE,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,cAAc,EAAE,kBAAkB,EAAE,CAAA"}