mcp-use 0.1.20 → 0.2.0
This diff shows the changes between publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- package/dist/src/browser.d.ts +49 -0
- package/dist/src/browser.d.ts.map +1 -0
- package/dist/src/browser.js +75 -0
- package/dist/src/client/base.d.ts +32 -0
- package/dist/src/client/base.d.ts.map +1 -0
- package/dist/src/client/base.js +119 -0
- package/dist/src/client.d.ts +19 -16
- package/dist/src/client.d.ts.map +1 -1
- package/dist/src/client.js +24 -107
- package/dist/src/logging.d.ts +1 -1
- package/dist/src/logging.d.ts.map +1 -1
- package/dist/src/logging.js +31 -16
- package/dist/src/managers/server_manager.js +1 -1
- package/dist/src/oauth-helper.d.ts +135 -0
- package/dist/src/oauth-helper.d.ts.map +1 -0
- package/dist/src/oauth-helper.js +427 -0
- package/package.json +6 -1
- package/dist/examples/add_server_tool.d.ts +0 -8
- package/dist/examples/add_server_tool.d.ts.map +0 -1
- package/dist/examples/add_server_tool.js +0 -79
- package/dist/examples/ai_sdk_example.d.ts +0 -23
- package/dist/examples/ai_sdk_example.d.ts.map +0 -1
- package/dist/examples/ai_sdk_example.js +0 -213
- package/dist/examples/airbnb_use.d.ts +0 -10
- package/dist/examples/airbnb_use.d.ts.map +0 -1
- package/dist/examples/airbnb_use.js +0 -43
- package/dist/examples/blender_use.d.ts +0 -15
- package/dist/examples/blender_use.d.ts.map +0 -1
- package/dist/examples/blender_use.js +0 -39
- package/dist/examples/browser_use.d.ts +0 -10
- package/dist/examples/browser_use.d.ts.map +0 -1
- package/dist/examples/browser_use.js +0 -46
- package/dist/examples/chat_example.d.ts +0 -10
- package/dist/examples/chat_example.d.ts.map +0 -1
- package/dist/examples/chat_example.js +0 -86
- package/dist/examples/filesystem_use.d.ts +0 -11
- package/dist/examples/filesystem_use.d.ts.map +0 -1
- package/dist/examples/filesystem_use.js +0 -43
- package/dist/examples/http_example.d.ts +0 -18
- package/dist/examples/http_example.d.ts.map +0 -1
- package/dist/examples/http_example.js +0 -37
- package/dist/examples/mcp_everything.d.ts +0 -6
- package/dist/examples/mcp_everything.d.ts.map +0 -1
- package/dist/examples/mcp_everything.js +0 -25
- package/dist/examples/multi_server_example.d.ts +0 -10
- package/dist/examples/multi_server_example.d.ts.map +0 -1
- package/dist/examples/multi_server_example.js +0 -51
- package/dist/examples/observability.d.ts +0 -6
- package/dist/examples/observability.d.ts.map +0 -1
- package/dist/examples/observability.js +0 -50
- package/dist/examples/stream_example.d.ts +0 -12
- package/dist/examples/stream_example.d.ts.map +0 -1
- package/dist/examples/stream_example.js +0 -198
- package/dist/examples/structured_output.d.ts +0 -9
- package/dist/examples/structured_output.d.ts.map +0 -1
- package/dist/examples/structured_output.js +0 -95
--- package/dist/examples/ai_sdk_example.d.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * AI SDK Integration Example
- *
- * This example demonstrates how to use MCPAgent's streamEvents() method
- * with Vercel AI SDK's LangChainAdapter for building streaming UIs.
- *
- * This pattern is useful for:
- * - Next.js API routes with useCompletion/useChat hooks
- * - Real-time streaming applications
- * - Building chat interfaces with token-by-token updates
- */
-import type { StreamEvent } from '../index.js';
-declare function streamEventsToAISDK(streamEvents: AsyncGenerator<StreamEvent, void, void>): AsyncGenerator<string, void, void>;
-declare function createReadableStreamFromGenerator(generator: AsyncGenerator<string, void, void>): ReadableStream<string>;
-declare function streamEventsToAISDKWithTools(streamEvents: AsyncGenerator<StreamEvent, void, void>): AsyncGenerator<string, void, void>;
-declare function createApiHandler(): Promise<(request: {
-    prompt: string;
-}) => Promise<Response>>;
-declare function createEnhancedApiHandler(): Promise<(request: {
-    prompt: string;
-}) => Promise<Response>>;
-export { createApiHandler, createEnhancedApiHandler, createReadableStreamFromGenerator, streamEventsToAISDK, streamEventsToAISDKWithTools, };
-//# sourceMappingURL=ai_sdk_example.d.ts.map
--- package/dist/examples/ai_sdk_example.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"ai_sdk_example.d.ts","sourceRoot":"","sources":["../../examples/ai_sdk_example.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAU9C,iBAAgB,mBAAmB,CACjC,YAAY,EAAE,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,CAAC,GACpD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAUpC;AAGD,iBAAS,iCAAiC,CACxC,SAAS,EAAE,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,GAC5C,cAAc,CAAC,MAAM,CAAC,CAcxB;AAGD,iBAAgB,4BAA4B,CAC1C,YAAY,EAAE,cAAc,CAAC,WAAW,EAAE,IAAI,EAAE,IAAI,CAAC,GACpD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAqBpC;AAGD,iBAAe,gBAAgB,sBAwBM;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,wBAsBtD;AAGD,iBAAe,wBAAwB,sBAuBM;IAAE,MAAM,EAAE,MAAM,CAAA;CAAE,wBAkB9D;AAoFD,OAAO,EACL,gBAAgB,EAChB,wBAAwB,EACxB,iCAAiC,EACjC,mBAAmB,EACnB,4BAA4B,GAC7B,CAAA"}
--- package/dist/examples/ai_sdk_example.js
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * AI SDK Integration Example
- *
- * This example demonstrates how to use MCPAgent's streamEvents() method
- * with Vercel AI SDK's LangChainAdapter for building streaming UIs.
- *
- * This pattern is useful for:
- * - Next.js API routes with useCompletion/useChat hooks
- * - Real-time streaming applications
- * - Building chat interfaces with token-by-token updates
- */
-import { ChatAnthropic } from '@langchain/anthropic';
-import { LangChainAdapter } from 'ai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables
-config();
-// Utility function to convert streamEvents to AI SDK compatible stream
-async function* streamEventsToAISDK(streamEvents) {
-    for await (const event of streamEvents) {
-        // Only yield the actual content tokens from chat model streams
-        if (event.event === 'on_chat_model_stream' && event.data?.chunk?.text) {
-            const textContent = event.data.chunk.text;
-            if (typeof textContent === 'string' && textContent.length > 0) {
-                yield textContent;
-            }
-        }
-    }
-}
-// Convert async generator to ReadableStream for AI SDK compatibility
-function createReadableStreamFromGenerator(generator) {
-    return new ReadableStream({
-        async start(controller) {
-            try {
-                for await (const chunk of generator) {
-                    controller.enqueue(chunk);
-                }
-                controller.close();
-            }
-            catch (error) {
-                controller.error(error);
-            }
-        },
-    });
-}
-// Enhanced adapter that includes tool information
-async function* streamEventsToAISDKWithTools(streamEvents) {
-    for await (const event of streamEvents) {
-        switch (event.event) {
-            case 'on_chat_model_stream':
-                if (event.data?.chunk?.text) {
-                    const textContent = event.data.chunk.text;
-                    if (typeof textContent === 'string' && textContent.length > 0) {
-                        yield textContent;
-                    }
-                }
-                break;
-            case 'on_tool_start':
-                yield `\n🔧 Using tool: ${event.name}\n`;
-                break;
-            case 'on_tool_end':
-                yield `\n✅ Tool completed: ${event.name}\n`;
-                break;
-        }
-    }
-}
-// Example: Basic AI SDK API route handler
-async function createApiHandler() {
-    const everythingServer = {
-        mcpServers: {
-            everything: {
-                command: 'npx',
-                args: ['-y', '@modelcontextprotocol/server-everything'],
-            },
-        },
-    };
-    const client = new MCPClient(everythingServer);
-    const llm = new ChatAnthropic({
-        model: 'claude-sonnet-4-20250514',
-        temperature: 0.1,
-    });
-    const agent = new MCPAgent({
-        llm,
-        client,
-        maxSteps: 5,
-        verbose: false,
-    });
-    // Simulate an API route handler
-    const apiHandler = async (request) => {
-        try {
-            // Get streamEvents from MCPAgent
-            const streamEvents = agent.streamEvents(request.prompt);
-            // Convert to AI SDK compatible format
-            const aiSDKStream = streamEventsToAISDK(streamEvents);
-            const readableStream = createReadableStreamFromGenerator(aiSDKStream);
-            // Use LangChainAdapter to create a Response compatible with AI SDK
-            return LangChainAdapter.toDataStreamResponse(readableStream);
-        }
-        catch (error) {
-            console.error('Error in API handler:', error);
-            throw error;
-        }
-        finally {
-            await client.closeAllSessions();
-        }
-    };
-    return apiHandler;
-}
-// Example: Enhanced API handler with tool visibility
-async function createEnhancedApiHandler() {
-    const everythingServer = {
-        mcpServers: {
-            everything: {
-                command: 'npx',
-                args: ['-y', '@modelcontextprotocol/server-everything'],
-            },
-        },
-    };
-    const client = new MCPClient(everythingServer);
-    const llm = new ChatAnthropic({
-        model: 'claude-sonnet-4-20250514',
-        temperature: 0.1,
-    });
-    const agent = new MCPAgent({
-        llm,
-        client,
-        maxSteps: 8,
-        verbose: false,
-    });
-    const enhancedApiHandler = async (request) => {
-        try {
-            const streamEvents = agent.streamEvents(request.prompt);
-            const enhancedStream = streamEventsToAISDKWithTools(streamEvents);
-            const readableStream = createReadableStreamFromGenerator(enhancedStream);
-            return LangChainAdapter.toDataStreamResponse(readableStream);
-        }
-        catch (error) {
-            console.error('Error in enhanced API handler:', error);
-            throw error;
-        }
-        finally {
-            await client.closeAllSessions();
-        }
-    };
-    return enhancedApiHandler;
-}
-// Example: Simulated Next.js API route
-async function simulateNextJSApiRoute() {
-    console.log('🚀 Simulating Next.js API Route with AI SDK Integration\n');
-    const apiHandler = await createApiHandler();
-    // Simulate a request
-    const request = {
-        prompt: 'What\'s the current time? Also, list the files in the current directory.',
-    };
-    console.log(`📝 Request: ${request.prompt}\n`);
-    console.log('📡 Streaming response:\n');
-    try {
-        const response = await apiHandler(request);
-        if (response.body) {
-            const reader = response.body.getReader();
-            const decoder = new TextDecoder();
-            while (true) {
-                const { done, value } = await reader.read();
-                if (done)
-                    break;
-                const chunk = decoder.decode(value);
-                process.stdout.write(chunk);
-            }
-        }
-    }
-    catch (error) {
-        console.error('❌ Error:', error);
-    }
-    console.log('\n\n✅ API Route simulation complete');
-}
-// Example: Enhanced streaming with tool visibility
-async function simulateEnhancedStreaming() {
-    console.log('\n\n🚀 Enhanced Streaming with Tool Visibility\n');
-    const enhancedHandler = await createEnhancedApiHandler();
-    const request = {
-        prompt: 'Check the current time and create a file with a timestamp. Then tell me what tools you used.',
-    };
-    console.log(`📝 Request: ${request.prompt}\n`);
-    console.log('📡 Enhanced streaming response:\n');
-    try {
-        const response = await enhancedHandler(request);
-        if (response.body) {
-            const reader = response.body.getReader();
-            const decoder = new TextDecoder();
-            while (true) {
-                const { done, value } = await reader.read();
-                if (done)
-                    break;
-                const chunk = decoder.decode(value);
-                process.stdout.write(chunk);
-            }
-        }
-    }
-    catch (error) {
-        console.error('❌ Error:', error);
-    }
-    console.log('\n\n✅ Enhanced streaming complete');
-}
-// Run all examples
-async function runAllExamples() {
-    await simulateNextJSApiRoute();
-    await simulateEnhancedStreaming();
-}
-// Export utilities for reuse
-export { createApiHandler, createEnhancedApiHandler, createReadableStreamFromGenerator, streamEventsToAISDK, streamEventsToAISDKWithTools, };
-if (import.meta.url === `file://${process.argv[1]}`) {
-    runAllExamples().catch(console.error);
-}
--- package/dist/examples/airbnb_use.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * Example demonstrating how to use mcp-use with Airbnb.
- *
- * This example shows how to connect an LLM to Airbnb through MCP tools
- * to perform tasks like searching for accommodations.
- *
- * Special Thanks to https://github.com/openbnb-org/mcp-server-airbnb for the server.
- */
-export {};
-//# sourceMappingURL=airbnb_use.d.ts.map
--- package/dist/examples/airbnb_use.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"airbnb_use.d.ts","sourceRoot":"","sources":["../../examples/airbnb_use.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG"}
--- package/dist/examples/airbnb_use.js
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Example demonstrating how to use mcp-use with Airbnb.
- *
- * This example shows how to connect an LLM to Airbnb through MCP tools
- * to perform tasks like searching for accommodations.
- *
- * Special Thanks to https://github.com/openbnb-org/mcp-server-airbnb for the server.
- */
-import { ChatOpenAI } from '@langchain/openai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-async function runAirbnbExample() {
-    // Create MCPClient with Airbnb configuration
-    const config = {
-        mcpServers: {
-            airbnb: {
-                command: 'npx',
-                args: ['-y', '@openbnb/mcp-server-airbnb', '--ignore-robots-txt'],
-            },
-        },
-    };
-    const client = new MCPClient(config);
-    // Create LLM - you can choose between different models
-    const llm = new ChatOpenAI({ model: 'gpt-4o' });
-    // Create agent with the client
-    const agent = new MCPAgent({ llm, client, maxSteps: 30 });
-    try {
-        // Run a query to search for accommodations
-        const result = await agent.run('Find me a nice place to stay in Barcelona for 2 adults '
-            + 'for a week in August. I prefer places with a pool and '
-            + 'good reviews. Show me the top 3 options.', 30);
-        console.error(`\nResult: ${result}`);
-    }
-    finally {
-        // Ensure we clean up resources properly
-        await client.closeAllSessions();
-    }
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    runAirbnbExample().catch(console.error);
-}
--- package/dist/examples/blender_use.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/**
- * Blender MCP example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect an LLM to Blender through MCP tools via WebSocket.
- * The example assumes you have installed the Blender MCP addon from:
- * https://github.com/ahujasid/blender-mcp
- *
- * Make sure the addon is enabled in Blender preferences and the WebSocket
- * server is running before executing this script.
- *
- * Special thanks to https://github.com/ahujasid/blender-mcp for the server.
- */
-export {};
-//# sourceMappingURL=blender_use.d.ts.map
--- package/dist/examples/blender_use.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"blender_use.d.ts","sourceRoot":"","sources":["../../examples/blender_use.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG"}
--- package/dist/examples/blender_use.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Blender MCP example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect an LLM to Blender through MCP tools via WebSocket.
- * The example assumes you have installed the Blender MCP addon from:
- * https://github.com/ahujasid/blender-mcp
- *
- * Make sure the addon is enabled in Blender preferences and the WebSocket
- * server is running before executing this script.
- *
- * Special thanks to https://github.com/ahujasid/blender-mcp for the server.
- */
-import { ChatAnthropic } from '@langchain/anthropic';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-async function runBlenderExample() {
-    // Create MCPClient with Blender MCP configuration
-    const config = { mcpServers: { blender: { command: 'uvx', args: ['blender-mcp'] } } };
-    const client = MCPClient.fromDict(config);
-    // Create LLM
-    const llm = new ChatAnthropic({ model: 'claude-3-5-sonnet-20240620' });
-    // Create agent with the client
-    const agent = new MCPAgent({ llm, client, maxSteps: 30 });
-    try {
-        // Run the query
-        const result = await agent.run('Create an inflatable cube with soft material and a plane as ground.', 30);
-        console.error(`\nResult: ${result}`);
-    }
-    finally {
-        // Ensure we clean up resources properly
-        await client.closeAllSessions();
-    }
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    runBlenderExample().catch(console.error);
-}
--- package/dist/examples/browser_use.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * Basic usage example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect any LLM to MCP tools through a unified interface.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-export {};
-//# sourceMappingURL=browser_use.d.ts.map
--- package/dist/examples/browser_use.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"browser_use.d.ts","sourceRoot":"","sources":["../../examples/browser_use.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG"}
--- package/dist/examples/browser_use.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Basic usage example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect any LLM to MCP tools through a unified interface.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-import path from 'node:path';
-import { fileURLToPath } from 'node:url';
-import { ChatOpenAI } from '@langchain/openai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-async function main() {
-    const config = {
-        mcpServers: {
-            playwright: {
-                command: 'npx',
-                args: ['@playwright/mcp@latest'],
-                env: {
-                    DISPLAY: ':1',
-                },
-            },
-        },
-    };
-    // Create MCPClient from config file
-    const client = new MCPClient(config);
-    // Create LLM
-    const llm = new ChatOpenAI({ model: 'gpt-4o' });
-    // const llm = init_chat_model({ model: "llama-3.1-8b-instant", model_provider: "groq" })
-    // const llm = new ChatAnthropic({ model: "claude-3-" })
-    // const llm = new ChatGroq({ model: "llama3-8b-8192" })
-    // Create agent with the client
-    const agent = new MCPAgent({ llm, client, maxSteps: 30 });
-    // Run the query
-    const result = await agent.run(`Navigate to https://github.com/mcp-use/mcp-use, give a star to the project and write
-    a summary of the project.`, 30);
-    console.error(`\nResult: ${result}`);
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    main().catch(console.error);
-}
--- package/dist/examples/chat_example.d.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * Simple chat example using MCPAgent with built-in conversation memory.
- *
- * This example demonstrates how to use the MCPAgent with its built-in
- * conversation history capabilities for better contextual interactions.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-export {};
-//# sourceMappingURL=chat_example.d.ts.map
--- package/dist/examples/chat_example.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"chat_example.d.ts","sourceRoot":"","sources":["../../examples/chat_example.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG"}
--- package/dist/examples/chat_example.js
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Simple chat example using MCPAgent with built-in conversation memory.
- *
- * This example demonstrates how to use the MCPAgent with its built-in
- * conversation history capabilities for better contextual interactions.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-import readline from 'node:readline';
-import { ChatOpenAI } from '@langchain/openai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-async function runMemoryChat() {
-    // Config file path - change this to your config file
-    const config = {
-        mcpServers: {
-            airbnb: {
-                command: 'npx',
-                args: ['-y', '@openbnb/mcp-server-airbnb', '--ignore-robots-txt'],
-            },
-        },
-    };
-    console.error('Initializing chat...');
-    // Create MCP client and agent with memory enabled
-    const client = new MCPClient(config);
-    const llm = new ChatOpenAI({ model: 'gpt-4o-mini' });
-    // Create agent with memory_enabled=true
-    const agent = new MCPAgent({
-        llm,
-        client,
-        maxSteps: 15,
-        memoryEnabled: true, // Enable built-in conversation memory
-    });
-    console.error('\n===== Interactive MCP Chat =====');
-    console.error('Type \'exit\' or \'quit\' to end the conversation');
-    console.error('Type \'clear\' to clear conversation history');
-    console.error('==================================\n');
-    // Create readline interface for user input
-    const rl = readline.createInterface({
-        input: process.stdin,
-        output: process.stdout,
-    });
-    const question = (prompt) => {
-        return new Promise((resolve) => {
-            rl.question(prompt, resolve);
-        });
-    };
-    try {
-        // Main chat loop
-        while (true) {
-            // Get user input
-            const userInput = await question('\nYou: ');
-            // Check for exit command
-            if (userInput.toLowerCase() === 'exit' || userInput.toLowerCase() === 'quit') {
-                console.error('Ending conversation...');
-                break;
-            }
-            // Check for clear history command
-            if (userInput.toLowerCase() === 'clear') {
-                agent.clearConversationHistory();
-                console.error('Conversation history cleared.');
-                continue;
-            }
-            // Get response from agent
-            process.stdout.write('\nAssistant: ');
-            try {
-                // Run the agent with the user input (memory handling is automatic)
-                const response = await agent.run(userInput);
-                console.error(response);
-            }
-            catch (error) {
-                console.error(`\nError: ${error}`);
-            }
-        }
-    }
-    finally {
-        // Clean up
-        rl.close();
-        await client.closeAllSessions();
-    }
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    runMemoryChat().catch(console.error);
-}
--- package/dist/examples/filesystem_use.d.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-/**
- * Basic usage example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect any LLM to MCP tools through a unified interface.
- *
- * Special Thanks to https://github.com/modelcontextprotocol/servers/tree/main/src/filesystem
- * for the server.
- */
-export {};
-//# sourceMappingURL=filesystem_use.d.ts.map
--- package/dist/examples/filesystem_use.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"filesystem_use.d.ts","sourceRoot":"","sources":["../../examples/filesystem_use.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG"}
--- package/dist/examples/filesystem_use.js
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Basic usage example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect any LLM to MCP tools through a unified interface.
- *
- * Special Thanks to https://github.com/modelcontextprotocol/servers/tree/main/src/filesystem
- * for the server.
- */
-import { ChatOpenAI } from '@langchain/openai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-const serverConfig = {
-    mcpServers: {
-        filesystem: {
-            command: 'npx',
-            args: [
-                '-y',
-                '@modelcontextprotocol/server-filesystem',
-                'THE_PATH_TO_YOUR_DIRECTORY',
-            ],
-        },
-    },
-};
-async function main() {
-    // Create MCPClient from config
-    const client = MCPClient.fromDict(serverConfig);
-    // Create LLM
-    const llm = new ChatOpenAI({ model: 'gpt-4o' });
-    // const llm = init_chat_model({ model: "llama-3.1-8b-instant", model_provider: "groq" })
-    // const llm = new ChatAnthropic({ model: "claude-3-" })
-    // const llm = new ChatGroq({ model: "llama3-8b-8192" })
-    // Create agent with the client
-    const agent = new MCPAgent({ llm, client, maxSteps: 30 });
-    // Run the query
-    const result = await agent.run('Hello can you give me a list of files and directories in the current directory', 30);
-    console.log(`\nResult: ${result}`);
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    main().catch(console.error);
-}
--- package/dist/examples/http_example.d.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * HTTP Example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect to an MCP server running on a specific HTTP port.
- *
- * Before running this example, you need to start the Playwright MCP server
- * in another terminal with:
- *
- * npx @playwright/mcp@latest --port 8931
- *
- * This will start the server on port 8931. Resulting in the config you find below.
- * Of course you can run this with any server you want at any URL.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-export {};
-//# sourceMappingURL=http_example.d.ts.map
--- package/dist/examples/http_example.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"http_example.d.ts","sourceRoot":"","sources":["../../examples/http_example.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG"}
--- package/dist/examples/http_example.js
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * HTTP Example for mcp-use.
- *
- * This example demonstrates how to use the mcp-use library with MCPClient
- * to connect to an MCP server running on a specific HTTP port.
- *
- * Before running this example, you need to start the Playwright MCP server
- * in another terminal with:
- *
- * npx @playwright/mcp@latest --port 8931
- *
- * This will start the server on port 8931. Resulting in the config you find below.
- * Of course you can run this with any server you want at any URL.
- *
- * Special thanks to https://github.com/microsoft/playwright-mcp for the server.
- */
-import { ChatOpenAI } from '@langchain/openai';
-import { config } from 'dotenv';
-import { MCPAgent, MCPClient } from '../index.js';
-// Load environment variables from .env file
-config();
-async function main() {
-    const config = { mcpServers: { http: { url: 'https://gitmcp.io/docs' } } };
-    // Create MCPClient from config
-    const client = MCPClient.fromDict(config);
-    // Create LLM
-    const llm = new ChatOpenAI({ model: 'gpt-4o' });
-    // Create agent with the client
-    const agent = new MCPAgent({ llm, client, maxSteps: 30 });
-    // Run the query
-    const result = await agent.run('Which tools are available and what can they do?', 30);
-    console.log(`\nResult: ${result}`);
-    await agent.close();
-}
-if (import.meta.url === `file://${process.argv[1]}`) {
-    main().catch(console.error);
-}
--- package/dist/examples/mcp_everything.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mcp_everything.d.ts","sourceRoot":"","sources":["../../examples/mcp_everything.ts"],"names":[],"mappings":"AAAA;;;GAGG"}