@aigne/example-afs-mcp-server 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.local.example +44 -0
- package/LICENSE.md +93 -0
- package/README.md +269 -0
- package/index.test.ts +11 -0
- package/index.ts +37 -0
- package/package.json +35 -0
package/.env.local.example
ADDED
@@ -0,0 +1,44 @@
+# Change the name of this file to .env.local and fill in the following values
+
+# Uncomment the lines below to enable debug logging
+# DEBUG="aigne:*"
+
+# Use different Models
+
+# OpenAI
+# MODEL="openai/gpt-4.1"
+# OPENAI_API_KEY="YOUR_OPENAI_API_KEY"
+
+# Anthropic claude
+# MODEL="anthropic/claude-3-7-sonnet-latest"
+# ANTHROPIC_API_KEY=""
+
+# Gemini
+MODEL="google/gemini-2.5-pro"
+# GEMINI_API_KEY=""
+
+# Bedrock nova
+# MODEL=bedrock:us.amazon.nova-premier-v1:0
+# AWS_ACCESS_KEY_ID=""
+# AWS_SECRET_ACCESS_KEY=""
+# AWS_REGION=us-west-2
+
+# DeepSeek
+# MODEL="deepseek/deepseek-chat"
+# DEEPSEEK_API_KEY=""
+
+# OpenRouter
+# MODEL="openrouter/openai/gpt-4o"
+# OPEN_ROUTER_API_KEY=""
+
+# xAI
+# MODEL="xai/grok-2-latest"
+# XAI_API_KEY=""
+
+# Ollama
+# MODEL="ollama/llama3.2"
+# OLLAMA_DEFAULT_BASE_URL="http://localhost:11434/v1";
+
+
+# Setup proxy if needed
+# HTTPS_PROXY=http://localhost:7890
package/LICENSE.md
ADDED
@@ -0,0 +1,93 @@
+Elastic License 2.0
+
+URL: https://www.elastic.co/licensing/elastic-license
+
+## Acceptance
+
+By using the software, you agree to all of the terms and conditions below.
+
+## Copyright License
+
+The licensor grants you a non-exclusive, royalty-free, worldwide,
+non-sublicensable, non-transferable license to use, copy, distribute, make
+available, and prepare derivative works of the software, in each case subject to
+the limitations and conditions below.
+
+## Limitations
+
+You may not provide the software to third parties as a hosted or managed
+service, where the service provides users with access to any substantial set of
+the features or functionality of the software.
+
+You may not move, change, disable, or circumvent the license key functionality
+in the software, and you may not remove or obscure any functionality in the
+software that is protected by the license key.
+
+You may not alter, remove, or obscure any licensing, copyright, or other notices
+of the licensor in the software. Any use of the licensor’s trademarks is subject
+to applicable law.
+
+## Patents
+
+The licensor grants you a license, under any patent claims the licensor can
+license, or becomes able to license, to make, have made, use, sell, offer for
+sale, import and have imported the software, in each case subject to the
+limitations and conditions in this license. This license does not cover any
+patent claims that you cause to be infringed by modifications or additions to
+the software. If you or your company make any written claim that the software
+infringes or contributes to infringement of any patent, your patent license for
+the software granted under these terms ends immediately. If your company makes
+such a claim, your patent license ends immediately for work on behalf of your
+company.
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of the software from you
+also gets a copy of these terms.
+
+If you modify the software, you must include in any modified copies of the
+software prominent notices stating that you have modified the software.
+
+## No Other Rights
+
+These terms do not imply any licenses other than those expressly granted in
+these terms.
+
+## Termination
+
+If you use the software in violation of these terms, such use is not licensed,
+and your licenses will automatically terminate. If the licensor provides you
+with a notice of your violation, and you cease all violation of this license no
+later than 30 days after you receive that notice, your licenses will be
+reinstated retroactively. However, if you violate these terms after such
+reinstatement, any additional violation of these terms will cause your licenses
+to terminate automatically and permanently.
+
+## No Liability
+
+*As far as the law allows, the software comes as is, without any warranty or
+condition, and the licensor will not be liable to you for any damages arising
+out of these terms or the use or nature of the software, under any kind of
+legal claim.*
+
+## Definitions
+
+The **licensor** is the entity offering these terms, and the **software** is the
+software the licensor makes available under these terms, including any portion
+of it.
+
+**you** refers to the individual or entity agreeing to these terms.
+
+**your company** is any legal entity, sole proprietorship, or other kind of
+organization that you work for, plus all organizations that have control over,
+are under the control of, or are under common control with that
+organization. **control** means ownership of substantially all the assets of an
+entity, or the power to direct its management and policies by vote, contract, or
+otherwise. Control can be direct or indirect.
+
+**your licenses** are all the licenses granted to you for the software under
+these terms.
+
+**use** means anything you do with the software requiring one of your licenses.
+
+**trademark** means trademarks, service marks, and similar rights.
package/README.md
ADDED
@@ -0,0 +1,269 @@
+# AFS MCP Server Example
+
+<p align="center">
+  <picture>
+    <source srcset="https://raw.githubusercontent.com/AIGNE-io/aigne-framework/main/logo-dark.svg" media="(prefers-color-scheme: dark)">
+    <source srcset="https://raw.githubusercontent.com/AIGNE-io/aigne-framework/main/logo.svg" media="(prefers-color-scheme: light)">
+    <img src="https://raw.githubusercontent.com/AIGNE-io/aigne-framework/main/logo.svg" alt="AIGNE Logo" width="400" />
+  </picture>
+</p>
+
+This example shows how to mount any [MCP (Model Context Protocol)](https://www.anthropic.com/news/model-context-protocol) server as an AFS module, making it accessible to AI agents through a unified file system interface. We use the GitHub MCP Server as a real-world demonstration.
+
+## What You'll See
+
+**User asks:** "Search for a repo named aigne"
+
+**Behind the scenes:**
+1. LLM calls `afs_exec` → `/modules/github-mcp-server/search_repositories`
+2. MCP server searches GitHub and returns JSON results
+3. LLM presents results naturally: "Found 89 repositories. Notable matches: aigne-framework..."
+
+**The power:** AI agents can access GitHub (or any MCP server) through a simple, unified AFS interface - just like accessing files!
+
+## Prerequisites
+
+* [Node.js](https://nodejs.org) (>=20.0) and npm installed on your machine
+* [Docker](https://www.docker.com/) installed and running
+* A [GitHub Personal Access Token](https://github.com/settings/tokens) for GitHub API access
+* An [OpenAI API key](https://platform.openai.com/api-keys) for interacting with OpenAI's services
+* Optional dependencies (if running the example from source code):
+  * [Pnpm](https://pnpm.io) for package management
+  * [Bun](https://bun.sh) for running unit tests & examples
+
+## Quick Start (No Installation Required)
+
+```bash
+# Set your GitHub Personal Access Token
+export GITHUB_PERSONAL_ACCESS_TOKEN=your_github_token_here
+
+# Set your OpenAI API key
+export OPENAI_API_KEY=your_openai_api_key_here
+
+# Run in interactive chat mode
+npx -y @aigne/example-afs-mcp-server --chat
+
+# Ask a specific question
+npx -y @aigne/example-afs-mcp-server --input "Search for a repo named aigne"
+```
+
+## See It In Action
+
+Here's what happens when you ask to search for a repository:
+
+```
+👤 You: "Search for a repo named aigne"
+
+🤖 Agent thinks: I need to search GitHub repositories...
+   → Calls: afs_exec("/modules/github-mcp-server/search_repositories")
+
+📡 GitHub MCP Server:
+   ✓ Found 89 repositories matching "aigne"
+
+🤖 AI: "I searched GitHub for 'aigne'. Results: 89 repositories found.
+
+Notable matches:
+• aigne-framework (AIGNE-io/aigne-framework) - ⭐ 150 stars
+• aigne-examples (user/aigne-examples) - ⭐ 12 stars
+...
+
+Would you like me to open any of these repos or see more details?"
+```
+
+**Key insight:** The agent treats the GitHub MCP Server like any other AFS module - no special integration code needed!
+
+## Installation
+
+### Clone the Repository
+
+```bash
+git clone https://github.com/AIGNE-io/aigne-framework
+```
+
+### Install Dependencies
+
+```bash
+cd aigne-framework/examples/afs-mcp-server
+
+pnpm install
+```
+
+### Setup Environment Variables
+
+Setup your API keys in the `.env.local` file:
+
+```bash
+GITHUB_PERSONAL_ACCESS_TOKEN="" # Set your GitHub Personal Access Token here
+OPENAI_API_KEY="" # Set your OpenAI API key here
+```
+
+#### Using Different Models
+
+You can use different AI models by setting the `MODEL` environment variable along with the corresponding API key. The framework supports multiple providers:
+
+* **OpenAI**: `MODEL="openai:gpt-4.1"` with `OPENAI_API_KEY`
+* **Anthropic**: `MODEL="anthropic:claude-3-7-sonnet-latest"` with `ANTHROPIC_API_KEY`
+* **Google Gemini**: `MODEL="gemini:gemini-2.0-flash"` with `GEMINI_API_KEY`
+* **AWS Bedrock**: `MODEL="bedrock:us.amazon.nova-premier-v1:0"` with AWS credentials
+* **DeepSeek**: `MODEL="deepseek:deepseek-chat"` with `DEEPSEEK_API_KEY`
+* **OpenRouter**: `MODEL="openrouter:openai/gpt-4o"` with `OPEN_ROUTER_API_KEY`
+* **xAI**: `MODEL="xai:grok-2-latest"` with `XAI_API_KEY`
+* **Ollama**: `MODEL="ollama:llama3.2"` with `OLLAMA_DEFAULT_BASE_URL`
+
+For detailed configuration examples, please refer to the `.env.local.example` file in this directory.
+
+### Run the Example
+
+```bash
+# Run in interactive chat mode
+pnpm start --chat
+
+# Run with a single message
+pnpm start --input "What are the recent issues in the AIGNE repository?"
+```
+
+## How It Works: 3 Simple Steps
+
+### 1. Launch the MCP Server
+
+```typescript
+import { MCPAgent } from "@aigne/core";
+
+const mcpAgent = await MCPAgent.from({
+  command: "docker",
+  args: [
+    "run", "-i", "--rm",
+    "-e", `GITHUB_PERSONAL_ACCESS_TOKEN=${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`,
+    "ghcr.io/github/github-mcp-server",
+  ],
+});
+```
+
+### 2. Mount It as an AFS Module
+
+```typescript
+import { AFS } from "@aigne/afs";
+import { AFSHistory } from "@aigne/afs-history";
+
+const afs = new AFS()
+  .mount(new AFSHistory({ storage: { url: ":memory:" } }))
+  .mount(mcpAgent); // Mounted at /modules/github-mcp-server
+```
+
+### 3. Create an AI Agent
+
+```typescript
+import { AIAgent } from "@aigne/core";
+
+const agent = AIAgent.from({
+  instructions: "Help users interact with GitHub via the github-mcp-server module.",
+  inputKey: "message",
+  afs, // Agent automatically gets access to all mounted modules
+});
+```
+
+**That's it!** The agent can now call `/modules/github-mcp-server/search_repositories`, `/modules/github-mcp-server/list_issues`, and all other GitHub MCP tools through the AFS interface.
+
+## Try These Examples
+
+```bash
+# Search for repositories
+npx -y @aigne/example-afs-mcp-server --input "Search for a repo named aigne"
+
+# Get repository information
+npx -y @aigne/example-afs-mcp-server --input "Tell me about the AIGNE-io/aigne-framework repository"
+
+# Check recent issues
+npx -y @aigne/example-afs-mcp-server --input "What are the recent open issues in AIGNE-io/aigne-framework?"
+
+# Interactive mode - ask follow-up questions naturally
+npx -y @aigne/example-afs-mcp-server --chat
+```
+
+**In chat mode, try:**
+- "Show me the most popular AIGNE repositories"
+- "Search for repos about AI agents"
+- "What pull requests are open in aigne-framework?"
+- "Find code examples of MCPAgent usage"
+
+## Why Mount MCP as AFS?
+
+**The Problem:** Each MCP server has its own protocol and tools. AI agents need custom code to work with each one.
+
+**The Solution:** Mount all MCP servers as AFS modules:
+
+```typescript
+const afs = new AFS()
+  .mount("/github", await MCPAgent.from({ /* GitHub MCP */ }))
+  .mount("/slack", await MCPAgent.from({ /* Slack MCP */ }))
+  .mount("/notion", await MCPAgent.from({ /* Notion MCP */ }));
+
+// Now the agent uses ONE interface (afs_exec) to access ALL services!
+```
+
+**Benefits:**
+- **Unified Interface**: All MCP servers accessible through `afs_list`, `afs_read`, `afs_exec`
+- **Composability**: Mix MCP servers with file systems, databases, custom modules
+- **Path-Based**: Multiple MCP servers coexist at different paths
+- **No Rewiring**: AI agents work with any mounted MCP server automatically
+
+## Use Any MCP Server
+
+Replace GitHub with **any** MCP server:
+
+```typescript
+// Slack MCP Server
+.mount(await MCPAgent.from({
+  command: "npx",
+  args: ["-y", "@modelcontextprotocol/server-slack"],
+  env: { SLACK_BOT_TOKEN: process.env.SLACK_BOT_TOKEN },
+}))
+
+// File System MCP Server
+.mount(await MCPAgent.from({
+  command: "npx",
+  args: ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/files"],
+}))
+
+// Postgres MCP Server
+.mount(await MCPAgent.from({
+  command: "npx",
+  args: ["-y", "@modelcontextprotocol/server-postgres"],
+  env: { POSTGRES_CONNECTION_STRING: process.env.DATABASE_URL },
+}))
+```
+
+## Mix MCP with Other AFS Modules
+
+```typescript
+import { LocalFS } from "@aigne/afs-local-fs";
+import { UserProfileMemory } from "@aigne/afs-user-profile-memory";
+
+const afs = new AFS()
+  .mount(new AFSHistory({ storage: { url: ":memory:" } }))
+  .mount(new LocalFS({ localPath: "./docs" }))
+  .mount(new UserProfileMemory({ context }))
+  .mount(await MCPAgent.from({ /* GitHub MCP */ }))
+  .mount(await MCPAgent.from({ /* Slack MCP */ }));
+
+// Agent now has: history, local files, user profiles, GitHub, Slack!
+```
+
+## Related Examples
+
+- [AFS Memory Example](../afs-memory/README.md) - Conversational memory with user profiles
+- [AFS LocalFS Example](../afs-local-fs/README.md) - File system access with AI agents
+
+## MCP Resources
+
+- [Model Context Protocol Official Site](https://www.anthropic.com/news/model-context-protocol)
+- [GitHub MCP Server](https://github.com/github/mcp-server)
+- [MCP Servers List](https://github.com/modelcontextprotocol/servers)
+
+## TypeScript Support
+
+This package includes full TypeScript type definitions.
+
+## License
+
+[MIT](../../LICENSE.md)
package/index.test.ts
ADDED
@@ -0,0 +1,11 @@
+import { expect, test } from "bun:test";
+import { runExampleTest } from "@aigne/test-utils/run-example-test.js";
+
+test(
+  "should successfully run the chatbot",
+  async () => {
+    const { status } = await runExampleTest();
+    expect(status).toBe(0);
+  },
+  { timeout: 600000 },
+);
package/index.ts
ADDED
@@ -0,0 +1,37 @@
+#!/usr/bin/env npx -y bun
+
+import { AFS } from "@aigne/afs";
+import { AFSHistory } from "@aigne/afs-history";
+import { runWithAIGNE } from "@aigne/cli/utils/run-with-aigne.js";
+import { AIAgent, MCPAgent } from "@aigne/core";
+
+const afs = new AFS()
+  .mount(new AFSHistory({ storage: { url: ":memory:" } })) // In-memory history for this example
+  .mount(
+    // Integrate github-mcp-server MCP server as an AFS module, so that AI agents can access github repo via AFS API
+    await MCPAgent.from({
+      command: "docker",
+      args: [
+        "run",
+        "-i",
+        "--rm",
+        "-e",
+        `GITHUB_PERSONAL_ACCESS_TOKEN=${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`,
+        "ghcr.io/github/github-mcp-server",
+      ],
+    }),
+  );
+
+const agent = AIAgent.from({
+  instructions:
+    "You are a friendly chatbot that can help users interact with a github repository via github-mcp-server mounted on AFS. Use the provided 'github-mcp-server' module to answer user questions about the repository. If you don't know the answer, just say you don't know. Do not try to make up an answer.",
+  inputKey: "message",
+  afs,
+});
+
+await runWithAIGNE(agent, {
+  chatLoopOptions: {
+    welcome:
+      "Hello! I'm a chatbot that can help you interact with github by github-mcp-server. Ask me anything about the github repository!",
+  },
+});
package/package.json
ADDED
@@ -0,0 +1,35 @@
+{
+  "name": "@aigne/example-afs-mcp-server",
+  "version": "1.0.0",
+  "description": "A demonstration of using AIGNE Framework with AFS mount a MCP server",
+  "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
+  "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/afs-mcp-server",
+  "license": "MIT",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/AIGNE-io/aigne-framework"
+  },
+  "bin": "index.ts",
+  "files": [
+    ".env.local.example",
+    "*.ts",
+    "README.md"
+  ],
+  "dependencies": {
+    "yargs": "^18.0.0",
+    "@aigne/afs": "^1.2.0-beta",
+    "@aigne/afs-history": "^1.0.0",
+    "@aigne/afs-local-fs": "^1.1.0-beta",
+    "@aigne/cli": "^1.55.0-beta",
+    "@aigne/core": "^1.68.0-beta"
+  },
+  "devDependencies": {
+    "@types/bun": "^1.2.22",
+    "@aigne/test-utils": "^0.5.60-beta"
+  },
+  "scripts": {
+    "start": "bun run index.ts",
+    "lint": "tsc --noEmit",
+    "test:llm": "bun test index.test.ts"
+  }
+}