@aigne/example-mcp-sqlite 1.14.6 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.local.example +41 -2
- package/README.md +32 -11
- package/package.json +6 -6
package/.env.local.example
CHANGED
|
@@ -1,5 +1,44 @@
|
|
|
1
1
|
# Change the name of this file to .env.local and fill in the following values
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
# Uncomment the lines below to enable debug logging
|
|
4
|
+
# DEBUG="aigne:*"
|
|
4
5
|
|
|
5
|
-
|
|
6
|
+
# Use different Models
|
|
7
|
+
|
|
8
|
+
# OpenAI
|
|
9
|
+
MODEL="openai:gpt-4.1"
|
|
10
|
+
OPENAI_API_KEY="YOUR_OPENAI_API_KEY"
|
|
11
|
+
|
|
12
|
+
# Anthropic claude
|
|
13
|
+
# MODEL="anthropic:claude-3-7-sonnet-latest"
|
|
14
|
+
# ANTHROPIC_API_KEY=""
|
|
15
|
+
|
|
16
|
+
# Gemini
|
|
17
|
+
# MODEL="gemini:gemini-2.0-flash"
|
|
18
|
+
# GEMINI_API_KEY=""
|
|
19
|
+
|
|
20
|
+
# Bedrock nova
|
|
21
|
+
# MODEL=bedrock:us.amazon.nova-premier-v1:0
|
|
22
|
+
# AWS_ACCESS_KEY_ID=""
|
|
23
|
+
# AWS_SECRET_ACCESS_KEY=""
|
|
24
|
+
# AWS_REGION=us-west-2
|
|
25
|
+
|
|
26
|
+
# DeepSeek
|
|
27
|
+
# MODEL="deepseek:deepseek-chat"
|
|
28
|
+
# DEEPSEEK_API_KEY=""
|
|
29
|
+
|
|
30
|
+
# OpenRouter
|
|
31
|
+
# MODEL="openrouter:openai/gpt-4o"
|
|
32
|
+
# OPEN_ROUTER_API_KEY=""
|
|
33
|
+
|
|
34
|
+
# xAI
|
|
35
|
+
# MODEL="xai:grok-2-latest"
|
|
36
|
+
# XAI_API_KEY=""
|
|
37
|
+
|
|
38
|
+
# Ollama
|
|
39
|
+
# MODEL="ollama:llama3.2"
|
|
40
|
+
# OLLAMA_DEFAULT_BASE_URL="http://localhost:11434/v1";
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# Setup proxy if needed
|
|
44
|
+
# HTTPS_PROXY=http://localhost:7890
|
package/README.md
CHANGED
|
@@ -59,12 +59,12 @@ AI ->> User: There are 10 products in the database.
|
|
|
59
59
|
|
|
60
60
|
## Prerequisites
|
|
61
61
|
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
62
|
+
* [Node.js](https://nodejs.org) and npm installed on your machine
|
|
63
|
+
* An [OpenAI API key](https://platform.openai.com/api-keys) for interacting with OpenAI's services
|
|
64
|
+
* [uv](https://github.com/astral-sh/uv) python environment for running [MCP Server SQLite](https://github.com/modelcontextprotocol/servers/tree/main/src/sqlite)
|
|
65
|
+
* Optional dependencies (if running the example from source code):
|
|
66
|
+
* [Bun](https://bun.sh) for running unit tests & examples
|
|
67
|
+
* [Pnpm](https://pnpm.io) for package management
|
|
68
68
|
|
|
69
69
|
## Quick Start (No Installation Required)
|
|
70
70
|
|
|
@@ -105,6 +105,21 @@ Setup your OpenAI API key in the `.env.local` file:
|
|
|
105
105
|
OPENAI_API_KEY="" # Set your OpenAI API key here
|
|
106
106
|
```
|
|
107
107
|
|
|
108
|
+
#### Using Different Models
|
|
109
|
+
|
|
110
|
+
You can use different AI models by setting the `MODEL` environment variable along with the corresponding API key. The framework supports multiple providers:
|
|
111
|
+
|
|
112
|
+
* **OpenAI**: `MODEL="openai:gpt-4.1"` with `OPENAI_API_KEY`
|
|
113
|
+
* **Anthropic**: `MODEL="anthropic:claude-3-7-sonnet-latest"` with `ANTHROPIC_API_KEY`
|
|
114
|
+
* **Google Gemini**: `MODEL="gemini:gemini-2.0-flash"` with `GEMINI_API_KEY`
|
|
115
|
+
* **AWS Bedrock**: `MODEL="bedrock:us.amazon.nova-premier-v1:0"` with AWS credentials
|
|
116
|
+
* **DeepSeek**: `MODEL="deepseek:deepseek-chat"` with `DEEPSEEK_API_KEY`
|
|
117
|
+
* **OpenRouter**: `MODEL="openrouter:openai/gpt-4o"` with `OPEN_ROUTER_API_KEY`
|
|
118
|
+
* **xAI**: `MODEL="xai:grok-2-latest"` with `XAI_API_KEY`
|
|
119
|
+
* **Ollama**: `MODEL="ollama:llama3.2"` with `OLLAMA_DEFAULT_BASE_URL`
|
|
120
|
+
|
|
121
|
+
For detailed configuration examples, please refer to the `.env.local.example` file in this directory.
|
|
122
|
+
|
|
108
123
|
### Run the Example
|
|
109
124
|
|
|
110
125
|
```bash
|
|
@@ -124,7 +139,7 @@ The example supports the following command-line parameters:
|
|
|
124
139
|
| Parameter | Description | Default |
|
|
125
140
|
|-----------|-------------|---------|
|
|
126
141
|
| `--chat` | Run in interactive chat mode | Disabled (one-shot mode) |
|
|
127
|
-
| `--model <provider[:model]>` | AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini' | openai |
|
|
142
|
+
| `--model <provider[:model]>` | AI model to use in format 'provider\[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini' | openai |
|
|
128
143
|
| `--temperature <value>` | Temperature for model generation | Provider default |
|
|
129
144
|
| `--top-p <value>` | Top-p sampling value | Provider default |
|
|
130
145
|
| `--presence-penalty <value>` | Presence penalty value | Provider default |
|
|
@@ -150,13 +165,11 @@ echo "how many products?" | pnpm start
|
|
|
150
165
|
The following example demonstrates how to interact with an SQLite database:
|
|
151
166
|
|
|
152
167
|
```typescript
|
|
153
|
-
import assert from "node:assert";
|
|
154
168
|
import { join } from "node:path";
|
|
155
169
|
import { AIAgent, AIGNE, MCPAgent } from "@aigne/core";
|
|
156
170
|
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
|
|
157
171
|
|
|
158
172
|
const { OPENAI_API_KEY } = process.env;
|
|
159
|
-
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
|
|
160
173
|
|
|
161
174
|
const model = new OpenAIChatModel({
|
|
162
175
|
apiKey: OPENAI_API_KEY,
|
|
@@ -164,7 +177,12 @@ const model = new OpenAIChatModel({
|
|
|
164
177
|
|
|
165
178
|
const sqlite = await MCPAgent.from({
|
|
166
179
|
command: "uvx",
|
|
167
|
-
args: [
|
|
180
|
+
args: [
|
|
181
|
+
"-q",
|
|
182
|
+
"mcp-server-sqlite",
|
|
183
|
+
"--db-path",
|
|
184
|
+
join(process.cwd(), "usages.db"),
|
|
185
|
+
],
|
|
168
186
|
});
|
|
169
187
|
|
|
170
188
|
const aigne = new AIGNE({
|
|
@@ -177,7 +195,10 @@ const agent = AIAgent.from({
|
|
|
177
195
|
});
|
|
178
196
|
|
|
179
197
|
console.log(
|
|
180
|
-
await aigne.invoke(
|
|
198
|
+
await aigne.invoke(
|
|
199
|
+
agent,
|
|
200
|
+
"create a product table with columns name description and createdAt",
|
|
201
|
+
),
|
|
181
202
|
);
|
|
182
203
|
// output:
|
|
183
204
|
// {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aigne/example-mcp-sqlite",
|
|
3
|
-
"version": "1.14.6",
|
|
3
|
+
"version": "1.15.0",
|
|
4
4
|
"description": "A demonstration of using AIGNE Framework and Sqlite MCP Server to interact with a SQLite database",
|
|
5
5
|
"author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
|
|
6
6
|
"homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/mcp-sqlite",
|
|
@@ -16,14 +16,14 @@
|
|
|
16
16
|
"README.md"
|
|
17
17
|
],
|
|
18
18
|
"dependencies": {
|
|
19
|
-
"@aigne/agent-library": "^1.17.
|
|
20
|
-
"@aigne/
|
|
21
|
-
"@aigne/openai": "^0.
|
|
22
|
-
"@aigne/
|
|
19
|
+
"@aigne/agent-library": "^1.17.5",
|
|
20
|
+
"@aigne/cli": "^1.18.0",
|
|
21
|
+
"@aigne/openai": "^0.6.0",
|
|
22
|
+
"@aigne/core": "^1.28.0"
|
|
23
23
|
},
|
|
24
24
|
"devDependencies": {
|
|
25
25
|
"@types/bun": "^1.2.9",
|
|
26
|
-
"@aigne/test-utils": "^0.4.
|
|
26
|
+
"@aigne/test-utils": "^0.4.12"
|
|
27
27
|
},
|
|
28
28
|
"scripts": {
|
|
29
29
|
"start": "bun run index.ts",
|