@marcelo-ochoa/server-postgres 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -0
- package/dist/db.js +43 -0
- package/dist/handlers.js +55 -0
- package/dist/index.js +3 -0
- package/dist/server.js +36 -0
- package/dist/tools/awr.js +233 -0
- package/dist/tools/connect.js +24 -0
- package/dist/tools/explain.js +13 -0
- package/dist/tools/query.js +25 -0
- package/dist/tools/stats.js +54 -0
- package/dist/tools.js +117 -0
- package/package.json +38 -0
package/README.md
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
# PostgreSQL
|
|
2
|
+
|
|
3
|
+
A Model Context Protocol server that provides read-only access to PostgreSQL databases. This server enables LLMs to inspect database schemas and execute read-only queries.
|
|
4
|
+
|
|
5
|
+
## Components
|
|
6
|
+
|
|
7
|
+
### Tools
|
|
8
|
+
|
|
9
|
+
- **pg-query**
|
|
10
|
+
- Execute read-only SQL queries against the connected database
|
|
11
|
+
- Input: `sql` (string): The SQL query to execute
|
|
12
|
+
- All queries are executed within a READ ONLY transaction
|
|
13
|
+
|
|
14
|
+
- **pg-stats**
|
|
15
|
+
- Get statistics for a specific table
|
|
16
|
+
- Input: `name` (string): The name of the table to get statistics for
|
|
17
|
+
|
|
18
|
+
- **pg-explain**
|
|
19
|
+
- Explain Plan for a given SQL query
|
|
20
|
+
- Input: `sql` (string): The SQL query to explain
|
|
21
|
+
|
|
22
|
+
- **pg-connect**
|
|
23
|
+
- Connect to a PostgreSQL database
|
|
24
|
+
- Input: `connectionString` (string): The PostgreSQL connection string (e.g. postgresql://user:password@host:port/dbname)
|
|
25
|
+
|
|
26
|
+
- **pg-awr**
|
|
27
|
+
- Generate a PostgreSQL performance report similar to Oracle AWR. Includes database statistics, top queries (requires pg_stat_statements extension), table/index statistics, connection info, and optimization recommendations.
|
|
28
|
+
|
|
29
|
+
### Resources
|
|
30
|
+
|
|
31
|
+
The server provides schema information for each table in the database:
|
|
32
|
+
|
|
33
|
+
- **Table Schemas** (`postgres://<host>/<table>/schema`)
|
|
34
|
+
- JSON schema information for each table
|
|
35
|
+
- Includes column names and data types
|
|
36
|
+
- Automatically discovered from database metadata
|
|
37
|
+
|
|
38
|
+
## Configuration
|
|
39
|
+
|
|
40
|
+
### Usage with Claude Desktop
|
|
41
|
+
|
|
42
|
+
To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:
|
|
43
|
+
|
|
44
|
+
### Docker
|
|
45
|
+
|
|
46
|
+
* when running docker on macos, use host.docker.internal if the server is running on the host network (eg localhost)
|
|
47
|
+
* username/password can be added to the postgresql url with `postgresql://user:password@host:port/db-name`
|
|
48
|
+
|
|
49
|
+
```json
|
|
50
|
+
{
|
|
51
|
+
"mcpServers": {
|
|
52
|
+
"postgres": {
|
|
53
|
+
"command": "docker",
|
|
54
|
+
"args": [
|
|
55
|
+
"run",
|
|
56
|
+
"-i",
|
|
57
|
+
"--rm",
|
|
58
|
+
"mochoa/mcp-postgres",
|
|
59
|
+
"postgresql://host.docker.internal:5432/mydb"]
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
### NPX
|
|
66
|
+
|
|
67
|
+
```json
|
|
68
|
+
{
|
|
69
|
+
"mcpServers": {
|
|
70
|
+
"postgres": {
|
|
71
|
+
"command": "npx",
|
|
72
|
+
"args": [
|
|
73
|
+
"-y",
|
|
74
|
+
"@marcelo-ochoa/server-postgres",
|
|
75
|
+
"postgresql://localhost/mydb"
|
|
76
|
+
]
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
Replace `/mydb` with your database name.
|
|
83
|
+
|
|
84
|
+
### Usage with VS Code
|
|
85
|
+
|
|
86
|
+
For quick installation, use one of the one-click install buttons below...
|
|
87
|
+
|
|
88
|
+
[](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40localhost%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-postgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40localhost%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-postgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D&quality=insiders)
|
|
89
|
+
|
|
90
|
+
[](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40host.docker.internal%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fpostgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D) [](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40host.docker.internal%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fpostgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D&quality=insiders)
|
|
91
|
+
|
|
92
|
+
For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
|
|
93
|
+
|
|
94
|
+
Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
|
|
95
|
+
|
|
96
|
+
> Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
|
|
97
|
+
|
|
98
|
+
### Docker
|
|
99
|
+
|
|
100
|
+
**Note**: When using Docker and connecting to a PostgreSQL server on your host machine, use `host.docker.internal` instead of `localhost` in the connection URL.
|
|
101
|
+
|
|
102
|
+
```json
|
|
103
|
+
{
|
|
104
|
+
"mcp": {
|
|
105
|
+
"inputs": [
|
|
106
|
+
{
|
|
107
|
+
"type": "promptString",
|
|
108
|
+
"id": "pg_url",
|
|
109
|
+
"description": "PostgreSQL URL (e.g. postgresql://user:pass@host.docker.internal:5432/mydb)"
|
|
110
|
+
}
|
|
111
|
+
],
|
|
112
|
+
"servers": {
|
|
113
|
+
"postgres": {
|
|
114
|
+
"command": "docker",
|
|
115
|
+
"args": [
|
|
116
|
+
"run",
|
|
117
|
+
"-i",
|
|
118
|
+
"--rm",
|
|
119
|
+
"mochoa/mcp-postgres",
|
|
120
|
+
"${input:pg_url}"
|
|
121
|
+
]
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
### NPX
|
|
129
|
+
|
|
130
|
+
```json
|
|
131
|
+
{
|
|
132
|
+
"mcp": {
|
|
133
|
+
"inputs": [
|
|
134
|
+
{
|
|
135
|
+
"type": "promptString",
|
|
136
|
+
"id": "pg_url",
|
|
137
|
+
"description": "PostgreSQL URL (e.g. postgresql://user:pass@localhost:5432/mydb)"
|
|
138
|
+
}
|
|
139
|
+
],
|
|
140
|
+
"servers": {
|
|
141
|
+
"postgres": {
|
|
142
|
+
"command": "npx",
|
|
143
|
+
"args": [
|
|
144
|
+
"-y",
|
|
145
|
+
"@marcelo-ochoa/server-postgres",
|
|
146
|
+
"${input:pg_url}"
|
|
147
|
+
]
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
## Building
|
|
155
|
+
|
|
156
|
+
Docker:
|
|
157
|
+
|
|
158
|
+
```sh
|
|
159
|
+
docker build -t mochoa/mcp-postgres -f src/postgres/Dockerfile .
|
|
160
|
+
```
|
|
161
|
+
|
|
162
|
+
## License
|
|
163
|
+
|
|
164
|
+
This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
|
package/dist/db.js
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import pg from "pg";

// Module-wide connection state: one active pool (and its derived resource
// base URL) at a time, shared by every tool handler.
let pool = undefined;
let resourceBaseUrl = undefined;

/**
 * Create a pg connection pool for `connectionString`, verify connectivity by
 * checking a client out once, and derive the base URL used to build resource
 * URIs (protocol forced to "postgres:", password blanked).
 */
export async function initializePool(connectionString) {
    pool = new pg.Pool({ connectionString });
    // Fail fast if the server is unreachable: acquire and immediately release.
    const probe = await pool.connect();
    probe.release();
    const baseUrl = new URL(connectionString);
    baseUrl.protocol = "postgres:";
    baseUrl.password = "";
    resourceBaseUrl = baseUrl;
}

/** Return the active pool; throws if initializePool has not run yet. */
export function getPool() {
    if (!pool) {
        throw new Error("Postgres connection pool not initialized.");
    }
    return pool;
}

/** Return the resource base URL; throws if initializePool has not run yet. */
export function getResourceBaseUrl() {
    if (!resourceBaseUrl) {
        throw new Error("Resource Base URL not initialized.");
    }
    return resourceBaseUrl;
}

/**
 * Check a client out of the pool, invoke `callback(client)`, and release the
 * client regardless of how the callback exits.
 */
export async function withConnection(callback) {
    const client = await getPool().connect();
    try {
        return await callback(client);
    }
    finally {
        client.release();
    }
}

/** Drain and forget the current pool, if any. Safe to call when disconnected. */
export async function closePool() {
    if (pool) {
        await pool.end();
        pool = undefined;
    }
}
|
package/dist/handlers.js
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { withConnection, getResourceBaseUrl } from "./db.js";
import { queryHandler } from "./tools/query.js";
import { statsHandler } from "./tools/stats.js";
import { connectHandler } from "./tools/connect.js";
import { explainHandler } from "./tools/explain.js";
import { awrHandler } from "./tools/awr.js";

// Tool name -> handler dispatch table.
const toolHandlers = {
    "pg-query": queryHandler,
    "pg-stats": statsHandler,
    "pg-explain": explainHandler,
    "pg-connect": connectHandler,
    "pg-awr": awrHandler,
};

/** Dispatch a CallTool request to its handler; throws for unknown tool names. */
export const callToolHandler = async (request) => {
    const toolName = request.params.name;
    const handler = toolHandlers[toolName];
    if (!handler) {
        throw new Error(`Unknown tool: ${toolName}`);
    }
    return handler(request);
};

// Path suffix marking a resource URI as a table-schema resource.
const SCHEMA_PATH = "schema";

/** List one schema resource per table in the database's public schema. */
export const listResourcesHandler = async (request) => {
    const resourceBaseUrl = getResourceBaseUrl();
    return await withConnection(async (client) => {
        const result = await client.query("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'");
        const resources = result.rows.map((row) => ({
            uri: new URL(`${row.table_name}/${SCHEMA_PATH}`, resourceBaseUrl).href,
            mimeType: "application/json",
            name: `"${row.table_name}" database schema`,
        }));
        return { resources };
    });
};

/** Read a table-schema resource: column names and data types, as JSON text. */
export const readResourceHandler = async (request) => {
    const resourceUrl = new URL(request.params.uri);
    // URI shape is <base>/<table>/schema; the last two path segments carry the data.
    const segments = resourceUrl.pathname.split("/");
    const leaf = segments.pop();
    const tableName = segments.pop();
    if (leaf !== SCHEMA_PATH) {
        throw new Error("Invalid resource URI");
    }
    return await withConnection(async (client) => {
        const result = await client.query("SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1", [tableName]);
        return {
            contents: [
                {
                    uri: request.params.uri,
                    mimeType: "application/json",
                    text: JSON.stringify(result.rows, null, 2),
                },
            ],
        };
    });
};
|
package/dist/index.js
ADDED
package/dist/server.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { CallToolRequestSchema, ListResourcesRequestSchema, ListToolsRequestSchema, ReadResourceRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
import { initializePool } from "./db.js";
import { listResourcesHandler, readResourceHandler, callToolHandler } from "./handlers.js";
import { tools } from "./tools.js";

// MCP server instance advertising resource and tool capabilities.
const server = new Server(
    { name: "postgres-server", version: "0.6.2" },
    { capabilities: { resources: {}, tools: {} } },
);

// Wire each protocol request type to its handler.
server.setRequestHandler(ListResourcesRequestSchema, listResourcesHandler);
server.setRequestHandler(ReadResourceRequestSchema, readResourceHandler);
server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools }));
server.setRequestHandler(CallToolRequestSchema, callToolHandler);

/**
 * Entry point: read the database URL from argv, initialize the connection
 * pool, then serve MCP over stdio. Exits with status 1 when the URL is
 * missing or the initial connection fails.
 */
export async function runServer() {
    const [databaseUrl] = process.argv.slice(2);
    if (databaseUrl === undefined) {
        console.error("Please provide a database URL as a command-line argument");
        process.exit(1);
    }
    try {
        await initializePool(databaseUrl);
    }
    catch (error) {
        console.error("Failed to initialize database pool:", error);
        process.exit(1);
    }
    const transport = new StdioServerTransport();
    await server.connect(transport);
}
|
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
import { withConnection } from "../db.js";

/**
 * Tool handler for "pg-awr": build an AWR-style performance snapshot of the
 * connected database and return it as a JSON tool result.
 *
 * Sections: database-wide stats, top queries (only when pg_stat_statements is
 * installed and loaded), table stats, index stats, connection activity,
 * bgwriter/checkpoint stats (version-dependent views), and unused indexes.
 * Any failure yields an isError result with the error message.
 */
export const awrHandler = async (request) => {
    try {
        return await withConnection(async (client) => {
            const report = {
                timestamp: new Date().toISOString(),
                database_statistics: {},
                top_queries: [],
                table_statistics: [],
                index_statistics: [],
                connection_info: {},
            };

            // Is the pg_stat_statements extension installed in this database?
            const extensionCheck = await client.query(`
                SELECT EXISTS (
                    SELECT 1 FROM pg_extension WHERE extname = 'pg_stat_statements'
                ) as has_extension
            `);
            const hasPgStatStatements = extensionCheck.rows[0].has_extension;

            // 1. Database-wide statistics for the current database.
            const dbStats = await client.query(`
                SELECT
                    datname,
                    numbackends as active_connections,
                    xact_commit as transactions_committed,
                    xact_rollback as transactions_rolled_back,
                    blks_read as blocks_read,
                    blks_hit as blocks_hit,
                    CASE
                        WHEN (blks_read + blks_hit) > 0
                        THEN ROUND(100.0 * blks_hit / (blks_read + blks_hit), 2)
                        ELSE 0
                    END as cache_hit_ratio,
                    tup_returned as tuples_returned,
                    tup_fetched as tuples_fetched,
                    tup_inserted as tuples_inserted,
                    tup_updated as tuples_updated,
                    tup_deleted as tuples_deleted,
                    conflicts,
                    temp_files,
                    temp_bytes,
                    deadlocks,
                    blk_read_time,
                    blk_write_time
                FROM pg_stat_database
                WHERE datname = current_database()
            `);
            report.database_statistics = dbStats.rows[0];

            // 2. Top queries by total execution time (pg_stat_statements only).
            if (hasPgStatStatements) {
                try {
                    const topQueries = await client.query(`
                        SELECT
                            queryid,
                            LEFT(query, 100) as query_text,
                            calls,
                            ROUND(total_exec_time::numeric, 2) as total_time_ms,
                            ROUND(mean_exec_time::numeric, 2) as mean_time_ms,
                            ROUND(min_exec_time::numeric, 2) as min_time_ms,
                            ROUND(max_exec_time::numeric, 2) as max_time_ms,
                            ROUND(stddev_exec_time::numeric, 2) as stddev_time_ms,
                            rows as total_rows,
                            ROUND((100.0 * shared_blks_hit / NULLIF(shared_blks_hit + shared_blks_read, 0))::numeric, 2) as buffer_hit_ratio,
                            shared_blks_read,
                            shared_blks_hit,
                            shared_blks_dirtied,
                            shared_blks_written,
                            temp_blks_read,
                            temp_blks_written
                        FROM pg_stat_statements
                        WHERE dbid = (SELECT oid FROM pg_database WHERE datname = current_database())
                        ORDER BY total_exec_time DESC
                        LIMIT 20
                    `);
                    report.top_queries = topQueries.rows;
                }
                catch (error) {
                    // Extension installed but library not preloaded: querying the view fails.
                    report.top_queries_note = `pg_stat_statements extension exists but is not properly loaded. Error: ${error.message}. Add 'shared_preload_libraries = pg_stat_statements' to postgresql.conf and restart PostgreSQL.`;
                }
            }
            else {
                report.top_queries_note = "pg_stat_statements extension not available. Install with: CREATE EXTENSION pg_stat_statements;";
            }

            // 3. Table statistics, busiest tables first.
            const tableStats = await client.query(`
                SELECT
                    schemaname,
                    relname as table_name,
                    seq_scan,
                    seq_tup_read,
                    idx_scan,
                    idx_tup_fetch,
                    n_tup_ins as inserts,
                    n_tup_upd as updates,
                    n_tup_del as deletes,
                    n_tup_hot_upd as hot_updates,
                    n_live_tup as live_tuples,
                    n_dead_tup as dead_tuples,
                    ROUND(100.0 * n_dead_tup / NULLIF(n_live_tup + n_dead_tup, 0), 2) as dead_tuple_ratio,
                    last_vacuum,
                    last_autovacuum,
                    last_analyze,
                    last_autoanalyze,
                    vacuum_count,
                    autovacuum_count,
                    analyze_count,
                    autoanalyze_count
                FROM pg_stat_user_tables
                ORDER BY seq_scan + COALESCE(idx_scan, 0) DESC
                LIMIT 20
            `);
            report.table_statistics = tableStats.rows;

            // 4. Index statistics, most-scanned first.
            const indexStats = await client.query(`
                SELECT
                    schemaname,
                    relname as table_name,
                    indexrelname as index_name,
                    idx_scan as index_scans,
                    idx_tup_read as tuples_read,
                    idx_tup_fetch as tuples_fetched,
                    pg_size_pretty(pg_relation_size(indexrelid)) as index_size
                FROM pg_stat_user_indexes
                ORDER BY idx_scan DESC
                LIMIT 20
            `);
            report.index_statistics = indexStats.rows;

            // 5. Connection and activity summary for the current database.
            const connInfo = await client.query(`
                SELECT
                    COUNT(*) as total_connections,
                    COUNT(*) FILTER (WHERE state = 'active') as active,
                    COUNT(*) FILTER (WHERE state = 'idle') as idle,
                    COUNT(*) FILTER (WHERE state = 'idle in transaction') as idle_in_transaction,
                    COUNT(*) FILTER (WHERE wait_event_type IS NOT NULL) as waiting,
                    MAX(EXTRACT(EPOCH FROM (now() - query_start))) as longest_query_seconds,
                    MAX(EXTRACT(EPOCH FROM (now() - xact_start))) as longest_transaction_seconds
                FROM pg_stat_activity
                WHERE datname = current_database()
            `);
            report.connection_info = connInfo.rows[0];

            // 6. Background writer and checkpoint statistics.
            // In PostgreSQL 17+, checkpoint stats moved to pg_stat_checkpointer
            // and backend buffer stats moved to pg_stat_io.
            const versionResult = await client.query('SHOW server_version_num');
            const versionNum = parseInt(versionResult.rows[0].server_version_num, 10);
            if (versionNum >= 170000) {
                // PostgreSQL 17+: merge the three newer views.
                const checkpointerStats = await client.query(`
                    SELECT
                        num_timed as checkpoints_timed,
                        num_requested as checkpoints_requested,
                        write_time as checkpoint_write_time,
                        sync_time as checkpoint_sync_time,
                        buffers_written as buffers_checkpoint,
                        stats_reset
                    FROM pg_stat_checkpointer
                `);
                const bgWriterStats = await client.query(`
                    SELECT
                        buffers_clean,
                        maxwritten_clean,
                        buffers_alloc
                    FROM pg_stat_bgwriter
                `);
                const ioStats = await client.query(`
                    SELECT
                        SUM(reads) FILTER (WHERE backend_type = 'client backend') as buffers_backend_read,
                        SUM(writes) FILTER (WHERE backend_type = 'client backend') as buffers_backend_write,
                        SUM(fsyncs) FILTER (WHERE backend_type = 'client backend') as buffers_backend_fsync
                    FROM pg_stat_io
                `);
                report.bgwriter_statistics = {
                    ...checkpointerStats.rows[0],
                    ...bgWriterStats.rows[0],
                    ...ioStats.rows[0]
                };
            }
            else {
                // PostgreSQL < 17: pg_stat_bgwriter carries everything.
                const bgWriterStats = await client.query(`
                    SELECT
                        checkpoints_timed,
                        checkpoints_req as checkpoints_requested,
                        checkpoint_write_time,
                        checkpoint_sync_time,
                        buffers_checkpoint,
                        buffers_clean,
                        maxwritten_clean,
                        buffers_backend,
                        buffers_backend_fsync,
                        buffers_alloc,
                        stats_reset
                    FROM pg_stat_bgwriter
                `);
                report.bgwriter_statistics = bgWriterStats.rows[0];
            }

            // 7. Never-scanned non-PK indexes: candidates for removal.
            const unusedIndexes = await client.query(`
                SELECT
                    schemaname,
                    relname as table_name,
                    indexrelname as index_name,
                    idx_scan as scans,
                    pg_size_pretty(pg_relation_size(indexrelid)) as index_size
                FROM pg_stat_user_indexes
                WHERE idx_scan = 0
                AND indexrelname NOT LIKE '%_pkey'
                ORDER BY pg_relation_size(indexrelid) DESC
                LIMIT 10
            `);
            report.unused_indexes = unusedIndexes.rows;

            return {
                content: [{
                    type: "text",
                    text: JSON.stringify(report, null, 2),
                    mimeType: "application/json"
                }],
                isError: false,
            };
        });
    }
    catch (error) {
        return {
            content: [{
                type: "text",
                text: `Error generating PostgreSQL performance report: ${error?.message ?? error}`
            }],
            isError: true,
        };
    }
};
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { initializePool, closePool } from "../db.js";

/**
 * Tool handler for "pg-connect": tear down any existing pool and connect to
 * the database described by the `connectionString` argument.
 *
 * Returns an MCP tool result; isError is true when the argument is missing
 * or invalid, or when the connection attempt fails.
 */
export const connectHandler = async (request) => {
    const connectionString = request.params.arguments?.connectionString;
    if (typeof connectionString !== "string" || !connectionString) {
        return {
            content: [{ type: "text", text: "Missing or invalid connectionString argument." }],
            isError: true,
        };
    }
    try {
        await closePool();
        await initializePool(connectionString);
        // Security: never echo raw credentials back into the LLM conversation;
        // mask the password portion of the URL before reporting success.
        return {
            content: [{ type: "text", text: `Successfully connected to Postgres DB: ${redactPassword(connectionString)}` }],
            isError: false,
        };
    }
    catch (err) {
        return {
            content: [{ type: "text", text: `Failed to connect: ${err}` }],
            isError: true,
        };
    }
};

// Replace the password in a connection URL with "*****". If the string is not
// a parseable URL, return it unchanged rather than failing the tool call.
function redactPassword(connectionString) {
    try {
        const url = new URL(connectionString);
        if (url.password) {
            url.password = "*****";
        }
        return url.toString();
    }
    catch {
        return connectionString;
    }
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { withConnection } from "../db.js";

/**
 * Tool handler for "pg-explain": return the JSON execution plan for a SQL
 * statement.
 *
 * NOTE: EXPLAIN with the ANALYZE option actually EXECUTES the statement, so
 * this runs inside a READ ONLY transaction that is always rolled back —
 * consistent with queryHandler — to keep a DML statement passed here from
 * writing to a server that is advertised as read-only.
 */
export const explainHandler = async (request) => {
    // Strip a trailing semicolon so the statement composes into the EXPLAIN.
    const sql = typeof request.params.arguments?.sql === "string"
        ? request.params.arguments.sql.replace(/;\s*$/, "")
        : "";
    return await withConnection(async (client) => {
        try {
            await client.query("BEGIN TRANSACTION READ ONLY");
            const result = await client.query(`EXPLAIN (ANALYZE, VERBOSE, BUFFERS, FORMAT JSON) ${sql}`);
            return {
                content: [{ type: "text", text: JSON.stringify(result.rows[0], null, 2), mimeType: "application/json" }],
                isError: false,
            };
        }
        finally {
            // Best effort: never mask the EXPLAIN's own error with a rollback failure.
            client
                .query("ROLLBACK")
                .catch((error) => console.warn("Could not roll back transaction:", error));
        }
    });
};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { encode } from "@toon-format/toon";
import { withConnection } from "../db.js";

/**
 * Tool handler for "pg-query": run a SQL statement inside a READ ONLY
 * transaction and return the resulting rows TOON-encoded.
 *
 * The transaction is always rolled back, so nothing the statement does can
 * commit. Query errors propagate to the caller unchanged.
 */
export const queryHandler = async (request) => {
    // Strip a trailing semicolon so the statement composes cleanly.
    const sql = typeof request.params.arguments?.sql === "string"
        ? request.params.arguments.sql.replace(/;\s*$/, "")
        : "";
    return await withConnection(async (client) => {
        // The original had a redundant `catch (error) { throw error; }` — a
        // no-op rethrow — which is removed; the finally block is sufficient.
        try {
            await client.query("BEGIN TRANSACTION READ ONLY");
            const result = await client.query(sql);
            return {
                content: [{ type: "text", text: encode(result.rows) }],
                isError: false,
            };
        }
        finally {
            // Best effort: never mask the query's own error with a rollback failure.
            client
                .query("ROLLBACK")
                .catch((error) => console.warn("Could not roll back transaction:", error));
        }
    });
};
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { withConnection } from "../db.js";

/**
 * Tool handler for "pg-stats": gather planner/table, index, and column
 * statistics for one public-schema table and return them as one JSON document.
 */
export const statsHandler = async (request) => {
    const tableName = request.params.arguments?.name;
    return await withConnection(async (client) => {
        // Single round trip: the server assembles the whole document with
        // json_build_object/json_agg, parameterized on the table name.
        const result = await client.query(`
            SELECT json_build_object(
                'table_stats', (
                    SELECT json_build_object(
                        'schema_name', n.nspname,
                        'table_name', c.relname,
                        'num_rows', c.reltuples,
                        'blocks', c.relpages,
                        'last_analyzed', s.last_analyze
                    )
                    FROM pg_class c
                    JOIN pg_namespace n ON n.oid = c.relnamespace
                    LEFT JOIN pg_stat_user_tables s ON s.relid = c.oid
                    WHERE c.relname = $1 AND n.nspname = 'public'
                ),
                'index_stats', (
                    SELECT json_agg(
                        json_build_object(
                            'index_name', c2.relname,
                            'num_rows', c2.reltuples,
                            'blocks', c2.relpages,
                            'index_size', pg_size_pretty(pg_relation_size(c2.oid))
                        )
                    )
                    FROM pg_index i
                    JOIN pg_class c ON c.oid = i.indrelid
                    JOIN pg_class c2 ON c2.oid = i.indexrelid
                    JOIN pg_namespace n ON n.oid = c.relnamespace
                    WHERE c.relname = $1 AND n.nspname = 'public'
                ),
                'column_stats', (
                    SELECT json_agg(
                        json_build_object(
                            'column_name', attname,
                            'null_frac', null_frac,
                            'avg_width', avg_width,
                            'n_distinct', n_distinct
                        )
                    )
                    FROM pg_stats
                    WHERE tablename = $1 AND schemaname = 'public'
                )
            ) as stats_json
        `, [tableName]);
        return {
            content: [{ type: "text", text: JSON.stringify(result.rows[0].stats_json, null, 2), mimeType: "application/json" }],
            isError: false,
        };
    });
};
|
package/dist/tools.js
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
// Telemetry input properties shared by every tool schema: which MCP client
// implementation and which language model issued the call.
const MCP_CLIENT_PROP = {
    type: "string",
    description: "Specify the name and version of the MCP client implementation being used (e.g. Copilot, Claude, Cline...)",
    default: "UNKNOWN-MCP-CLIENT",
};
const MODEL_PROP = {
    type: "string",
    description: "The name (and version) of the language model being used by the MCP client to process requests (e.g. gpt-4.1, claude-sonnet-4, llama4...)",
    default: "UNKNOWN-LLM",
};

// Tool catalog advertised via ListTools. Shapes are deep-equal to the
// hand-expanded originals; the shared telemetry props are spread per schema.
export const tools = [
    {
        name: "pg-query",
        description: "Run a read-only SQL query",
        inputSchema: {
            type: "object",
            properties: {
                sql: {
                    type: "string",
                    description: "The SQL query to execute"
                },
                mcp_client: { ...MCP_CLIENT_PROP },
                model: { ...MODEL_PROP },
            },
            required: ["sql", "mcp_client", "model"]
        },
    },
    {
        name: "pg-stats",
        description: "Get statistics for a specific table",
        inputSchema: {
            type: "object",
            properties: {
                name: {
                    type: "string",
                    description: "The name of the table to get statistics for"
                },
                mcp_client: { ...MCP_CLIENT_PROP },
                model: { ...MODEL_PROP },
            },
            required: ["name"]
        },
    },
    {
        name: "pg-explain",
        description: "Explain Plan for a given SQL query",
        inputSchema: {
            type: "object",
            properties: {
                sql: {
                    type: "string",
                    description: "The SQL query to explain"
                },
                mcp_client: { ...MCP_CLIENT_PROP },
                model: { ...MODEL_PROP },
            },
            required: ["sql", "mcp_client", "model"]
        },
    },
    {
        name: "pg-connect",
        description: "Connect to a PostgreSQL database",
        inputSchema: {
            type: "object",
            properties: {
                connectionString: {
                    type: "string",
                    description: "The PostgreSQL connection string (e.g. postgresql://user:password@host:port/dbname)"
                },
                mcp_client: { ...MCP_CLIENT_PROP },
                model: { ...MODEL_PROP },
            },
            required: ["connectionString"]
        },
    },
    {
        name: "pg-awr",
        description: "Generate a PostgreSQL performance report similar to Oracle AWR. Includes database statistics, top queries (requires pg_stat_statements extension), table/index statistics, connection info, and optimization recommendations.",
        inputSchema: {
            type: "object",
            properties: {
                mcp_client: { ...MCP_CLIENT_PROP },
                model: { ...MODEL_PROP },
            },
        },
    },
];
|
package/package.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@marcelo-ochoa/server-postgres",
|
|
3
|
+
"version": "0.6.2",
|
|
4
|
+
"description": "MCP server for interacting with PostgreSQL databases",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"read-only-mcp",
|
|
7
|
+
"postgres-database",
|
|
8
|
+
"ai-agent",
|
|
9
|
+
"llm-tool",
|
|
10
|
+
"rag"
|
|
11
|
+
],
|
|
12
|
+
"license": "MIT",
|
|
13
|
+
"author": "Marcelo Fabian Ochoa",
|
|
14
|
+
"homepage": "https://modelcontextprotocol.io",
|
|
15
|
+
"bugs": "https://github.com/marcelo-ochoa/servers/issues",
|
|
16
|
+
"type": "module",
|
|
17
|
+
"bin": {
|
|
18
|
+
"mcp-server-postgres": "dist/index.js"
|
|
19
|
+
},
|
|
20
|
+
"files": [
|
|
21
|
+
"dist"
|
|
22
|
+
],
|
|
23
|
+
"scripts": {
|
|
24
|
+
"build": "tsc && shx chmod +x dist/*.js",
|
|
25
|
+
"prepare": "npm run build",
|
|
26
|
+
"watch": "tsc --watch"
|
|
27
|
+
},
|
|
28
|
+
"dependencies": {
|
|
29
|
+
"@modelcontextprotocol/sdk": "1.0.1",
|
|
30
|
+
"@toon-format/toon": "^1.0.0",
|
|
31
|
+
"pg": "^8.13.0"
|
|
32
|
+
},
|
|
33
|
+
"devDependencies": {
|
|
34
|
+
"@types/pg": "^8.11.10",
|
|
35
|
+
"shx": "^0.3.4",
|
|
36
|
+
"typescript": "^5.6.2"
|
|
37
|
+
}
|
|
38
|
+
}
|