database-universal-mcp 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- database_universal_mcp-1.0.0.dist-info/METADATA +34 -0
- database_universal_mcp-1.0.0.dist-info/RECORD +6 -0
- database_universal_mcp-1.0.0.dist-info/WHEEL +4 -0
- database_universal_mcp-1.0.0.dist-info/entry_points.txt +2 -0
- database_universal_mcp-1.0.0.dist-info/licenses/LICENSE +17 -0
- server.py +502 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: database-universal-mcp
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: MCP server for database universal. Features query sql, list tables, describe table. From MEOK AI Labs.
|
|
5
|
+
Project-URL: Homepage, https://meok.ai
|
|
6
|
+
Project-URL: Repository, https://github.com/CSOAI-ORG/database-universal-mcp
|
|
7
|
+
Author-email: MEOK AI Labs <nicholas@meok.ai>
|
|
8
|
+
License: MIT License
|
|
9
|
+
|
|
10
|
+
Copyright (c) 2026 MEOK AI Labs (meok.ai)
|
|
11
|
+
|
|
12
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
13
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
14
|
+
in the Software without restriction, including without limitation the rights
|
|
15
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
16
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
17
|
+
furnished to do so, subject to the following conditions:
|
|
18
|
+
|
|
19
|
+
The above copyright notice and this permission notice shall be included in all
|
|
20
|
+
copies or substantial portions of the Software.
|
|
21
|
+
|
|
22
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
23
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
24
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
|
25
|
+
License-File: LICENSE
|
|
26
|
+
Keywords: ai,database,mcp,mcp/,meok,universal
|
|
27
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
28
|
+
Classifier: Operating System :: OS Independent
|
|
29
|
+
Classifier: Programming Language :: Python :: 3
|
|
30
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
31
|
+
Requires-Python: >=3.10
|
|
32
|
+
Requires-Dist: mcp>=1.0.0
|
|
33
|
+
Requires-Dist: psycopg2-binary>=2.9.0
|
|
34
|
+
Requires-Dist: pymysql>=1.0.0
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
server.py,sha256=kPo0XrAaCJY9ol8EOEm6l5rBD22y95j52mVmZHYrbg8,17001
|
|
2
|
+
database_universal_mcp-1.0.0.dist-info/METADATA,sha256=_5F_okBXOgUtJb8rgUWIbhIAr3Pj9y_MH1VJ8OlWDPQ,1684
|
|
3
|
+
database_universal_mcp-1.0.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
4
|
+
database_universal_mcp-1.0.0.dist-info/entry_points.txt,sha256=mkAfiChNUAczJMAXplhcdAwuBALMKD32DG139_zYBMQ,55
|
|
5
|
+
database_universal_mcp-1.0.0.dist-info/licenses/LICENSE,sha256=j3ubn5qaWJ2R1iHLwwnUIwaFCGnaPWGUP4rLLcmYL9k,820
|
|
6
|
+
database_universal_mcp-1.0.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 MEOK AI Labs (meok.ai)
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
server.py
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Universal Database MCP Server
|
|
4
|
+
===============================
|
|
5
|
+
Connect to SQLite, PostgreSQL, or MySQL databases from AI agents.
|
|
6
|
+
Query, explore schema, insert data, and export results to CSV.
|
|
7
|
+
|
|
8
|
+
By MEOK AI Labs | https://meok.ai
|
|
9
|
+
|
|
10
|
+
Install: pip install mcp
|
|
11
|
+
Optional: pip install psycopg2-binary mysql-connector-python
|
|
12
|
+
Run: python server.py
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
import sys, os
# NOTE(review): the next two lines prepend a directory under the user's home
# to sys.path and import an opaque `auth_middleware` module from it. That
# module is NOT part of this package; check_access() from it runs on every
# tool call with the caller's api_key. Verify what it does (and that the
# path is trusted) before deploying this server.
sys.path.insert(0, os.path.expanduser('~/clawd/meok-labs-engine/shared'))
from auth_middleware import check_access
|
|
19
|
+
|
|
20
|
+
import csv
|
|
21
|
+
import io
|
|
22
|
+
import json
|
|
23
|
+
import os
|
|
24
|
+
import re
|
|
25
|
+
import sqlite3
|
|
26
|
+
import tempfile
|
|
27
|
+
from datetime import datetime, timedelta
|
|
28
|
+
from typing import Any, Optional
|
|
29
|
+
from collections import defaultdict
|
|
30
|
+
from urllib.parse import urlparse
|
|
31
|
+
from mcp.server.fastmcp import FastMCP
|
|
32
|
+
|
|
33
|
+
# ---------------------------------------------------------------------------
|
|
34
|
+
# Rate limiting
|
|
35
|
+
# ---------------------------------------------------------------------------
|
|
36
|
+
FREE_DAILY_LIMIT = 30
|
|
37
|
+
_usage: dict[str, list[datetime]] = defaultdict(list)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _check_rate_limit(caller: str = "anonymous") -> Optional[str]:
|
|
41
|
+
now = datetime.now()
|
|
42
|
+
cutoff = now - timedelta(days=1)
|
|
43
|
+
_usage[caller] = [t for t in _usage[caller] if t > cutoff]
|
|
44
|
+
if len(_usage[caller]) >= FREE_DAILY_LIMIT:
|
|
45
|
+
return f"Free tier limit reached ({FREE_DAILY_LIMIT}/day). Upgrade to Pro: https://mcpize.com/database-universal-mcp/pro"
|
|
46
|
+
_usage[caller].append(now)
|
|
47
|
+
return None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# ---------------------------------------------------------------------------
|
|
51
|
+
# Safety: SQL query validation
|
|
52
|
+
# ---------------------------------------------------------------------------
|
|
53
|
+
_DANGEROUS_PATTERNS = [
|
|
54
|
+
r"\bDROP\s+(TABLE|DATABASE|INDEX|SCHEMA)\b",
|
|
55
|
+
r"\bTRUNCATE\b",
|
|
56
|
+
r"\bALTER\s+TABLE\b.*\bDROP\b",
|
|
57
|
+
r"\bDELETE\s+FROM\b(?!.*\bWHERE\b)", # DELETE without WHERE
|
|
58
|
+
r"\bGRANT\b",
|
|
59
|
+
r"\bREVOKE\b",
|
|
60
|
+
]
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _validate_query(sql: str, allow_write: bool = False) -> Optional[str]:
|
|
64
|
+
"""Validate SQL query for safety. Returns error message if unsafe."""
|
|
65
|
+
sql_upper = sql.strip().upper()
|
|
66
|
+
|
|
67
|
+
for pattern in _DANGEROUS_PATTERNS:
|
|
68
|
+
if re.search(pattern, sql_upper, re.IGNORECASE):
|
|
69
|
+
return f"Blocked: dangerous SQL pattern detected. Use with caution or upgrade to Pro for unrestricted access."
|
|
70
|
+
|
|
71
|
+
if not allow_write:
|
|
72
|
+
write_keywords = ["INSERT", "UPDATE", "DELETE", "CREATE", "ALTER", "DROP"]
|
|
73
|
+
first_word = sql_upper.split()[0] if sql_upper.split() else ""
|
|
74
|
+
if first_word in write_keywords:
|
|
75
|
+
return f"Write operations require explicit allow_write=True for safety."
|
|
76
|
+
|
|
77
|
+
return None
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
# ---------------------------------------------------------------------------
|
|
81
|
+
# Database connection helpers
|
|
82
|
+
# ---------------------------------------------------------------------------
|
|
83
|
+
|
|
84
|
+
def _get_connection(connection_string: str):
|
|
85
|
+
"""Create a database connection from a connection string.
|
|
86
|
+
|
|
87
|
+
Supported formats:
|
|
88
|
+
- sqlite:///path/to/db.sqlite
|
|
89
|
+
- sqlite:path/to/db.sqlite
|
|
90
|
+
- postgresql://user:pass@host:port/dbname
|
|
91
|
+
- mysql://user:pass@host:port/dbname
|
|
92
|
+
- /path/to/file.db (treated as SQLite)
|
|
93
|
+
"""
|
|
94
|
+
cs = connection_string.strip()
|
|
95
|
+
|
|
96
|
+
# Plain file path -> SQLite
|
|
97
|
+
if cs.startswith("/") or cs.startswith("./") or cs.endswith(".db") or cs.endswith(".sqlite"):
|
|
98
|
+
return sqlite3.connect(cs), "sqlite"
|
|
99
|
+
|
|
100
|
+
parsed = urlparse(cs)
|
|
101
|
+
scheme = parsed.scheme.lower()
|
|
102
|
+
|
|
103
|
+
if scheme in ("sqlite", "sqlite3"):
|
|
104
|
+
path = parsed.path
|
|
105
|
+
if path.startswith("///"):
|
|
106
|
+
path = path[2:] # sqlite:///absolute/path
|
|
107
|
+
elif path.startswith("/"):
|
|
108
|
+
path = path # sqlite:/absolute/path
|
|
109
|
+
return sqlite3.connect(path), "sqlite"
|
|
110
|
+
|
|
111
|
+
elif scheme in ("postgresql", "postgres", "psycopg2"):
|
|
112
|
+
try:
|
|
113
|
+
import psycopg2
|
|
114
|
+
except ImportError:
|
|
115
|
+
raise ImportError("Install psycopg2-binary: pip install psycopg2-binary")
|
|
116
|
+
conn = psycopg2.connect(
|
|
117
|
+
host=parsed.hostname or "localhost",
|
|
118
|
+
port=parsed.port or 5432,
|
|
119
|
+
user=parsed.username or "postgres",
|
|
120
|
+
password=parsed.password or "",
|
|
121
|
+
dbname=parsed.path.lstrip("/") or "postgres")
|
|
122
|
+
conn.autocommit = True
|
|
123
|
+
return conn, "postgresql"
|
|
124
|
+
|
|
125
|
+
elif scheme in ("mysql", "mysql+pymysql"):
|
|
126
|
+
try:
|
|
127
|
+
import mysql.connector
|
|
128
|
+
except ImportError:
|
|
129
|
+
raise ImportError("Install mysql-connector-python: pip install mysql-connector-python")
|
|
130
|
+
conn = mysql.connector.connect(
|
|
131
|
+
host=parsed.hostname or "localhost",
|
|
132
|
+
port=parsed.port or 3306,
|
|
133
|
+
user=parsed.username or "root",
|
|
134
|
+
password=parsed.password or "",
|
|
135
|
+
database=parsed.path.lstrip("/") or "")
|
|
136
|
+
conn.autocommit = True
|
|
137
|
+
return conn, "mysql"
|
|
138
|
+
|
|
139
|
+
else:
|
|
140
|
+
raise ValueError(f"Unsupported database scheme: {scheme}. Use sqlite, postgresql, or mysql.")
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def _execute_query(connection_string: str, sql: str, params: Optional[list] = None) -> dict:
    """Execute a SQL statement and return a JSON-serializable result dict.

    Row-returning statements are capped at 1000 rows (free tier) and report
    ``total_available``; other statements are committed and report the
    affected row count.

    Args:
        connection_string: Database connection URI (see _get_connection).
        sql: Statement to execute.
        params: Optional positional bind parameters.
    """
    conn, db_type = _get_connection(connection_string)
    try:
        cursor = conn.cursor()
        if params:
            cursor.execute(sql, params)
        else:
            cursor.execute(sql)

        # cursor.description is non-None only for row-returning statements.
        if cursor.description:
            columns = [desc[0] for desc in cursor.description]
            rows = cursor.fetchmany(1000)  # free-tier cap: 1000 rows returned
            total_available = len(rows)
            if len(rows) == 1000:
                # Count rows beyond the cap in chunks instead of fetchall(),
                # so a huge result set is never held in memory just to count.
                try:
                    while True:
                        overflow = cursor.fetchmany(1000)
                        if not overflow:
                            break
                        total_available += len(overflow)
                except Exception:
                    pass  # best-effort count; some drivers may not allow it

            # Coerce driver values into JSON-friendly types.
            data = []
            for row in rows:
                record = {}
                for i, col in enumerate(columns):
                    val = row[i]
                    if isinstance(val, datetime):
                        val = val.isoformat()
                    elif isinstance(val, bytes):
                        # Hex-encode blobs; truncate long ones for readability.
                        val = val.hex()[:100] + "..." if len(val) > 50 else val.hex()
                    elif isinstance(val, memoryview):
                        val = bytes(val).hex()[:100]
                    record[col] = val
                data.append(record)

            return {
                "status": "ok",
                "columns": columns,
                "rows": data,
                "row_count": len(data),
                "total_available": total_available,
                "db_type": db_type,
            }
        else:
            affected = cursor.rowcount if cursor.rowcount >= 0 else 0
            conn.commit()
            return {
                "status": "ok",
                "message": f"Query executed successfully. {affected} row(s) affected.",
                "rows_affected": affected,
                "db_type": db_type,
            }
    finally:
        conn.close()
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def _list_tables(connection_string: str) -> dict:
    """Return the names of all user tables in the target database."""
    conn, db_type = _get_connection(connection_string)
    try:
        cursor = conn.cursor()
        # Each engine exposes its catalogue through a different query.
        if db_type == "sqlite":
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
        elif db_type == "postgresql":
            cursor.execute("""
                SELECT table_name FROM information_schema.tables
                WHERE table_schema = 'public' ORDER BY table_name
            """)
        elif db_type == "mysql":
            cursor.execute("SHOW TABLES")

        names = [record[0] for record in cursor.fetchall()]
        return {"status": "ok", "tables": names, "count": len(names), "db_type": db_type}
    finally:
        conn.close()
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def _describe_table(connection_string: str, table_name: str) -> dict:
    """Describe a table's schema: columns, types, nullability, defaults,
    primary keys (where available), and the current row count.
    """
    # Identifier whitelist: table_name is interpolated into SQL below, so it
    # must be a plain identifier (prevents SQL injection).
    if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', table_name):
        return {"error": "Invalid table name"}

    conn, db_type = _get_connection(connection_string)
    try:
        cursor = conn.cursor()
        columns = []
        row_count = 0  # defined up-front so the return below can never NameError

        if db_type == "sqlite":
            cursor.execute(f"PRAGMA table_info({table_name})")
            for row in cursor.fetchall():
                columns.append({
                    "name": row[1],
                    "type": row[2],
                    "nullable": not row[3],
                    "default": row[4],
                    "primary_key": bool(row[5]),
                })
            cursor.execute(f"SELECT COUNT(*) FROM {table_name}")  # name validated above
            row_count = cursor.fetchone()[0]

        elif db_type == "postgresql":
            # BUG FIX: parameters must be a sequence; `(table_name)` was just a
            # parenthesised string, which breaks psycopg2 parameter binding.
            cursor.execute("""
                SELECT column_name, data_type, is_nullable, column_default
                FROM information_schema.columns
                WHERE table_name = %s AND table_schema = 'public'
                ORDER BY ordinal_position
            """, (table_name,))
            for row in cursor.fetchall():
                columns.append({
                    "name": row[0],
                    "type": row[1],
                    "nullable": row[2] == "YES",
                    "default": row[3],
                })
            cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
            row_count = cursor.fetchone()[0]

        elif db_type == "mysql":
            cursor.execute(f"DESCRIBE {table_name}")
            for row in cursor.fetchall():
                columns.append({
                    "name": row[0],
                    "type": row[1],
                    "nullable": row[2] == "YES",
                    "default": row[4],
                    "primary_key": row[3] == "PRI",
                })
            cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
            row_count = cursor.fetchone()[0]

        return {
            "status": "ok",
            "table": table_name,
            "columns": columns,
            "column_count": len(columns),
            "row_count": row_count,
            "db_type": db_type,
        }
    finally:
        conn.close()
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def _insert_row(connection_string: str, table_name: str, data: dict) -> dict:
    """Insert one row into *table_name* from a column -> value mapping."""
    if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', table_name):
        return {"error": "Invalid table name"}
    # Robustness: an empty mapping would build "INSERT INTO t () VALUES ()",
    # which is invalid SQL on SQLite and PostgreSQL.
    if not data:
        return {"error": "No data provided"}

    conn, db_type = _get_connection(connection_string)
    try:
        cursor = conn.cursor()
        columns = list(data.keys())
        values = list(data.values())

        # Identifiers cannot be bound as parameters, so validate every column
        # name before interpolating it into the statement (SQL injection guard).
        for col in columns:
            if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', col):
                return {"error": f"Invalid column name: {col}"}

        col_str = ", ".join(columns)

        # SQLite binds with "?"; psycopg2/mysql-connector bind with "%s".
        if db_type == "sqlite":
            placeholders = ", ".join(["?"] * len(values))
        else:
            placeholders = ", ".join(["%s"] * len(values))

        sql = f"INSERT INTO {table_name} ({col_str}) VALUES ({placeholders})"
        cursor.execute(sql, values)
        conn.commit()

        return {
            "status": "ok",
            "message": f"Inserted 1 row into {table_name}",
            "table": table_name,
            "columns": columns,
        }
    finally:
        conn.close()
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
def _validate_output_path(output_path: str) -> Optional[str]:
|
|
329
|
+
"""Validate output file path against traversal attacks."""
|
|
330
|
+
blocked = ["/etc/", "/var/", "/proc/", "/sys/", "/dev/", ".."]
|
|
331
|
+
for pattern in blocked:
|
|
332
|
+
if pattern in output_path:
|
|
333
|
+
return f"Access denied: path contains blocked pattern '{pattern}'"
|
|
334
|
+
real = os.path.realpath(output_path)
|
|
335
|
+
parent = os.path.dirname(real)
|
|
336
|
+
if not os.path.isdir(parent):
|
|
337
|
+
return f"Directory does not exist: {parent}"
|
|
338
|
+
return None
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def _export_to_csv(connection_string: str, sql: str, output_path: str) -> dict:
    """Run *sql* and write the result set to a CSV file.

    Note: _execute_query caps results at 1000 rows, so exports are limited to
    1000 rows on the free tier.
    """
    path_err = _validate_output_path(output_path)
    if path_err:
        return {"error": path_err}

    result = _execute_query(connection_string, sql)
    if result.get("status") != "ok":
        return result

    rows = result.get("rows", [])
    columns = result.get("columns", [])

    if not rows:
        return {"error": "No data to export"}

    # Explicit UTF-8 so exports are portable regardless of the platform's
    # default locale encoding (e.g. cp1252 on Windows).
    with open(output_path, "w", newline="", encoding="utf-8") as f:
        writer = csv.DictWriter(f, fieldnames=columns)
        writer.writeheader()
        writer.writerows(rows)

    return {
        "status": "ok",
        "output": output_path,
        "rows_exported": len(rows),
        "columns": columns,
        "file_size_bytes": os.path.getsize(output_path),
    }
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
# ---------------------------------------------------------------------------
# MCP Server
# ---------------------------------------------------------------------------
# Module-level FastMCP instance: the functions below register themselves onto
# it via the @mcp.tool() decorator, and the __main__ guard calls mcp.run().
mcp = FastMCP(
    "Universal Database MCP",
    instructions="Database connector for SQLite, PostgreSQL, and MySQL. Query data, explore schema, insert rows, and export to CSV. By MEOK AI Labs.")
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
@mcp.tool()
def query_sql(connection_string: str, sql: str, allow_write: bool = False, api_key: str = "") -> dict:
    """Execute a SQL query against a database. Supports SQLite, PostgreSQL, and MySQL.

    Connection string examples:
    - SQLite: 'sqlite:///path/to/db.sqlite' or just '/path/to/file.db'
    - PostgreSQL: 'postgresql://user:pass@localhost:5432/mydb'
    - MySQL: 'mysql://user:pass@localhost:3306/mydb'

    Args:
        connection_string: Database connection URI
        sql: SQL query to execute
        allow_write: Set True for INSERT/UPDATE/DELETE (safety guard)
        api_key: Optional API key used for access control and rate limiting
    """
    allowed, msg, tier = check_access(api_key)
    if not allowed:
        return {"error": msg, "upgrade_url": "https://meok.ai/pricing"}

    # FIX: key the rate limit on the caller's API key. Every call site ignored
    # _check_rate_limit's `caller` parameter, so all clients shared one
    # "anonymous" bucket and any client could exhaust everyone's daily quota.
    err = _check_rate_limit(api_key or "anonymous")
    if err:
        return {"error": err}
    safety = _validate_query(sql, allow_write)
    if safety:
        return {"error": safety}
    try:
        return _execute_query(connection_string, sql)
    except Exception as e:
        # Surface driver errors as a structured result instead of crashing
        # the MCP tool call.
        return {"error": str(e)}
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
@mcp.tool()
def list_tables(connection_string: str, api_key: str = "") -> dict:
    """List all tables in a database.

    Args:
        connection_string: Database connection URI
        api_key: Optional API key used for access control and rate limiting
    """
    allowed, msg, tier = check_access(api_key)
    if not allowed:
        return {"error": msg, "upgrade_url": "https://meok.ai/pricing"}

    # FIX: rate-limit per API key instead of one shared "anonymous" bucket.
    err = _check_rate_limit(api_key or "anonymous")
    if err:
        return {"error": err}
    try:
        return _list_tables(connection_string)
    except Exception as e:
        return {"error": str(e)}
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
@mcp.tool()
def describe_table(connection_string: str, table_name: str, api_key: str = "") -> dict:
    """Describe a table's schema: column names, types, nullability, defaults,
    primary keys, and row count.

    Args:
        connection_string: Database connection URI
        table_name: Name of the table to describe
        api_key: Optional API key used for access control and rate limiting
    """
    allowed, msg, tier = check_access(api_key)
    if not allowed:
        return {"error": msg, "upgrade_url": "https://meok.ai/pricing"}

    # FIX: rate-limit per API key instead of one shared "anonymous" bucket.
    err = _check_rate_limit(api_key or "anonymous")
    if err:
        return {"error": err}
    try:
        return _describe_table(connection_string, table_name)
    except Exception as e:
        return {"error": str(e)}
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
@mcp.tool()
def insert_row(connection_string: str, table_name: str, data: dict, api_key: str = "") -> dict:
    """Insert a single row into a table. Column names and values are passed
    as a dictionary.

    Args:
        connection_string: Database connection URI
        table_name: Target table name
        data: Dictionary of column_name -> value pairs
        api_key: Optional API key used for access control and rate limiting
    """
    allowed, msg, tier = check_access(api_key)
    if not allowed:
        return {"error": msg, "upgrade_url": "https://meok.ai/pricing"}

    # FIX: rate-limit per API key instead of one shared "anonymous" bucket.
    err = _check_rate_limit(api_key or "anonymous")
    if err:
        return {"error": err}
    try:
        return _insert_row(connection_string, table_name, data)
    except Exception as e:
        return {"error": str(e)}
|
|
472
|
+
|
|
473
|
+
|
|
474
|
+
@mcp.tool()
def export_to_csv(connection_string: str, sql: str, output_path: str = "", api_key: str = "") -> dict:
    """Execute a SELECT query and export results to a CSV file.

    Args:
        connection_string: Database connection URI
        sql: SELECT query to execute
        output_path: Path for the output CSV (default: timestamped temp file)
        api_key: Optional API key used for access control and rate limiting
    """
    allowed, msg, tier = check_access(api_key)
    if not allowed:
        return {"error": msg, "upgrade_url": "https://meok.ai/pricing"}

    # FIX: rate-limit per API key instead of one shared "anonymous" bucket.
    err = _check_rate_limit(api_key or "anonymous")
    if err:
        return {"error": err}
    # Exports are strictly read-only: write statements are always rejected.
    safety = _validate_query(sql, allow_write=False)
    if safety:
        return {"error": safety}
    if not output_path:
        output_path = os.path.join(tempfile.gettempdir(), f"export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv")
    try:
        return _export_to_csv(connection_string, sql, output_path)
    except Exception as e:
        return {"error": str(e)}
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
# Entry point: start the MCP server (stdio transport by FastMCP default)
# when executed as a script.
if __name__ == "__main__":
    mcp.run()
|