@gluip/chart-canvas-mcp 0.1.5 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +78 -1
- package/dist/database.js +224 -0
- package/dist/index.js +286 -0
- package/dist/state.js +11 -0
- package/dist/transformers.js +138 -0
- package/package.json +8 -3
package/README.md
CHANGED
|
@@ -16,9 +16,24 @@ Watch the [full demo on YouTube](https://www.youtube.com/watch?v=XVucQstPisc) to
|
|
|
16
16
|
🎨 **Interactive Dashboard**: Drag-and-drop grid layout with real-time updates
|
|
17
17
|
🔄 **Live Synchronization**: Changes appear instantly in your browser
|
|
18
18
|
📊 **Rich Visualizations**: Powered by ECharts and Mermaid
|
|
19
|
+
💾 **Universal Data Sources**: Query SQLite, CSV, Parquet, JSON, and NDJSON files directly
|
|
20
|
+
⚡ **Smart Data Flow**: Execute queries server-side - data stays local, never sent to LLM
|
|
21
|
+
🔒 **Privacy First**: Your data never leaves your machine
|
|
19
22
|
🚀 **Easy Setup**: One command to get started
|
|
20
23
|
🌐 **Production Ready**: Built-in production mode with optimized builds
|
|
21
24
|
|
|
25
|
+
## Supported Data Sources
|
|
26
|
+
|
|
27
|
+
Chart Canvas can query and visualize data from multiple file formats:
|
|
28
|
+
|
|
29
|
+
- **SQLite** (`.db`, `.sqlite`, `.sqlite3`) - Relational databases
|
|
30
|
+
- **CSV** (`.csv`) - Comma-separated values
|
|
31
|
+
- **Parquet** (`.parquet`) - Columnar storage format
|
|
32
|
+
- **JSON** (`.json`) - JSON arrays of objects
|
|
33
|
+
- **NDJSON** (`.jsonl`, `.ndjson`) - Newline-delimited JSON
|
|
34
|
+
|
|
35
|
+
**Privacy & Performance**: All queries execute locally on your machine using DuckDB. Query results are transformed into visualizations server-side - only metadata (chart configuration) is sent to the LLM, never your actual data. This makes it fast, scalable, and private.
|
|
36
|
+
|
|
22
37
|
## Quick Start
|
|
23
38
|
|
|
24
39
|
### Installation
|
|
@@ -68,6 +83,12 @@ Add to your MCP client configuration (e.g., Claude Desktop):
|
|
|
68
83
|
"Draw a flowchart for the user authentication process"
|
|
69
84
|
|
|
70
85
|
"Make a table with team member information"
|
|
86
|
+
|
|
87
|
+
"Show me the database schema for my SQLite database"
|
|
88
|
+
|
|
89
|
+
"Query the athletes table and show the top 10 with most personal records"
|
|
90
|
+
|
|
91
|
+
"Create a chart showing sales trends from the database grouped by region"
|
|
71
92
|
```
|
|
72
93
|
|
|
73
94
|
## MCP Tools
|
|
@@ -77,6 +98,7 @@ Add to your MCP client configuration (e.g., Claude Desktop):
|
|
|
77
98
|
Create charts, diagrams, and tables on the canvas.
|
|
78
99
|
|
|
79
100
|
**Supported Types**:
|
|
101
|
+
|
|
80
102
|
- `line` - Line charts with multiple series
|
|
81
103
|
- `bar` - Bar charts for comparisons
|
|
82
104
|
- `scatter` - Scatter plots for data distribution
|
|
@@ -85,6 +107,7 @@ Create charts, diagrams, and tables on the canvas.
|
|
|
85
107
|
- `flowchart` - Mermaid diagrams (flowcharts, sequence diagrams, Gantt charts, etc.)
|
|
86
108
|
|
|
87
109
|
**Example**:
|
|
110
|
+
|
|
88
111
|
```typescript
|
|
89
112
|
{
|
|
90
113
|
type: "line",
|
|
@@ -109,6 +132,60 @@ Remove all visualizations from the canvas.
|
|
|
109
132
|
|
|
110
133
|
Open the dashboard in your default browser.
|
|
111
134
|
|
|
135
|
+
### getDatabaseSchema
|
|
136
|
+
|
|
137
|
+
Inspect the structure of a SQLite database to understand available tables and columns before writing queries.
|
|
138
|
+
|
|
139
|
+
**Parameters**:
|
|
140
|
+
|
|
141
|
+
- `databasePath` - Path to SQLite database file (e.g., `./data/mydb.sqlite` or absolute path)
|
|
142
|
+
|
|
143
|
+
**Example**:
|
|
144
|
+
|
|
145
|
+
```typescript
|
|
146
|
+
{
|
|
147
|
+
databasePath: "/path/to/database.db";
|
|
148
|
+
}
|
|
149
|
+
```
|
|
150
|
+
|
|
151
|
+
**Returns**: Formatted schema showing all tables, columns, data types, and constraints.
|
|
152
|
+
|
|
153
|
+
### queryAndVisualize
|
|
154
|
+
|
|
155
|
+
Execute a SQL query on a SQLite database and create a visualization from the results. Queries are executed server-side and must be read-only (SELECT only). Maximum 10,000 rows.
|
|
156
|
+
|
|
157
|
+
**Parameters**:
|
|
158
|
+
|
|
159
|
+
- `databasePath` - Path to SQLite database file
|
|
160
|
+
- `query` - SQL SELECT query (read-only)
|
|
161
|
+
- `visualizationType` - Type of chart: `line`, `bar`, `scatter`, `pie`, or `table`
|
|
162
|
+
- `columnMapping` (optional for table) - Mapping of columns to chart axes:
|
|
163
|
+
- `xColumn` - Column for X-axis (required for charts)
|
|
164
|
+
- `yColumns` - Array of columns for Y-axis (required for charts)
|
|
165
|
+
- `seriesColumn` - Column to group data into separate series (optional)
|
|
166
|
+
- `groupByColumn` - Alternative grouping column (optional)
|
|
167
|
+
- `title` - Optional title for visualization
|
|
168
|
+
- `description` - Optional description
|
|
169
|
+
- `useColumnAsXLabel` - If true, use X column values as labels instead of numbers
|
|
170
|
+
|
|
171
|
+
**Example**:
|
|
172
|
+
|
|
173
|
+
```typescript
|
|
174
|
+
{
|
|
175
|
+
databasePath: "./data/sales.db",
|
|
176
|
+
query: "SELECT region, SUM(revenue) as total FROM sales GROUP BY region",
|
|
177
|
+
visualizationType: "bar",
|
|
178
|
+
columnMapping: {
|
|
179
|
+
xColumn: "region",
|
|
180
|
+
yColumns: ["total"]
|
|
181
|
+
},
|
|
182
|
+
title: "Revenue by Region",
|
|
183
|
+
useColumnAsXLabel: true
|
|
184
|
+
}
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
**Security**: Only SELECT and WITH (CTE) queries are allowed. INSERT, UPDATE, DELETE, DROP, and other modifying operations are blocked.
|
|
188
|
+
|
|
112
189
|
## Architecture
|
|
113
190
|
|
|
114
191
|
- **Backend**: Node.js + TypeScript + Express + MCP SDK
|
|
@@ -128,7 +205,7 @@ cd chart-canvas
|
|
|
128
205
|
cd backend
|
|
129
206
|
npm install
|
|
130
207
|
|
|
131
|
-
# Install frontend dependencies
|
|
208
|
+
# Install frontend dependencies
|
|
132
209
|
cd ../frontend
|
|
133
210
|
npm install
|
|
134
211
|
|
package/dist/database.js
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
import { DuckDBInstance } from "@duckdb/node-api";
|
|
2
|
+
import { existsSync } from "fs";
|
|
3
|
+
import { resolve, extname } from "path";
|
|
4
|
+
// SQL statement keywords that indicate a write/DDL operation; any query
// containing one of these as a whole word is rejected by
// validateReadOnlyQuery. Order matters only for which error message is
// reported first when several keywords appear.
const FORBIDDEN_KEYWORDS = [
    "INSERT", "UPDATE", "DELETE", "DROP", "CREATE",
    "ALTER", "TRUNCATE", "REPLACE", "ATTACH", "DETACH",
];
// Hard cap on the number of result rows handed back to the caller.
const MAX_ROWS = 10000;
// NOTE(review): declared but not referenced anywhere in this module — query
// timeouts do not appear to be enforced. TODO: wire up or remove.
const QUERY_TIMEOUT_MS = 5000;
|
|
18
|
+
/**
 * Resolve a user-supplied path and verify that something exists at it.
 *
 * @param {string} dbPath - Path to the database/data file (relative or absolute).
 * @returns {string} The absolute, resolved path.
 * @throws {Error} When nothing exists at the given path.
 */
export function validateDatabasePath(dbPath) {
    const absolutePath = resolve(dbPath);
    if (existsSync(absolutePath)) {
        return absolutePath;
    }
    throw new Error(`File does not exist: ${dbPath}`);
}
|
|
28
|
+
/**
 * Classify a data file by its extension (case-insensitive).
 *
 * @param {string} filePath - Path whose extension decides the type.
 * @returns {"sqlite"|"csv"|"parquet"|"json"|"ndjson"|"unknown"}
 */
export function getFileType(filePath) {
    const typeByExtension = {
        ".db": "sqlite",
        ".sqlite": "sqlite",
        ".sqlite3": "sqlite",
        ".csv": "csv",
        ".parquet": "parquet",
        ".json": "json",
        ".jsonl": "ndjson",
        ".ndjson": "ndjson",
    };
    return typeByExtension[extname(filePath).toLowerCase()] ?? "unknown";
}
|
|
50
|
+
/**
 * Validate that a SQL query is read-only.
 *
 * Accepts a single statement starting with SELECT or WITH (for CTEs) that
 * contains none of the FORBIDDEN_KEYWORDS as a whole word. Also rejects
 * multi-statement input ("SELECT 1; COPY (...) TO 'out'"): a second
 * statement would otherwise bypass the SELECT/WITH prefix check entirely,
 * and DuckDB statements such as COPY/EXPORT can write to disk.
 *
 * Note: the keyword scan runs over the raw SQL text, so a string literal
 * containing e.g. 'DELETE' is rejected too — a deliberately conservative
 * false positive. Likewise a semicolon inside a string literal is treated
 * as a statement separator and rejected.
 *
 * @param {string} sql - SQL text to validate.
 * @throws {Error} When the query is not a single read-only statement.
 */
export function validateReadOnlyQuery(sql) {
    const trimmed = sql.trim();
    const upperSQL = trimmed.toUpperCase();
    // Must start with SELECT or WITH (for CTEs)
    if (!upperSQL.startsWith("SELECT") && !upperSQL.startsWith("WITH")) {
        throw new Error("Only SELECT queries are allowed");
    }
    // Reject multiple statements: a single trailing semicolon is fine, but
    // any other semicolon means a second statement that the prefix check
    // above never looked at.
    if (trimmed.replace(/;\s*$/, "").includes(";")) {
        throw new Error("Only a single SELECT statement is allowed");
    }
    // Check for forbidden keywords
    for (const keyword of FORBIDDEN_KEYWORDS) {
        const regex = new RegExp(`\\b${keyword}\\b`, "i");
        if (regex.test(sql)) {
            throw new Error(`Query contains forbidden keyword: ${keyword}`);
        }
    }
}
|
|
67
|
+
/**
 * Build the DuckDB table-function expression that reads the given file.
 * Single quotes in the path are doubled (standard SQL string escaping).
 *
 * @param {string} filePath - Resolved path to the data file.
 * @param {string} fileType - One of the types produced by getFileType.
 * @returns {string} A SQL fragment such as "read_csv_auto('/path/x.csv')".
 * @throws {Error} For unsupported file types.
 */
function getReadFunction(filePath, fileType) {
    const sqlPath = `'${filePath.replace(/'/g, "''")}'`;
    if (fileType === "sqlite") {
        // For SQLite, we'll attach the database (table name filled in later).
        return `sqlite_scan(${sqlPath}, '')`;
    }
    if (fileType === "csv") {
        return `read_csv_auto(${sqlPath})`;
    }
    if (fileType === "parquet") {
        return `read_parquet(${sqlPath})`;
    }
    if (fileType === "json") {
        return `read_json_auto(${sqlPath})`;
    }
    if (fileType === "ndjson") {
        return `read_json_auto(${sqlPath}, format='newline_delimited')`;
    }
    throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
}
|
|
88
|
+
/**
 * Derive a default table name from a file path: the basename with any
 * recognized data-file extension stripped. Falls back to "data" when the
 * basename is empty (e.g. a path ending in a separator).
 *
 * Fix: splits on both "/" and "\\" — the original split("/") returned the
 * entire path unchanged for Windows-style paths.
 *
 * @param {string} filePath - Path to the data file.
 * @returns {string} Table name to use in generated SQL.
 */
function getTableName(filePath) {
    const base = filePath.split(/[\\/]/).pop();
    const name = base?.replace(/\.(db|sqlite|sqlite3|csv|parquet|json|jsonl|ndjson)$/i, "");
    return name || "data";
}
|
|
98
|
+
/**
 * Inspect a data source and return its schema.
 *
 * For SQLite files, lists every user table found in sqlite_master and
 * describes each one through DuckDB's sqlite extension. For file-based
 * formats (CSV/Parquet/JSON/NDJSON) the whole file is treated as a single
 * table named after the file.
 *
 * Fix: table names read from sqlite_master are escaped before being
 * interpolated back into SQL — they come from the (untrusted) database
 * file itself and may contain single quotes.
 *
 * Note: notNull/defaultValue/primaryKey are not derived here — DuckDB's
 * DESCRIBE output used below does not carry those constraints, so they
 * are reported as false/null placeholders.
 *
 * @param {string} dbPath - Path to the data file.
 * @returns {Promise<{tables: {name: string, columns: object[]}[]}>}
 * @throws {Error} For missing files or unsupported extensions.
 */
export async function getDatabaseSchema(dbPath) {
    const resolvedPath = validateDatabasePath(dbPath);
    const fileType = getFileType(resolvedPath);
    if (fileType === "unknown") {
        throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
    }
    const instance = await DuckDBInstance.create(":memory:");
    const conn = await instance.connect();
    try {
        // SQLite files are read through DuckDB's sqlite extension.
        if (fileType === "sqlite") {
            await conn.run("INSTALL sqlite");
            await conn.run("LOAD sqlite");
        }
        const readFunc = getReadFunction(resolvedPath, fileType);
        const tableName = getTableName(resolvedPath);
        if (fileType === "sqlite") {
            const escapedPath = resolvedPath.replace(/'/g, "''");
            // List user tables from sqlite_master (skips SQLite internals).
            const tablesReader = await conn.runAndReadAll(`SELECT name FROM sqlite_scan('${escapedPath}', 'sqlite_master') WHERE type='table' AND name NOT LIKE 'sqlite_%'`);
            const tables = tablesReader.getRowObjects();
            if (!tables || tables.length === 0) {
                return { tables: [] };
            }
            const tableSchemas = [];
            for (const table of tables) {
                // Escape quotes: the name is interpolated into SQL and is
                // attacker-controlled via the database file.
                const escapedTable = String(table.name).replace(/'/g, "''");
                const columnsReader = await conn.runAndReadAll(`DESCRIBE SELECT * FROM sqlite_scan('${escapedPath}', '${escapedTable}') LIMIT 0`);
                const columns = columnsReader.getRowObjects();
                tableSchemas.push({
                    name: table.name,
                    columns: columns.map((col) => ({
                        name: col.column_name,
                        type: col.column_type,
                        notNull: false,
                        defaultValue: null,
                        primaryKey: false,
                    })),
                });
            }
            return { tables: tableSchemas };
        }
        else {
            // For file-based formats (CSV, Parquet, JSON): one logical table.
            const reader = await conn.runAndReadAll(`DESCRIBE SELECT * FROM ${readFunc} LIMIT 0`);
            const columns = reader.getRowObjects();
            const columnInfos = columns.map((col) => ({
                name: col.column_name,
                type: col.column_type,
                notNull: false,
                defaultValue: null,
                primaryKey: false,
            }));
            return {
                tables: [
                    {
                        name: tableName,
                        columns: columnInfos,
                    },
                ],
            };
        }
    }
    finally {
        // Always release the in-memory DuckDB resources.
        conn.closeSync();
        instance.closeSync();
    }
}
|
|
170
|
+
/**
 * Execute a read-only SQL query against a data file using an in-memory
 * DuckDB instance.
 *
 * For non-SQLite files, bare references to the file's derived table name
 * are rewritten to the DuckDB read function (e.g. `FROM sales` becomes
 * `FROM read_csv_auto('.../sales.csv')`). For SQLite the query is run
 * as-is (callers reference tables via sqlite_scan).
 *
 * Fix: regex metacharacters in the derived table name are escaped before
 * building the rewrite pattern — a file named "data(1).csv" previously
 * produced an invalid RegExp.
 *
 * @param {string} dbPath - Path to the data file.
 * @param {string} sql - Read-only SELECT/WITH query (validated here).
 * @returns {Promise<{rows: object[], columns: string[]}>}
 * @throws {Error} For invalid paths/queries or result sets over MAX_ROWS.
 */
export async function executeQuery(dbPath, sql) {
    const resolvedPath = validateDatabasePath(dbPath);
    const fileType = getFileType(resolvedPath);
    validateReadOnlyQuery(sql);
    if (fileType === "unknown") {
        throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
    }
    const instance = await DuckDBInstance.create(":memory:");
    const conn = await instance.connect();
    try {
        // For SQLite, install and load the extension first
        if (fileType === "sqlite") {
            await conn.run("INSTALL sqlite");
            await conn.run("LOAD sqlite");
        }
        const readFunc = getReadFunction(resolvedPath, fileType);
        const tableName = getTableName(resolvedPath);
        // Replace table references with read function calls.
        let modifiedSql = sql;
        // For SQLite, don't modify - queries reference tables via sqlite_scan.
        if (fileType !== "sqlite") {
            // Escape regex metacharacters in the filename-derived table name
            // so the rewrite pattern is always a valid, literal word match.
            const escapedName = tableName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
            modifiedSql = sql.replace(new RegExp(`\\b${escapedName}\\b`, "gi"), readFunc);
        }
        const reader = await conn.runAndReadAll(modifiedSql);
        const rawRows = reader.getRowObjects();
        if (rawRows.length > MAX_ROWS) {
            throw new Error(`Query returned ${rawRows.length} rows, exceeding limit of ${MAX_ROWS}`);
        }
        // BigInt is not JSON-serializable; downcast to Number.
        // NOTE(review): values above 2^53 lose precision here — confirm
        // acceptable for this use case.
        const rows = rawRows.map((row) => {
            const converted = {};
            for (const [key, value] of Object.entries(row)) {
                converted[key] = typeof value === "bigint" ? Number(value) : value;
            }
            return converted;
        });
        const columns = reader.columnNames();
        return {
            rows,
            columns,
        };
    }
    finally {
        // Always release the in-memory DuckDB resources.
        conn.closeSync();
        instance.closeSync();
    }
}
|
package/dist/index.js
CHANGED
|
@@ -4,6 +4,8 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
|
|
|
4
4
|
import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
|
|
5
5
|
import { stateManager } from "./state.js";
|
|
6
6
|
import { startApiServer, getServerPort } from "./api.js";
|
|
7
|
+
import { getDatabaseSchema, executeQuery } from "./database.js";
|
|
8
|
+
import { transformToTable, transformToSeries, extractXLabels, } from "./transformers.js";
|
|
7
9
|
import open from "open";
|
|
8
10
|
const server = new Server({
|
|
9
11
|
name: "chart-canvas-server",
|
|
@@ -107,6 +109,80 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
|
|
107
109
|
required: ["id"],
|
|
108
110
|
},
|
|
109
111
|
},
|
|
112
|
+
{
|
|
113
|
+
name: "updateVisualization",
|
|
114
|
+
description: "Update an existing visualization on the canvas. You can change its data, title, description, or any other property.",
|
|
115
|
+
inputSchema: {
|
|
116
|
+
type: "object",
|
|
117
|
+
properties: {
|
|
118
|
+
id: {
|
|
119
|
+
type: "string",
|
|
120
|
+
description: "ID of the visualization to update",
|
|
121
|
+
},
|
|
122
|
+
type: {
|
|
123
|
+
type: "string",
|
|
124
|
+
enum: ["line", "bar", "scatter", "table", "flowchart", "pie"],
|
|
125
|
+
description: "Type of visualization",
|
|
126
|
+
},
|
|
127
|
+
series: {
|
|
128
|
+
type: "array",
|
|
129
|
+
items: {
|
|
130
|
+
type: "object",
|
|
131
|
+
properties: {
|
|
132
|
+
name: { type: "string" },
|
|
133
|
+
data: {
|
|
134
|
+
type: "array",
|
|
135
|
+
items: {
|
|
136
|
+
type: "array",
|
|
137
|
+
items: { type: "number" },
|
|
138
|
+
minItems: 2,
|
|
139
|
+
maxItems: 2,
|
|
140
|
+
},
|
|
141
|
+
},
|
|
142
|
+
},
|
|
143
|
+
required: ["name", "data"],
|
|
144
|
+
},
|
|
145
|
+
description: "Updated data series for charts",
|
|
146
|
+
},
|
|
147
|
+
table: {
|
|
148
|
+
type: "object",
|
|
149
|
+
properties: {
|
|
150
|
+
headers: {
|
|
151
|
+
type: "array",
|
|
152
|
+
items: { type: "string" },
|
|
153
|
+
},
|
|
154
|
+
rows: {
|
|
155
|
+
type: "array",
|
|
156
|
+
items: {
|
|
157
|
+
type: "array",
|
|
158
|
+
items: { type: ["string", "number"] },
|
|
159
|
+
},
|
|
160
|
+
},
|
|
161
|
+
},
|
|
162
|
+
required: ["headers", "rows"],
|
|
163
|
+
description: "Updated table data",
|
|
164
|
+
},
|
|
165
|
+
mermaid: {
|
|
166
|
+
type: "string",
|
|
167
|
+
description: "Updated Mermaid diagram syntax",
|
|
168
|
+
},
|
|
169
|
+
title: {
|
|
170
|
+
type: "string",
|
|
171
|
+
description: "Updated title",
|
|
172
|
+
},
|
|
173
|
+
description: {
|
|
174
|
+
type: "string",
|
|
175
|
+
description: "Updated description",
|
|
176
|
+
},
|
|
177
|
+
xLabels: {
|
|
178
|
+
type: "array",
|
|
179
|
+
items: { type: "string" },
|
|
180
|
+
description: "Updated x-axis labels",
|
|
181
|
+
},
|
|
182
|
+
},
|
|
183
|
+
required: ["id"],
|
|
184
|
+
},
|
|
185
|
+
},
|
|
110
186
|
{
|
|
111
187
|
name: "clearCanvas",
|
|
112
188
|
description: "Remove all visualizations from the canvas",
|
|
@@ -123,6 +199,78 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
|
|
|
123
199
|
properties: {},
|
|
124
200
|
},
|
|
125
201
|
},
|
|
202
|
+
{
|
|
203
|
+
name: "getDatabaseSchema",
|
|
204
|
+
description: "Get the schema of a data source (SQLite database, CSV, Parquet, or JSON file) including all tables/columns. Use this to understand the data structure before writing queries.",
|
|
205
|
+
inputSchema: {
|
|
206
|
+
type: "object",
|
|
207
|
+
properties: {
|
|
208
|
+
databasePath: {
|
|
209
|
+
type: "string",
|
|
210
|
+
description: "Path to the data file. Supported formats: SQLite (.db, .sqlite, .sqlite3), CSV (.csv), Parquet (.parquet), JSON (.json, .jsonl, .ndjson)",
|
|
211
|
+
},
|
|
212
|
+
},
|
|
213
|
+
required: ["databasePath"],
|
|
214
|
+
},
|
|
215
|
+
},
|
|
216
|
+
{
|
|
217
|
+
name: "queryAndVisualize",
|
|
218
|
+
description: "Execute a SQL query on a data source (SQLite, CSV, Parquet, or JSON file) and create a visualization from the results. The query must be read-only (SELECT only). You must specify how to map columns to the visualization.",
|
|
219
|
+
inputSchema: {
|
|
220
|
+
type: "object",
|
|
221
|
+
properties: {
|
|
222
|
+
databasePath: {
|
|
223
|
+
type: "string",
|
|
224
|
+
description: "Path to the data file (SQLite, CSV, Parquet, JSON)",
|
|
225
|
+
},
|
|
226
|
+
query: {
|
|
227
|
+
type: "string",
|
|
228
|
+
description: "SQL SELECT query to execute (read-only, max 10000 rows)",
|
|
229
|
+
},
|
|
230
|
+
visualizationType: {
|
|
231
|
+
type: "string",
|
|
232
|
+
enum: ["line", "bar", "scatter", "table", "pie"],
|
|
233
|
+
description: "Type of visualization to create from the query results",
|
|
234
|
+
},
|
|
235
|
+
columnMapping: {
|
|
236
|
+
type: "object",
|
|
237
|
+
properties: {
|
|
238
|
+
xColumn: {
|
|
239
|
+
type: "string",
|
|
240
|
+
description: "Column to use for X-axis (required for charts, not needed for table)",
|
|
241
|
+
},
|
|
242
|
+
yColumns: {
|
|
243
|
+
type: "array",
|
|
244
|
+
items: { type: "string" },
|
|
245
|
+
description: "Column(s) to use for Y-axis values (required for charts, not needed for table)",
|
|
246
|
+
},
|
|
247
|
+
seriesColumn: {
|
|
248
|
+
type: "string",
|
|
249
|
+
description: "Optional: Column to group data into separate series (e.g., 'category', 'product_name')",
|
|
250
|
+
},
|
|
251
|
+
groupByColumn: {
|
|
252
|
+
type: "string",
|
|
253
|
+
description: "Optional: Column to group by (alternative to seriesColumn)",
|
|
254
|
+
},
|
|
255
|
+
},
|
|
256
|
+
description: "Mapping of query result columns to chart axes. Not required for table type.",
|
|
257
|
+
},
|
|
258
|
+
title: {
|
|
259
|
+
type: "string",
|
|
260
|
+
description: "Optional title for the visualization",
|
|
261
|
+
},
|
|
262
|
+
description: {
|
|
263
|
+
type: "string",
|
|
264
|
+
description: "Optional description for the visualization",
|
|
265
|
+
},
|
|
266
|
+
useColumnAsXLabel: {
|
|
267
|
+
type: "boolean",
|
|
268
|
+
description: "If true, use the xColumn values as labels instead of numeric values (useful for dates/categories)",
|
|
269
|
+
},
|
|
270
|
+
},
|
|
271
|
+
required: ["databasePath", "query", "visualizationType"],
|
|
272
|
+
},
|
|
273
|
+
},
|
|
126
274
|
],
|
|
127
275
|
};
|
|
128
276
|
});
|
|
@@ -164,6 +312,35 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
164
312
|
],
|
|
165
313
|
};
|
|
166
314
|
}
|
|
315
|
+
case "updateVisualization": {
|
|
316
|
+
const { id, type, series, table, mermaid, title, description, xLabels } = args;
|
|
317
|
+
const updates = {};
|
|
318
|
+
if (type !== undefined)
|
|
319
|
+
updates.type = type;
|
|
320
|
+
if (series !== undefined)
|
|
321
|
+
updates.series = series;
|
|
322
|
+
if (table !== undefined)
|
|
323
|
+
updates.table = table;
|
|
324
|
+
if (mermaid !== undefined)
|
|
325
|
+
updates.mermaid = mermaid;
|
|
326
|
+
if (title !== undefined)
|
|
327
|
+
updates.title = title;
|
|
328
|
+
if (description !== undefined)
|
|
329
|
+
updates.description = description;
|
|
330
|
+
if (xLabels !== undefined)
|
|
331
|
+
updates.xLabels = xLabels;
|
|
332
|
+
const updated = stateManager.updateVisualization(id, updates);
|
|
333
|
+
return {
|
|
334
|
+
content: [
|
|
335
|
+
{
|
|
336
|
+
type: "text",
|
|
337
|
+
text: updated
|
|
338
|
+
? `Updated visualization ${id}`
|
|
339
|
+
: `Visualization ${id} not found`,
|
|
340
|
+
},
|
|
341
|
+
],
|
|
342
|
+
};
|
|
343
|
+
}
|
|
167
344
|
case "clearCanvas": {
|
|
168
345
|
stateManager.clearCanvas();
|
|
169
346
|
return {
|
|
@@ -200,6 +377,115 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
200
377
|
};
|
|
201
378
|
}
|
|
202
379
|
}
|
|
380
|
+
case "getDatabaseSchema": {
|
|
381
|
+
try {
|
|
382
|
+
const { databasePath } = args;
|
|
383
|
+
const schema = await getDatabaseSchema(databasePath);
|
|
384
|
+
// Format schema as readable text
|
|
385
|
+
let schemaText = `Data source schema for: ${databasePath}\n\n`;
|
|
386
|
+
for (const table of schema.tables) {
|
|
387
|
+
schemaText += `Table: ${table.name}\n`;
|
|
388
|
+
schemaText += `Columns:\n`;
|
|
389
|
+
for (const col of table.columns) {
|
|
390
|
+
const constraints = [];
|
|
391
|
+
if (col.primaryKey)
|
|
392
|
+
constraints.push("PRIMARY KEY");
|
|
393
|
+
if (col.notNull)
|
|
394
|
+
constraints.push("NOT NULL");
|
|
395
|
+
if (col.defaultValue)
|
|
396
|
+
constraints.push(`DEFAULT ${col.defaultValue}`);
|
|
397
|
+
const constraintStr = constraints.length > 0 ? ` (${constraints.join(", ")})` : "";
|
|
398
|
+
schemaText += ` - ${col.name}: ${col.type}${constraintStr}\n`;
|
|
399
|
+
}
|
|
400
|
+
schemaText += `\n`;
|
|
401
|
+
}
|
|
402
|
+
return {
|
|
403
|
+
content: [
|
|
404
|
+
{
|
|
405
|
+
type: "text",
|
|
406
|
+
text: schemaText,
|
|
407
|
+
},
|
|
408
|
+
],
|
|
409
|
+
};
|
|
410
|
+
}
|
|
411
|
+
catch (error) {
|
|
412
|
+
return {
|
|
413
|
+
content: [
|
|
414
|
+
{
|
|
415
|
+
type: "text",
|
|
416
|
+
text: `Error getting data source schema: ${error instanceof Error ? error.message : String(error)}`,
|
|
417
|
+
},
|
|
418
|
+
],
|
|
419
|
+
};
|
|
420
|
+
}
|
|
421
|
+
}
|
|
422
|
+
case "queryAndVisualize": {
|
|
423
|
+
try {
|
|
424
|
+
const { databasePath, query, visualizationType, columnMapping, title, description, useColumnAsXLabel, } = args;
|
|
425
|
+
// Execute query
|
|
426
|
+
const result = await executeQuery(databasePath, query);
|
|
427
|
+
if (result.rows.length === 0) {
|
|
428
|
+
return {
|
|
429
|
+
content: [
|
|
430
|
+
{
|
|
431
|
+
type: "text",
|
|
432
|
+
text: "Query returned 0 rows. No visualization created.",
|
|
433
|
+
},
|
|
434
|
+
],
|
|
435
|
+
};
|
|
436
|
+
}
|
|
437
|
+
// Create visualization based on type
|
|
438
|
+
let viz;
|
|
439
|
+
if (visualizationType === "table") {
|
|
440
|
+
// For tables, just transform all columns
|
|
441
|
+
const tableData = transformToTable(result.rows, result.columns);
|
|
442
|
+
viz = stateManager.addVisualization({
|
|
443
|
+
type: "table",
|
|
444
|
+
table: tableData,
|
|
445
|
+
title,
|
|
446
|
+
description,
|
|
447
|
+
});
|
|
448
|
+
}
|
|
449
|
+
else {
|
|
450
|
+
// For charts, need column mapping
|
|
451
|
+
if (!columnMapping ||
|
|
452
|
+
!columnMapping.xColumn ||
|
|
453
|
+
!columnMapping.yColumns) {
|
|
454
|
+
throw new Error("columnMapping with xColumn and yColumns is required for chart visualizations");
|
|
455
|
+
}
|
|
456
|
+
const series = transformToSeries(result.rows, result.columns, columnMapping);
|
|
457
|
+
const xLabels = useColumnAsXLabel
|
|
458
|
+
? extractXLabels(result.rows, columnMapping.xColumn)
|
|
459
|
+
: undefined;
|
|
460
|
+
viz = stateManager.addVisualization({
|
|
461
|
+
type: visualizationType,
|
|
462
|
+
series,
|
|
463
|
+
title,
|
|
464
|
+
description,
|
|
465
|
+
xLabels,
|
|
466
|
+
});
|
|
467
|
+
}
|
|
468
|
+
const port = getServerPort() || 3000;
|
|
469
|
+
return {
|
|
470
|
+
content: [
|
|
471
|
+
{
|
|
472
|
+
type: "text",
|
|
473
|
+
text: `Created ${visualizationType} visualization with ${result.rows.length} rows. ID: ${viz.id}. View at http://localhost:${port}`,
|
|
474
|
+
},
|
|
475
|
+
],
|
|
476
|
+
};
|
|
477
|
+
}
|
|
478
|
+
catch (error) {
|
|
479
|
+
return {
|
|
480
|
+
content: [
|
|
481
|
+
{
|
|
482
|
+
type: "text",
|
|
483
|
+
text: `Error executing query and creating visualization: ${error instanceof Error ? error.message : String(error)}`,
|
|
484
|
+
},
|
|
485
|
+
],
|
|
486
|
+
};
|
|
487
|
+
}
|
|
488
|
+
}
|
|
203
489
|
default:
|
|
204
490
|
throw new Error(`Unknown tool: ${name}`);
|
|
205
491
|
}
|
package/dist/state.js
CHANGED
|
@@ -28,6 +28,17 @@ class StateManager {
|
|
|
28
28
|
this.state.visualizations = this.state.visualizations.filter((v) => v.id !== id);
|
|
29
29
|
return this.state.visualizations.length < initialLength;
|
|
30
30
|
}
|
|
31
|
+
updateVisualization(id, updates) {
|
|
32
|
+
const index = this.state.visualizations.findIndex((v) => v.id === id);
|
|
33
|
+
if (index === -1) {
|
|
34
|
+
return null;
|
|
35
|
+
}
|
|
36
|
+
this.state.visualizations[index] = {
|
|
37
|
+
...this.state.visualizations[index],
|
|
38
|
+
...updates,
|
|
39
|
+
};
|
|
40
|
+
return this.state.visualizations[index];
|
|
41
|
+
}
|
|
31
42
|
clearCanvas() {
|
|
32
43
|
this.state.visualizations = [];
|
|
33
44
|
}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
/**
 * Convert SQL query results into the dashboard's TableData shape.
 *
 * @param {object[]} rows - Result rows keyed by column name.
 * @param {string[]} columns - Column names, in display order.
 * @returns {{headers: string[], rows: any[][]}} Headers plus one value
 *   array per row, ordered to match `columns`. Empty input yields rows: [].
 */
export function transformToTable(rows, columns) {
    const body = rows.map((record) => columns.map((name) => record[name]));
    return { headers: columns, rows: body };
}
|
|
16
|
+
/**
 * Convert SQL query results into SeriesData for charts.
 *
 * Dispatch order: a seriesColumn groups rows into one series per distinct
 * value; otherwise a groupByColumn does the same; otherwise each yColumn
 * becomes its own series over the shared xColumn.
 *
 * @param {object[]} rows - Result rows keyed by column name.
 * @param {string[]} columns - Column names present in the results.
 * @param {object} mapping - { xColumn, yColumns, seriesColumn?, groupByColumn? }.
 * @returns {{name: string, data: number[][]}[]} Chart series ([] for no rows).
 * @throws {Error} When required mapping fields are missing or reference
 *   columns absent from the results.
 */
export function transformToSeries(rows, columns, mapping) {
    if (rows.length === 0) {
        return [];
    }
    const { xColumn, yColumns, seriesColumn, groupByColumn } = mapping;
    // Required mapping fields.
    if (!xColumn) {
        throw new Error("xColumn is required for chart transformation");
    }
    if (!yColumns || yColumns.length === 0) {
        throw new Error("At least one yColumn is required for chart transformation");
    }
    // Every referenced column must exist in the query results.
    const ensurePresent = (col, label) => {
        if (!columns.includes(col)) {
            throw new Error(`${label} '${col}' not found in query results`);
        }
    };
    ensurePresent(xColumn, "xColumn");
    for (const yCol of yColumns) {
        ensurePresent(yCol, "yColumn");
    }
    if (seriesColumn) {
        ensurePresent(seriesColumn, "seriesColumn");
    }
    if (groupByColumn) {
        ensurePresent(groupByColumn, "groupByColumn");
    }
    // seriesColumn takes precedence; both grouping modes use only the
    // first yColumn.
    if (seriesColumn) {
        return transformWithSeriesColumn(rows, xColumn, yColumns[0], seriesColumn);
    }
    if (groupByColumn) {
        return transformWithGroupBy(rows, xColumn, yColumns[0], groupByColumn);
    }
    return transformMultipleYColumns(rows, xColumn, yColumns);
}
|
|
57
|
+
/**
 * Group rows into one chart series per distinct series-column value
 * (e.g. one line per product). Within a series, points sharing an x value
 * are deduplicated — the later row wins — and points are sorted by x.
 *
 * @param {object[]} rows - Result rows keyed by column name.
 * @param {string} xColumn - Column supplying x values.
 * @param {string} yColumn - Column supplying y values.
 * @param {string} seriesColumn - Column whose values name the series.
 * @returns {{name: string, data: number[][]}[]}
 */
function transformWithSeriesColumn(rows, xColumn, yColumn, seriesColumn) {
    // label -> Map(x -> y); Map preserves first-seen series order.
    const buckets = new Map();
    for (const row of rows) {
        const label = String(row[seriesColumn]);
        let points = buckets.get(label);
        if (!points) {
            points = new Map();
            buckets.set(label, points);
        }
        points.set(parseNumericValue(row[xColumn]), parseNumericValue(row[yColumn]));
    }
    return Array.from(buckets, ([name, points]) => ({
        name,
        data: [...points].sort((p, q) => p[0] - q[0]),
    }));
}
|
|
81
|
+
/**
 * Transform data with a group by column.
 *
 * Currently identical to seriesColumn grouping: each distinct value of
 * groupByColumn becomes its own series. Kept as a separate entry point so
 * the two mapping options can diverge later without changing callers.
 */
function transformWithGroupBy(rows, xColumn, yColumn, groupByColumn) {
    return transformWithSeriesColumn(rows, xColumn, yColumn, groupByColumn);
}
|
|
87
|
+
/**
 * Build one chart series per y column, all sharing the same x column.
 * Each series' points are sorted by x value.
 *
 * @param {object[]} rows - Result rows keyed by column name.
 * @param {string} xColumn - Column supplying x values.
 * @param {string[]} yColumns - Columns that each become a series.
 * @returns {{name: string, data: number[][]}[]}
 */
function transformMultipleYColumns(rows, xColumn, yColumns) {
    return yColumns.map((yColumn) => ({
        name: yColumn,
        data: rows
            .map((row) => [parseNumericValue(row[xColumn]), parseNumericValue(row[yColumn])])
            .sort((p, q) => p[0] - q[0]),
    }));
}
|
|
107
|
+
/**
 * Coerce a value to a number for charting.
 *
 * Resolution order: numbers pass through unchanged (including NaN);
 * strings are tried as floats first, then as dates (epoch milliseconds);
 * anything else falls back to Number() coercion.
 *
 * NOTE(review): because parseFloat runs before Date.parse, a string like
 * "2020-01-02" becomes 2020, not a timestamp — only non-numeric-leading
 * date strings reach the date branch. Confirm this is intended.
 *
 * @param {*} value - Raw cell value from a query result.
 * @returns {number}
 * @throws {Error} When no conversion yields a number.
 */
function parseNumericValue(value) {
    if (typeof value === "number") {
        return value;
    }
    if (typeof value === "string") {
        const asFloat = parseFloat(value);
        if (!Number.isNaN(asFloat)) {
            return asFloat;
        }
        const asTimestamp = Date.parse(value);
        if (!Number.isNaN(asTimestamp)) {
            return asTimestamp;
        }
    }
    const coerced = Number(value);
    if (!Number.isNaN(coerced)) {
        return coerced;
    }
    throw new Error(`Cannot convert value to number: ${value}`);
}
|
|
133
|
+
/**
 * Collect the x-axis labels for a chart: the stringified value of
 * xColumn from every row, in row order.
 *
 * @param {object[]} rows - Result rows keyed by column name.
 * @param {string} xColumn - Column whose values become the labels.
 * @returns {string[]}
 */
export function extractXLabels(rows, xColumn) {
    const labels = [];
    for (const record of rows) {
        labels.push(String(record[xColumn]));
    }
    return labels;
}
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@gluip/chart-canvas-mcp",
|
|
3
|
-
"version": "0.
|
|
4
|
-
"description": "MCP server for creating interactive visualizations (charts, diagrams, tables) through AI assistants",
|
|
3
|
+
"version": "0.3.0",
|
|
4
|
+
"description": "MCP server for creating interactive visualizations (charts, diagrams, tables) and querying data sources (SQLite, CSV, Parquet, JSON) through AI assistants",
|
|
5
5
|
"author": "Martijn",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"type": "module",
|
|
@@ -22,7 +22,11 @@
|
|
|
22
22
|
"echarts",
|
|
23
23
|
"mermaid",
|
|
24
24
|
"ai",
|
|
25
|
-
"llm"
|
|
25
|
+
"llm",
|
|
26
|
+
"sqlite",
|
|
27
|
+
"database",
|
|
28
|
+
"sql",
|
|
29
|
+
"query"
|
|
26
30
|
],
|
|
27
31
|
"repository": {
|
|
28
32
|
"type": "git",
|
|
@@ -39,6 +43,7 @@
|
|
|
39
43
|
},
|
|
40
44
|
"dependencies": {
|
|
41
45
|
"@modelcontextprotocol/sdk": "^1.0.4",
|
|
46
|
+
"@duckdb/node-api": "^1.4.3-r.3",
|
|
42
47
|
"express": "^4.21.2",
|
|
43
48
|
"cors": "^2.8.5",
|
|
44
49
|
"open": "^10.1.0"
|