@gluip/chart-canvas-mcp 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -16,11 +16,24 @@ Watch the [full demo on YouTube](https://www.youtube.com/watch?v=XVucQstPisc) to
16
16
  🎨 **Interactive Dashboard**: Drag-and-drop grid layout with real-time updates
17
17
  🔄 **Live Synchronization**: Changes appear instantly in your browser
18
18
  📊 **Rich Visualizations**: Powered by ECharts and Mermaid
19
- 🗄️ **SQL Database Integration**: Query SQLite databases directly and visualize results
20
- ⚡ **Smart Data Flow**: Execute queries server-side without passing data through LLM
21
- 🚀 **Easy Setup**: One command to get started
19
+ 💾 **Universal Data Sources**: Query SQLite, CSV, Parquet, JSON, and NDJSON files directly
20
+ ⚡ **Smart Data Flow**: Execute queries server-side - data stays local, never sent to LLM
21
+ 🔒 **Privacy First**: Your data never leaves your machine
22
+ 🚀 **Easy Setup**: One command to get started
22
23
  🌐 **Production Ready**: Built-in production mode with optimized builds
23
24
 
25
+ ## Supported Data Sources
26
+
27
+ Chart Canvas can query and visualize data from multiple file formats:
28
+
29
+ - **SQLite** (`.db`, `.sqlite`, `.sqlite3`) - Relational databases
30
+ - **CSV** (`.csv`) - Comma-separated values
31
+ - **Parquet** (`.parquet`) - Columnar storage format
32
+ - **JSON** (`.json`) - JSON arrays of objects
33
+ - **NDJSON** (`.jsonl`, `.ndjson`) - Newline-delimited JSON
34
+
35
+ **Privacy & Performance**: All queries execute locally on your machine using DuckDB. Query results are transformed into visualizations server-side - only metadata (chart configuration) is sent to the LLM, never your actual data. This makes it fast, scalable, and private.
36
+
24
37
  ## Quick Start
25
38
 
26
39
  ### Installation
package/dist/database.js CHANGED
@@ -1,6 +1,6 @@
1
- import Database from "better-sqlite3";
1
+ import { DuckDBInstance } from "@duckdb/node-api";
2
2
  import { existsSync } from "fs";
3
- import { resolve } from "path";
3
+ import { resolve, extname } from "path";
4
4
  const FORBIDDEN_KEYWORDS = [
5
5
  "INSERT",
6
6
  "UPDATE",
@@ -12,20 +12,41 @@ const FORBIDDEN_KEYWORDS = [
12
12
  "REPLACE",
13
13
  "ATTACH",
14
14
  "DETACH",
15
- "PRAGMA",
16
15
  ];
17
16
  const MAX_ROWS = 10000;
18
17
  const QUERY_TIMEOUT_MS = 5000;
19
18
  /**
20
- * Validate database path - must exist and be a file
19
+ * Validate database/file path - must exist and be a file
21
20
  */
22
21
  export function validateDatabasePath(dbPath) {
23
22
  const resolvedPath = resolve(dbPath);
24
23
  if (!existsSync(resolvedPath)) {
25
- throw new Error(`Database file does not exist: ${dbPath}`);
24
+ throw new Error(`File does not exist: ${dbPath}`);
26
25
  }
27
26
  return resolvedPath;
28
27
  }
28
+ /**
29
+ * Detect file type based on extension
30
+ */
31
+ export function getFileType(filePath) {
32
+ const ext = extname(filePath).toLowerCase();
33
+ if (ext === ".db" || ext === ".sqlite" || ext === ".sqlite3") {
34
+ return "sqlite";
35
+ }
36
+ if (ext === ".csv") {
37
+ return "csv";
38
+ }
39
+ if (ext === ".parquet") {
40
+ return "parquet";
41
+ }
42
+ if (ext === ".json") {
43
+ return "json";
44
+ }
45
+ if (ext === ".jsonl" || ext === ".ndjson") {
46
+ return "ndjson";
47
+ }
48
+ return "unknown";
49
+ }
29
50
  /**
30
51
  * Validate SQL query - must be read-only (SELECT only)
31
52
  */
@@ -44,69 +65,160 @@ export function validateReadOnlyQuery(sql) {
44
65
  }
45
66
  }
46
67
  /**
47
- * Get database schema for all tables
68
+ * Get the DuckDB read function for a file type
69
+ */
70
+ function getReadFunction(filePath, fileType) {
71
+ const escapedPath = filePath.replace(/'/g, "''");
72
+ switch (fileType) {
73
+ case "sqlite":
74
+ // For SQLite, we'll attach the database
75
+ return `sqlite_scan('${escapedPath}', '')`;
76
+ case "csv":
77
+ return `read_csv_auto('${escapedPath}')`;
78
+ case "parquet":
79
+ return `read_parquet('${escapedPath}')`;
80
+ case "json":
81
+ return `read_json_auto('${escapedPath}')`;
82
+ case "ndjson":
83
+ return `read_json_auto('${escapedPath}', format='newline_delimited')`;
84
+ default:
85
+ throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
86
+ }
87
+ }
88
+ /**
89
+ * Get table name from file path
90
+ */
91
+ function getTableName(filePath) {
92
+ return (filePath
93
+ .split("/")
94
+ .pop()
95
+ ?.replace(/\.(db|sqlite|sqlite3|csv|parquet|json|jsonl|ndjson)$/i, "") ||
96
+ "data");
97
+ }
98
+ /**
99
+ * Get database/file schema using DuckDB
48
100
  */
49
- export function getDatabaseSchema(dbPath) {
101
+ export async function getDatabaseSchema(dbPath) {
50
102
  const resolvedPath = validateDatabasePath(dbPath);
51
- const db = new Database(resolvedPath, { readonly: true });
103
+ const fileType = getFileType(resolvedPath);
104
+ if (fileType === "unknown") {
105
+ throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
106
+ }
107
+ const instance = await DuckDBInstance.create(":memory:");
108
+ const conn = await instance.connect();
52
109
  try {
53
- // Get all table names (excluding sqlite internal tables)
54
- const tables = db
55
- .prepare(`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`)
56
- .all();
57
- const schema = {
58
- tables: [],
59
- };
60
- // Get columns for each table
61
- for (const table of tables) {
62
- const columns = db
63
- .prepare(`PRAGMA table_info(${table.name})`)
64
- .all();
65
- schema.tables.push({
66
- name: table.name,
67
- columns: columns.map((col) => ({
68
- name: col.name,
69
- type: col.type,
70
- notNull: col.notnull === 1,
71
- defaultValue: col.dflt_value,
72
- primaryKey: col.pk === 1,
73
- })),
74
- });
110
+ // For SQLite, we need to install and load the sqlite extension
111
+ if (fileType === "sqlite") {
112
+ await conn.run("INSTALL sqlite");
113
+ await conn.run("LOAD sqlite");
114
+ }
115
+ const readFunc = getReadFunction(resolvedPath, fileType);
116
+ const tableName = getTableName(resolvedPath);
117
+ // For SQLite, we need to query the sqlite_master table
118
+ if (fileType === "sqlite") {
119
+ const escapedPath = resolvedPath.replace(/'/g, "''");
120
+ // Get table names
121
+ const tablesReader = await conn.runAndReadAll(`SELECT name FROM sqlite_scan('${escapedPath}', 'sqlite_master') WHERE type='table' AND name NOT LIKE 'sqlite_%'`);
122
+ const tables = tablesReader.getRowObjects();
123
+ if (!tables || tables.length === 0) {
124
+ return { tables: [] };
125
+ }
126
+ // Get columns for each table
127
+ const tableSchemas = [];
128
+ for (const table of tables) {
129
+ const columnsReader = await conn.runAndReadAll(`DESCRIBE SELECT * FROM sqlite_scan('${escapedPath}', '${table.name}') LIMIT 0`);
130
+ const columns = columnsReader.getRowObjects();
131
+ tableSchemas.push({
132
+ name: table.name,
133
+ columns: columns.map((col) => ({
134
+ name: col.column_name,
135
+ type: col.column_type,
136
+ notNull: false,
137
+ defaultValue: null,
138
+ primaryKey: false,
139
+ })),
140
+ });
141
+ }
142
+ return { tables: tableSchemas };
143
+ }
144
+ else {
145
+ // For file-based formats (CSV, Parquet, JSON)
146
+ const reader = await conn.runAndReadAll(`DESCRIBE SELECT * FROM ${readFunc} LIMIT 0`);
147
+ const columns = reader.getRowObjects();
148
+ const columnInfos = columns.map((col) => ({
149
+ name: col.column_name,
150
+ type: col.column_type,
151
+ notNull: false,
152
+ defaultValue: null,
153
+ primaryKey: false,
154
+ }));
155
+ return {
156
+ tables: [
157
+ {
158
+ name: tableName,
159
+ columns: columnInfos,
160
+ },
161
+ ],
162
+ };
75
163
  }
76
- return schema;
77
164
  }
78
165
  finally {
79
- db.close();
166
+ conn.closeSync();
167
+ instance.closeSync();
80
168
  }
81
169
  }
82
170
  /**
83
- * Execute a SQL query with timeout and row limit
171
+ * Execute a SQL query using DuckDB
84
172
  */
85
- export function executeQuery(dbPath, sql) {
173
+ export async function executeQuery(dbPath, sql) {
86
174
  const resolvedPath = validateDatabasePath(dbPath);
175
+ const fileType = getFileType(resolvedPath);
87
176
  validateReadOnlyQuery(sql);
88
- const db = new Database(resolvedPath, {
89
- readonly: true,
90
- timeout: QUERY_TIMEOUT_MS,
91
- });
177
+ if (fileType === "unknown") {
178
+ throw new Error(`Unsupported file type. Supported: .db/.sqlite/.sqlite3 (SQLite), .csv, .parquet, .json, .jsonl/.ndjson`);
179
+ }
180
+ const instance = await DuckDBInstance.create(":memory:");
181
+ const conn = await instance.connect();
92
182
  try {
93
- // Prepare and execute query
94
- const stmt = db.prepare(sql);
95
- const rows = stmt.all();
96
- // Check row limit
97
- if (rows.length > MAX_ROWS) {
98
- throw new Error(`Query returned ${rows.length} rows, exceeding limit of ${MAX_ROWS}`);
183
+ // For SQLite, install and load the extension first
184
+ if (fileType === "sqlite") {
185
+ await conn.run("INSTALL sqlite");
186
+ await conn.run("LOAD sqlite");
187
+ }
188
+ const readFunc = getReadFunction(resolvedPath, fileType);
189
+ const tableName = getTableName(resolvedPath);
190
+ // Replace table references with read function calls
191
+ let modifiedSql = sql;
192
+ // For SQLite, don't modify - queries reference tables directly via sqlite_scan
193
+ if (fileType !== "sqlite") {
194
+ modifiedSql = sql.replace(new RegExp(`\\b${tableName}\\b`, "gi"), readFunc);
195
+ }
196
+ const reader = await conn.runAndReadAll(modifiedSql);
197
+ const rawRows = reader.getRowObjects();
198
+ if (rawRows.length > MAX_ROWS) {
199
+ throw new Error(`Query returned ${rawRows.length} rows, exceeding limit of ${MAX_ROWS}`);
99
200
  }
100
- // Get column names from first row or statement columns
101
- const columns = rows.length > 0
102
- ? Object.keys(rows[0])
103
- : stmt.columns().map((c) => c.name);
201
+ // Convert BigInt values to numbers for JSON serialization
202
+ const rows = rawRows.map((row) => {
203
+ const converted = {};
204
+ for (const [key, value] of Object.entries(row)) {
205
+ if (typeof value === "bigint") {
206
+ converted[key] = Number(value);
207
+ }
208
+ else {
209
+ converted[key] = value;
210
+ }
211
+ }
212
+ return converted;
213
+ });
214
+ const columns = reader.columnNames();
104
215
  return {
105
216
  rows,
106
217
  columns,
107
218
  };
108
219
  }
109
220
  finally {
110
- db.close();
221
+ conn.closeSync();
222
+ instance.closeSync();
111
223
  }
112
224
  }
package/dist/index.js CHANGED
@@ -109,6 +109,80 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
109
109
  required: ["id"],
110
110
  },
111
111
  },
112
+ {
113
+ name: "updateVisualization",
114
+ description: "Update an existing visualization on the canvas. You can change its data, title, description, or any other property.",
115
+ inputSchema: {
116
+ type: "object",
117
+ properties: {
118
+ id: {
119
+ type: "string",
120
+ description: "ID of the visualization to update",
121
+ },
122
+ type: {
123
+ type: "string",
124
+ enum: ["line", "bar", "scatter", "table", "flowchart", "pie"],
125
+ description: "Type of visualization",
126
+ },
127
+ series: {
128
+ type: "array",
129
+ items: {
130
+ type: "object",
131
+ properties: {
132
+ name: { type: "string" },
133
+ data: {
134
+ type: "array",
135
+ items: {
136
+ type: "array",
137
+ items: { type: "number" },
138
+ minItems: 2,
139
+ maxItems: 2,
140
+ },
141
+ },
142
+ },
143
+ required: ["name", "data"],
144
+ },
145
+ description: "Updated data series for charts",
146
+ },
147
+ table: {
148
+ type: "object",
149
+ properties: {
150
+ headers: {
151
+ type: "array",
152
+ items: { type: "string" },
153
+ },
154
+ rows: {
155
+ type: "array",
156
+ items: {
157
+ type: "array",
158
+ items: { type: ["string", "number"] },
159
+ },
160
+ },
161
+ },
162
+ required: ["headers", "rows"],
163
+ description: "Updated table data",
164
+ },
165
+ mermaid: {
166
+ type: "string",
167
+ description: "Updated Mermaid diagram syntax",
168
+ },
169
+ title: {
170
+ type: "string",
171
+ description: "Updated title",
172
+ },
173
+ description: {
174
+ type: "string",
175
+ description: "Updated description",
176
+ },
177
+ xLabels: {
178
+ type: "array",
179
+ items: { type: "string" },
180
+ description: "Updated x-axis labels",
181
+ },
182
+ },
183
+ required: ["id"],
184
+ },
185
+ },
112
186
  {
113
187
  name: "clearCanvas",
114
188
  description: "Remove all visualizations from the canvas",
@@ -127,13 +201,13 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
127
201
  },
128
202
  {
129
203
  name: "getDatabaseSchema",
130
- description: "Get the schema of a SQLite database including all tables and columns. Use this to understand the database structure before writing queries.",
204
+ description: "Get the schema of a data source (SQLite database, CSV, Parquet, or JSON file) including all tables/columns. Use this to understand the data structure before writing queries.",
131
205
  inputSchema: {
132
206
  type: "object",
133
207
  properties: {
134
208
  databasePath: {
135
209
  type: "string",
136
- description: "Path to the SQLite database file (e.g., './data/atletiek.db' or '/absolute/path/to/db.sqlite')",
210
+ description: "Path to the data file. Supported formats: SQLite (.db, .sqlite, .sqlite3), CSV (.csv), Parquet (.parquet), JSON (.json, .jsonl, .ndjson)",
137
211
  },
138
212
  },
139
213
  required: ["databasePath"],
@@ -141,13 +215,13 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
141
215
  },
142
216
  {
143
217
  name: "queryAndVisualize",
144
- description: "Execute a SQL query on a SQLite database and create a visualization from the results. The query must be read-only (SELECT only). You must specify how to map columns to the visualization.",
218
+ description: "Execute a SQL query on a data source (SQLite, CSV, Parquet, or JSON file) and create a visualization from the results. The query must be read-only (SELECT only). You must specify how to map columns to the visualization.",
145
219
  inputSchema: {
146
220
  type: "object",
147
221
  properties: {
148
222
  databasePath: {
149
223
  type: "string",
150
- description: "Path to the SQLite database file",
224
+ description: "Path to the data file (SQLite, CSV, Parquet, JSON)",
151
225
  },
152
226
  query: {
153
227
  type: "string",
@@ -238,6 +312,35 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
238
312
  ],
239
313
  };
240
314
  }
315
+ case "updateVisualization": {
316
+ const { id, type, series, table, mermaid, title, description, xLabels } = args;
317
+ const updates = {};
318
+ if (type !== undefined)
319
+ updates.type = type;
320
+ if (series !== undefined)
321
+ updates.series = series;
322
+ if (table !== undefined)
323
+ updates.table = table;
324
+ if (mermaid !== undefined)
325
+ updates.mermaid = mermaid;
326
+ if (title !== undefined)
327
+ updates.title = title;
328
+ if (description !== undefined)
329
+ updates.description = description;
330
+ if (xLabels !== undefined)
331
+ updates.xLabels = xLabels;
332
+ const updated = stateManager.updateVisualization(id, updates);
333
+ return {
334
+ content: [
335
+ {
336
+ type: "text",
337
+ text: updated
338
+ ? `Updated visualization ${id}`
339
+ : `Visualization ${id} not found`,
340
+ },
341
+ ],
342
+ };
343
+ }
241
344
  case "clearCanvas": {
242
345
  stateManager.clearCanvas();
243
346
  return {
@@ -277,9 +380,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
277
380
  case "getDatabaseSchema": {
278
381
  try {
279
382
  const { databasePath } = args;
280
- const schema = getDatabaseSchema(databasePath);
383
+ const schema = await getDatabaseSchema(databasePath);
281
384
  // Format schema as readable text
282
- let schemaText = `Database schema for: ${databasePath}\n\n`;
385
+ let schemaText = `Data source schema for: ${databasePath}\n\n`;
283
386
  for (const table of schema.tables) {
284
387
  schemaText += `Table: ${table.name}\n`;
285
388
  schemaText += `Columns:\n`;
@@ -310,7 +413,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
310
413
  content: [
311
414
  {
312
415
  type: "text",
313
- text: `Error getting database schema: ${error instanceof Error ? error.message : String(error)}`,
416
+ text: `Error getting data source schema: ${error instanceof Error ? error.message : String(error)}`,
314
417
  },
315
418
  ],
316
419
  };
@@ -320,7 +423,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
320
423
  try {
321
424
  const { databasePath, query, visualizationType, columnMapping, title, description, useColumnAsXLabel, } = args;
322
425
  // Execute query
323
- const result = executeQuery(databasePath, query);
426
+ const result = await executeQuery(databasePath, query);
324
427
  if (result.rows.length === 0) {
325
428
  return {
326
429
  content: [
package/dist/state.js CHANGED
@@ -28,6 +28,17 @@ class StateManager {
28
28
  this.state.visualizations = this.state.visualizations.filter((v) => v.id !== id);
29
29
  return this.state.visualizations.length < initialLength;
30
30
  }
31
+ updateVisualization(id, updates) {
32
+ const index = this.state.visualizations.findIndex((v) => v.id === id);
33
+ if (index === -1) {
34
+ return null;
35
+ }
36
+ this.state.visualizations[index] = {
37
+ ...this.state.visualizations[index],
38
+ ...updates,
39
+ };
40
+ return this.state.visualizations[index];
41
+ }
31
42
  clearCanvas() {
32
43
  this.state.visualizations = [];
33
44
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@gluip/chart-canvas-mcp",
3
- "version": "0.2.0",
4
- "description": "MCP server for creating interactive visualizations (charts, diagrams, tables) and querying SQLite databases through AI assistants",
3
+ "version": "0.3.0",
4
+ "description": "MCP server for creating interactive visualizations (charts, diagrams, tables) and querying data sources (SQLite, CSV, Parquet, JSON) through AI assistants",
5
5
  "author": "Martijn",
6
6
  "license": "MIT",
7
7
  "type": "module",
@@ -43,16 +43,15 @@
43
43
  },
44
44
  "dependencies": {
45
45
  "@modelcontextprotocol/sdk": "^1.0.4",
46
+ "@duckdb/node-api": "^1.4.3-r.3",
46
47
  "express": "^4.21.2",
47
48
  "cors": "^2.8.5",
48
- "open": "^10.1.0",
49
- "better-sqlite3": "^11.8.1"
49
+ "open": "^10.1.0"
50
50
  },
51
51
  "devDependencies": {
52
52
  "@types/express": "^5.0.0",
53
53
  "@types/cors": "^2.8.17",
54
54
  "@types/node": "^22.10.5",
55
- "@types/better-sqlite3": "^7.6.12",
56
55
  "typescript": "^5.7.3",
57
56
  "tsx": "^4.19.2"
58
57
  }