crushdataai 1.2.13 → 1.2.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/.agent/workflows/data-analyst.md +23 -0
- package/assets/.claude/skills/data-analyst/SKILL.md +48 -0
- package/assets/.cursor/commands/data-analyst.md +20 -0
- package/assets/.github/prompts/data-analyst.prompt.md +18 -0
- package/assets/.kiro/steering/data-analyst.md +18 -0
- package/assets/.windsurf/workflows/data-analyst.md +18 -0
- package/dist/connectors/cloud/index.d.ts +3 -1
- package/dist/connectors/cloud/index.js +34 -6
- package/dist/connectors/index.d.ts +1 -0
- package/dist/connectors/mysql/index.d.ts +1 -0
- package/dist/connectors/mysql/index.js +23 -0
- package/dist/connectors/postgresql/index.d.ts +1 -0
- package/dist/connectors/postgresql/index.js +16 -0
- package/dist/dashboard-server.d.ts +1 -0
- package/dist/dashboard-server.js +78 -0
- package/dist/index.js +17 -1
- package/dist/routes/dashboard.d.ts +2 -0
- package/dist/routes/dashboard.js +166 -0
- package/dist/services/query-executor.d.ts +5 -0
- package/dist/services/query-executor.js +80 -0
- package/dist/types/dashboard.d.ts +48 -0
- package/dist/types/dashboard.js +3 -0
- package/package.json +4 -3
- package/ui-dashboard-dist/assets/index-SkyAs8Zl.js +4185 -0
- package/ui-dashboard-dist/assets/index-uepFwkLY.css +1 -0
- package/ui-dashboard-dist/favicon.svg +13 -0
- package/ui-dashboard-dist/index.html +14 -0
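The headline change in this release is a dashboard pipeline: the assistant workflows now write a `dashboard.json` under `reports/dashboards/`, and a new `crushdataai dashboard` command serves it through a bundled UI. A minimal TypeScript sketch of that JSON contract, inferred from the workflow snippets below; the authoritative shape lives in `package/dist/types/dashboard.d.ts` (+48 lines, not expanded in this diff), so field names and optionality here are assumptions.

```typescript
// Hedged sketch of the dashboard file the workflows emit (inferred from the
// Python examples in this diff, not from types/dashboard.d.ts itself).
interface DashboardFile {
  metadata: { title: string; generatedAt: string; dataRange?: string };
  kpis: Array<{
    id: string;
    label: string;
    value: string;
    trend?: string;              // e.g. "+12%"
    trendDirection?: 'up' | 'down';
  }>;
  charts: Array<{
    id: string;
    type: 'line' | 'bar' | 'pie' | 'area' | 'scatter' | 'donut' | 'table';
    title: string;
    data: { labels: string[]; datasets: Array<{ label: string; values: number[] }> };
  }>;
}
```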
package/assets/.agent/workflows/data-analyst.md
CHANGED

@@ -114,6 +114,29 @@ print(f"Missing values:\n{df.isnull().sum()}")
 
 ---
 
+## Step 5: Generate Dashboard Output
+
+After analysis, save for visualization:
+```python
+from pathlib import Path
+import json
+from datetime import datetime
+
+Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+
+dashboard = {
+    "metadata": {"title": "Analysis Dashboard", "generatedAt": datetime.now().isoformat()},
+    "kpis": [{"id": "kpi-1", "label": "Total", "value": "$50K", "trend": "+12%", "trendDirection": "up"}],
+    "charts": [{"id": "chart-1", "type": "line", "title": "Trend", "data": {"labels": ["Jan","Feb"], "datasets": [{"label": "Revenue", "values": [10000,20000]}]}}]
+}
+
+with open("reports/dashboards/dashboard.json", "w") as f:
+    json.dump(dashboard, f, indent=2)
+```
+Tell user: "Run `npx crushdataai dashboard` to view."
+
+---
+
 ## Pre-Delivery Checklist
 
 - [ ] Business question answered
package/assets/.claude/skills/data-analyst/SKILL.md
CHANGED

@@ -170,6 +170,54 @@ FROM table;
 
 ---
 
+## Step 5: Generate Dashboard Output
+
+**After analysis, save results for dashboard visualization:**
+
+1. **Search for best chart type:**
+   ```bash
+   python3 .claude/skills/data-analyst/scripts/search.py "<metric description>" --domain chart
+   ```
+
+2. **Create dashboard JSON:**
+   ```python
+   from pathlib import Path
+   import json
+   from datetime import datetime
+
+   Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+
+   dashboard = {
+       "metadata": {
+           "title": "Analysis Dashboard",
+           "generatedAt": datetime.now().isoformat(),
+           "dataRange": f"{start_date} to {end_date}"
+       },
+       "kpis": [
+           {"id": "kpi-1", "label": "Total Revenue", "value": "$50,000", "trend": "+12%", "trendDirection": "up"}
+       ],
+       "charts": [
+           {
+               "id": "chart-1",
+               "type": "line",  # line, bar, pie, area, scatter, donut, table
+               "title": "Monthly Trend",
+               "data": {
+                   "labels": ["Jan", "Feb", "Mar"],
+                   "datasets": [{"label": "Revenue", "values": [10000, 15000, 25000]}]
+               }
+           }
+       ]
+   }
+
+   with open("reports/dashboards/dashboard.json", "w") as f:
+       json.dump(dashboard, f, indent=2)
+   ```
+
+3. **Tell user:**
+   > "Dashboard ready! Run `npx crushdataai dashboard` to view."
+
+---
+
 ## Pre-Delivery Checklist
 
 Before presenting final results:
package/assets/.cursor/commands/data-analyst.md
CHANGED

@@ -60,6 +60,26 @@ Report findings and ask user for confirmation.
 - Compare to benchmarks
 - Present for user validation
 
+### 5. Generate Dashboard Output
+After analysis, save for visualization:
+```python
+from pathlib import Path
+import json
+from datetime import datetime
+
+Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+
+dashboard = {
+    "metadata": {"title": "Analysis", "generatedAt": datetime.now().isoformat()},
+    "kpis": [{"id": "kpi-1", "label": "Total", "value": "$50K", "trend": "+12%"}],
+    "charts": [{"id": "chart-1", "type": "line", "title": "Trend", "data": {"labels": ["Jan","Feb"], "datasets": [{"label": "Revenue", "values": [10000,20000]}]}}]
+}
+
+with open("reports/dashboards/dashboard.json", "w") as f:
+    json.dump(dashboard, f, indent=2)
+```
+Tell user: "Run `npx crushdataai dashboard` to view."
+
 ---
 
 ## Quick Reference
package/assets/.github/prompts/data-analyst.prompt.md
CHANGED

@@ -58,3 +58,21 @@ Report and confirm before proceeding.
 - Check totals
 - Compare benchmarks
 - User validation
+
+### 5. Generate Dashboard Output
+After analysis, save for visualization:
+```python
+from pathlib import Path
+import json
+from datetime import datetime
+
+Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+dashboard = {
+    "metadata": {"title": "Analysis", "generatedAt": datetime.now().isoformat()},
+    "kpis": [{"id": "kpi-1", "label": "Total", "value": "$50K", "trend": "+12%"}],
+    "charts": [{"id": "chart-1", "type": "line", "title": "Trend", "data": {"labels": ["Jan","Feb"], "datasets": [{"label": "Revenue", "values": [10000,20000]}]}}]
+}
+with open("reports/dashboards/dashboard.json", "w") as f:
+    json.dump(dashboard, f, indent=2)
+```
+Tell user: "Run `npx crushdataai dashboard` to view."
package/assets/.kiro/steering/data-analyst.md
CHANGED

@@ -60,3 +60,21 @@ Ask: "Does this match your expectation?"
 - Compare to benchmarks
 - Document assumptions
 - Present for user confirmation
+
+### 5. Generate Dashboard Output
+After analysis, save for visualization:
+```python
+from pathlib import Path
+import json
+from datetime import datetime
+
+Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+dashboard = {
+    "metadata": {"title": "Analysis", "generatedAt": datetime.now().isoformat()},
+    "kpis": [{"id": "kpi-1", "label": "Total", "value": "$50K", "trend": "+12%"}],
+    "charts": [{"id": "chart-1", "type": "line", "title": "Trend", "data": {"labels": ["Jan","Feb"], "datasets": [{"label": "Revenue", "values": [10000,20000]}]}}]
+}
+with open("reports/dashboards/dashboard.json", "w") as f:
+    json.dump(dashboard, f, indent=2)
+```
+Tell user: "Run `npx crushdataai dashboard` to view."
package/assets/.windsurf/workflows/data-analyst.md
CHANGED

@@ -58,3 +58,21 @@ print(f"Shape: {df.shape}, Dates: {df['date'].min()} to {df['date'].max()}")
 - Check totals are reasonable
 - Compare to benchmarks
 - Present for user validation
+
+### 5. Generate Dashboard Output
+After analysis, save for visualization:
+```python
+from pathlib import Path
+import json
+from datetime import datetime
+
+Path("reports/dashboards").mkdir(parents=True, exist_ok=True)
+dashboard = {
+    "metadata": {"title": "Analysis", "generatedAt": datetime.now().isoformat()},
+    "kpis": [{"id": "kpi-1", "label": "Total", "value": "$50K", "trend": "+12%"}],
+    "charts": [{"id": "chart-1", "type": "line", "title": "Trend", "data": {"labels": ["Jan","Feb"], "datasets": [{"label": "Revenue", "values": [10000,20000]}]}}]
+}
+with open("reports/dashboards/dashboard.json", "w") as f:
+    json.dump(dashboard, f, indent=2)
+```
+Tell user: "Run `npx crushdataai dashboard` to view."
package/dist/connectors/cloud/index.d.ts
CHANGED

@@ -8,11 +8,13 @@ export declare class BigQueryConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
 export declare class SnowflakeConnector implements Connector {
     type: string;
     private createConnection;
-    private
+    private executeInternal;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
     test(connection: Connection): Promise<boolean>;
     getTables(connection: Connection): Promise<Table[]>;
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
package/dist/connectors/cloud/index.js
CHANGED

@@ -168,6 +168,18 @@ print(df.head())
         }
         return `# Language ${lang} not supported for BigQuery connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[BigQuery] executeQuery called for ${connection.name}`);
+        const bigquery = this.createClient(connection);
+        try {
+            const [rows] = await bigquery.query(query);
+            return rows;
+        }
+        catch (error) {
+            console.error(`[BigQuery] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+    }
 }
 exports.BigQueryConnector = BigQueryConnector;
 class SnowflakeConnector {

@@ -193,7 +205,7 @@ class SnowflakeConnector {
             });
         });
     }
-
+    executeInternal(conn, query) {
         return new Promise((resolve, reject) => {
             conn.execute({
                 sqlText: query,

@@ -208,6 +220,22 @@ class SnowflakeConnector {
             });
         });
     }
+    async executeQuery(connection, query) {
+        console.log(`[Snowflake] executeQuery called for ${connection.name}`);
+        let conn = null;
+        try {
+            conn = await this.createConnection(connection);
+            return await this.executeInternal(conn, query);
+        }
+        catch (error) {
+            console.error(`[Snowflake] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            if (conn)
+                conn.destroy(() => { });
+        }
+    }
     async test(connection) {
         console.log(`[Snowflake] Testing connection for ${connection.name} (Account: ${connection.account})`);
         // Validate required fields

@@ -226,7 +254,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            await this.
+            await this.executeInternal(conn, 'SELECT CURRENT_VERSION()');
             console.log(`[Snowflake] Connection test successful for ${connection.name}`);
             return true;
         }

@@ -244,7 +272,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            const rows = await this.
+            const rows = await this.executeInternal(conn, 'SHOW TABLES');
             return rows.map((row) => ({
                 name: row.name || row.TABLE_NAME,
                 type: 'table',

@@ -266,9 +294,9 @@ class SnowflakeConnector {
         try {
             conn = await this.createConnection(connection);
             const offset = (page - 1) * limit;
-            const countRows = await this.
+            const countRows = await this.executeInternal(conn, `SELECT COUNT(*) as TOTAL FROM "${tableName}"`);
             const totalRows = countRows[0]?.TOTAL || 0;
-            const rows = await this.
+            const rows = await this.executeInternal(conn, `SELECT * FROM "${tableName}" LIMIT ${limit} OFFSET ${offset}`);
             const columns = rows.length > 0 ? Object.keys(rows[0]) : [];
             const totalPages = Math.ceil(totalRows / limit) || 1;
             return {

@@ -298,7 +326,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            const rows = await this.
+            const rows = await this.executeInternal(conn, `DESCRIBE TABLE "${tableName}"`);
             return rows.map((row) => ({
                 name: row.name || row.COLUMN_NAME,
                 type: row.type || row.DATA_TYPE,
package/dist/connectors/index.d.ts
CHANGED

@@ -28,6 +28,7 @@ export interface Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery?(connection: Connection, query: string): Promise<any[]>;
 }
 export declare class ConnectorRegistry {
     private static connectors;
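Note that `executeQuery` is optional on the `Connector` interface (the `?`), while the MySQL, PostgreSQL, BigQuery and Snowflake connectors in this release all implement it. A minimal sketch of how a caller would feature-check before using it; the import path and the `runAdHocQuery` helper name are illustrative assumptions, not part of the package.

```typescript
// Hedged sketch: executeQuery is optional on Connector, so callers must feature-check.
// Module specifier and helper name are assumptions for illustration only.
import type { Connector, Connection } from 'crushdataai/dist/connectors';

async function runAdHocQuery(connector: Connector, connection: Connection, sql: string): Promise<any[]> {
  if (typeof connector.executeQuery !== 'function') {
    // Connectors that don't support ad-hoc SQL simply omit the method.
    throw new Error('This connector does not support executeQuery');
  }
  return connector.executeQuery(connection, sql);
}
```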
package/dist/connectors/mysql/index.d.ts
CHANGED

@@ -7,4 +7,5 @@ export declare class MySQLConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<import('../index').ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
package/dist/connectors/mysql/index.js
CHANGED

@@ -178,5 +178,28 @@ finally:
         }
         return `# Language ${lang} not supported for MySQL connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[MySQL] executeQuery called for ${connection.name}`);
+        let conn = null;
+        try {
+            conn = await promise_1.default.createConnection({
+                host: connection.host,
+                port: connection.port || 3306,
+                user: connection.user,
+                password: connection.password || '',
+                database: connection.database
+            });
+            const [rows] = await conn.execute(query);
+            return rows;
+        }
+        catch (error) {
+            console.error(`[MySQL] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            if (conn)
+                await conn.end();
+        }
+    }
 }
 exports.MySQLConnector = MySQLConnector;
package/dist/connectors/postgresql/index.d.ts
CHANGED

@@ -8,4 +8,5 @@ export declare class PostgreSQLConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<import('../index').ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
package/dist/connectors/postgresql/index.js
CHANGED

@@ -156,5 +156,21 @@ finally:
         }
         return `# Language ${lang} not supported for PostgreSQL connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[PostgreSQL] executeQuery called for ${connection.name}`);
+        const client = this.createClient(connection);
+        try {
+            await client.connect();
+            const result = await client.query(query);
+            return result.rows;
+        }
+        catch (error) {
+            console.error(`[PostgreSQL] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            await client.end();
+        }
+    }
 }
 exports.PostgreSQLConnector = PostgreSQLConnector;
package/dist/dashboard-server.d.ts
ADDED

@@ -0,0 +1 @@
+export declare function startDashboardServer(port: number): Promise<void>;
package/dist/dashboard-server.js
ADDED

@@ -0,0 +1,78 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.startDashboardServer = startDashboardServer;
+const express_1 = __importDefault(require("express"));
+const path = __importStar(require("path"));
+const open_1 = __importDefault(require("open"));
+const dashboard_1 = __importDefault(require("./routes/dashboard"));
+async function startDashboardServer(port) {
+    const app = (0, express_1.default)();
+    // Serve static files from the built dashboard UI
+    const uiPath = path.join(__dirname, '..', 'ui-dashboard-dist');
+    app.use(express_1.default.static(uiPath));
+    // API routes
+    app.use('/api', dashboard_1.default);
+    // SPA fallback - serve index.html for client-side routing
+    app.get('*', (_req, res) => {
+        res.sendFile(path.join(uiPath, 'index.html'));
+    });
+    return new Promise((resolve, reject) => {
+        const server = app.listen(port, async () => {
+            console.log(`\n📊 Dashboard UI running at http://localhost:${port}\n`);
+            console.log(' Looking for dashboards in: reports/dashboards/');
+            console.log(' Press Ctrl+C to stop\n');
+            // Open browser
+            try {
+                await (0, open_1.default)(`http://localhost:${port}`);
+            }
+            catch (err) {
+                // Ignore open errors
+            }
+            resolve();
+        });
+        server.on('error', (err) => {
+            if (err.code === 'EADDRINUSE') {
+                reject(new Error(`Port ${port} is already in use. Try a different port with --port`));
+            }
+            else {
+                reject(err);
+            }
+        });
+    });
+}
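`startDashboardServer` is also exported for programmatic use. A hedged sketch of calling it directly; the module specifier is an assumption (the package's published entry points aren't shown in this diff), and the CLI wiring in `index.js` below does the same thing behind `crushdataai dashboard --port <port>`.

```typescript
// Hedged sketch: starting the dashboard server without the CLI.
// The import path is a guess at how dist/ is exposed, not a documented entry point.
import { startDashboardServer } from 'crushdataai/dist/dashboard-server';

async function main(): Promise<void> {
  // Serves ui-dashboard-dist, mounts the /api routes, opens the browser,
  // and rejects if the port is already taken (EADDRINUSE).
  await startDashboardServer(3002);
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```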
package/dist/index.js
CHANGED

@@ -37,12 +37,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const commander_1 = require("commander");
 const commands_1 = require("./commands");
 const server_1 = require("./server");
+const dashboard_server_1 = require("./dashboard-server");
 const connections_1 = require("./connections");
 const program = new commander_1.Command();
 program
     .name('crushdataai')
     .description('CLI to install CrushData AI data analyst skill for AI coding assistants')
-    .version('1.2.
+    .version('1.2.14');
 program
     .command('init')
     .description('Initialize CrushData AI skill in current project')

@@ -125,4 +126,19 @@ program
     const { schema } = await Promise.resolve().then(() => __importStar(require('./commands/schema')));
     await schema(connection, table);
 });
+program
+    .command('dashboard')
+    .description('Open the dashboard visualization UI')
+    .option('-p, --port <port>', 'Server port', '3002')
+    .action(async (options) => {
+    const port = parseInt(options.port);
+    console.log('\n📊 Starting CrushData AI Dashboard...\n');
+    try {
+        await (0, dashboard_server_1.startDashboardServer)(port);
+    }
+    catch (error) {
+        console.error(`❌ Error: ${error.message}`);
+        process.exit(1);
+    }
+});
 program.parse();
package/dist/routes/dashboard.js
ADDED

@@ -0,0 +1,166 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+const express_1 = require("express");
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const query_executor_1 = require("../services/query-executor");
+const router = (0, express_1.Router)();
+// Get reports/dashboards directory path relative to current working directory
+function getDashboardsDir() {
+    return path.join(process.cwd(), 'reports', 'dashboards');
+}
+// List all available dashboards
+router.get('/dashboards', (_req, res) => {
+    try {
+        const dashboardsDir = getDashboardsDir();
+        // Check if directory exists
+        if (!fs.existsSync(dashboardsDir)) {
+            return res.json([]);
+        }
+        // Read all JSON files in the directory
+        const files = fs.readdirSync(dashboardsDir)
+            .filter(file => file.endsWith('.json'));
+        const dashboards = files.map(file => {
+            const filePath = path.join(dashboardsDir, file);
+            const content = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+            // Use filename without extension as ID
+            const id = path.basename(file, '.json');
+            return {
+                id,
+                title: content.metadata?.title || id,
+                generatedAt: content.metadata?.generatedAt || '',
+                chartCount: content.charts?.length || 0,
+                kpiCount: content.kpis?.length || 0
+            };
+        });
+        res.json(dashboards);
+    }
+    catch (error) {
+        console.error('Error listing dashboards:', error);
+        res.status(500).json({ error: 'Failed to list dashboards' });
+    }
+});
+// Get a specific dashboard by ID
+router.get('/dashboards/:id', (req, res) => {
+    try {
+        const { id } = req.params;
+        const dashboardsDir = getDashboardsDir();
+        const filePath = path.join(dashboardsDir, `${id}.json`);
+        if (!fs.existsSync(filePath)) {
+            return res.status(404).json({ error: 'Dashboard not found' });
+        }
+        const content = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+        res.json({ id, ...content });
+    }
+    catch (error) {
+        console.error('Error reading dashboard:', error);
+        res.status(500).json({ error: 'Failed to read dashboard' });
+    }
+});
+// Refresh a chart's data (placeholder for now - would re-run query)
+router.post('/charts/:id/refresh', async (req, res) => {
+    try {
+        const { id } = req.params;
+        const dashboardsDir = getDashboardsDir();
+        // 1. Find the chart in any dashboard
+        // We have to search all dashboards because we don't know which one calls it
+        // In a real DB we'd have a chart table, but here we scan JSONs
+        const files = fs.readdirSync(dashboardsDir).filter(file => file.endsWith('.json'));
+        let targetDashboard = null;
+        let targetDashboardFile = '';
+        let targetChart = null;
+        for (const file of files) {
+            const filePath = path.join(dashboardsDir, file);
+            const dashboard = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+            const chart = dashboard.charts.find(c => c.id === id);
+            if (chart) {
+                targetDashboard = dashboard;
+                targetDashboardFile = filePath;
+                targetChart = chart;
+                break;
+            }
+        }
+        if (!targetDashboard || !targetChart || !targetDashboardFile) {
+            return res.status(404).json({ error: 'Chart not found' });
+        }
+        // 2. Execute Query
+        if (!targetChart.query || !targetChart.query.connection) {
+            return res.status(400).json({ error: 'Chart has no query configuration' });
+        }
+        console.log(`Refreshing chart ${id} using connection ${targetChart.query.connection}...`);
+        const newData = await query_executor_1.QueryExecutor.execute(targetChart.query);
+        // 3. Update Dashboard
+        targetChart.data = newData;
+        targetChart.lastRefreshed = new Date().toISOString();
+        // Save back to disk
+        fs.writeFileSync(targetDashboardFile, JSON.stringify(targetDashboard, null, 2));
+        // 4. Return new data
+        res.json(targetChart);
+    }
+    catch (error) {
+        console.error('Error refreshing chart:', error);
+        res.status(500).json({ error: error instanceof Error ? error.message : 'Failed to refresh chart' });
+    }
+});
+// SSE Endpoint for file watching
+router.get('/events', (req, res) => {
+    // Set headers for SSE
+    res.setHeader('Content-Type', 'text/event-stream');
+    res.setHeader('Cache-Control', 'no-cache');
+    res.setHeader('Connection', 'keep-alive');
+    res.flushHeaders();
+    const dashboardsDir = getDashboardsDir();
+    if (!fs.existsSync(dashboardsDir)) {
+        return res.end();
+    }
+    console.log('Client connected to SSE stream');
+    // Watch for file changes
+    const watcher = fs.watch(dashboardsDir, (eventType, filename) => {
+        if (filename && filename.endsWith('.json')) {
+            console.log(`File changed: ${filename} (${eventType})`);
+            const dashboardId = path.basename(filename, '.json');
+            // Send event
+            res.write(`data: ${JSON.stringify({ type: 'dashboard-update', id: dashboardId })}\n\n`);
+        }
+    });
+    // Cleanup on close
+    req.on('close', () => {
+        watcher.close();
+        console.log('Client disconnected from SSE stream');
+        res.end();
+    });
+});
+exports.default = router;
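The bundled dashboard UI presumably consumes these routes; for reference, a hedged browser-side sketch of the API surface defined above: fetch a dashboard by id and reload when the `/api/events` SSE stream reports a change. Endpoint paths and the event payload shape come from the route code; the helper names and reload strategy are illustrative, not the shipped `ui-dashboard-dist` code.

```typescript
// Hedged sketch of a client for the routes above (not the actual ui-dashboard-dist code).
async function fetchDashboard(id: string) {
  const res = await fetch(`/api/dashboards/${id}`);
  if (!res.ok) throw new Error(`Dashboard ${id} not found`);
  return res.json();
}

function watchDashboards(onUpdate: (id: string) => void): () => void {
  // The server watches reports/dashboards/*.json and emits
  // {"type":"dashboard-update","id":"<filename without .json>"} on each change.
  const events = new EventSource('/api/events');
  events.onmessage = (msg) => {
    const payload = JSON.parse(msg.data);
    if (payload.type === 'dashboard-update') onUpdate(payload.id);
  };
  return () => events.close(); // caller closes the stream when done
}
```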