crushdataai 1.2.14 → 1.2.17
This diff shows the content changes between publicly released versions of the package as they appear in the supported registries. It is provided for informational purposes only.
- package/README.md +261 -0
- package/assets/images/crushdataai-dashboard-simple-charts.png +0 -0
- package/assets/images/crushdataai-dashboard.png +0 -0
- package/assets/images/crushdataai-data-connection-ui.png +0 -0
- package/assets/images/crushdataai-landing-page.png +0 -0
- package/dist/connectors/cloud/index.d.ts +3 -1
- package/dist/connectors/cloud/index.js +34 -6
- package/dist/connectors/index.d.ts +1 -0
- package/dist/connectors/mysql/index.d.ts +1 -0
- package/dist/connectors/mysql/index.js +23 -0
- package/dist/connectors/postgresql/index.d.ts +1 -0
- package/dist/connectors/postgresql/index.js +16 -0
- package/dist/routes/dashboard.js +67 -14
- package/dist/services/query-executor.d.ts +5 -0
- package/dist/services/query-executor.js +80 -0
- package/package.json +2 -1
- package/ui-dashboard-dist/assets/index-SkyAs8Zl.js +4185 -0
- package/ui-dashboard-dist/index.html +1 -1
- package/ui-dashboard-dist/assets/index-BhtUalwh.js +0 -112
package/README.md
ADDED
@@ -0,0 +1,261 @@

# CrushData AI

**Data Analyst Intelligence for AI IDEs**

An AI skill that provides structured, professional data-analysis workflows with built-in validation - helping AI coding assistants perform data analysis like a careful human analyst.

![CrushData AI landing page](assets/images/crushdataai-landing-page.png)

## 🎯 What It Does

CrushData AI provides:

- **10 Analysis Workflows** - EDA, Dashboard, A/B Test, Cohort, Funnel, Time Series, Segmentation, Data Cleaning, Ad-hoc, KPI Reporting
- **400+ Searchable Patterns** - Metrics, SQL, Python, Charts, Database Tips, Common Mistakes
- **Context-Building Protocol** - Forces the AI to ask questions and validate before delivering results
- **4 Industry Modules** - SaaS, E-commerce, Finance, and Marketing-specific metrics

## 🚀 Quick Start

### Install via CLI

```bash
npm install -g crushdataai
```

### What `npm install -g crushdataai` Does

The `-g` flag means **global install**:

| | Local Install (`npm install`) | Global Install (`npm install -g`) |
|--|-------------------------------|-----------------------------------|
| **Location** | `./node_modules/` in the current folder | System-wide (e.g., `%APPDATA%\npm\`) |
| **Scope** | Only available in that project | Available everywhere on your computer |
| **Use Case** | Libraries for your project | CLI tools you want to run anywhere |

Then, in any project:

```bash
cd your-project
crushdataai init --ai all     # All AI IDEs
crushdataai init --ai claude  # Claude Code only
```

### What `crushdataai init` Does

When you run `crushdataai init --ai all`, the CLI:

1. **Creates `.shared/data-analyst/`** - contains the BM25 search engine and 13 CSV knowledge databases (~400 rows of data-analyst patterns)

2. **Creates AI IDE config files** based on the `--ai` flag:

   | Flag | Creates |
   |------|---------|
   | `--ai claude` | `.claude/skills/data-analyst/SKILL.md` |
   | `--ai cursor` | `.cursor/commands/data-analyst.md` |
   | `--ai windsurf` | `.windsurf/workflows/data-analyst.md` |
   | `--ai antigravity` | `.agent/workflows/data-analyst.md` |
   | `--ai copilot` | `.github/prompts/data-analyst.prompt.md` |
   | `--ai kiro` | `.kiro/steering/data-analyst.md` |
   | `--ai all` | All of the above |

3. **Your AI IDE automatically detects** the config files and enables the `/data-analyst` command (a sketch of the resulting layout follows).
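For orientation, a project initialized with `--ai claude` would end up with roughly this layout. This is a sketch assembled from the paths mentioned in this README; the exact contents of `.shared/data-analyst/` beyond `scripts/search.py` are summarized, not enumerated:

```
your-project/
├── .claude/
│   └── skills/
│       └── data-analyst/
│           └── SKILL.md
└── .shared/
    └── data-analyst/
        ├── scripts/
        │   └── search.py      # BM25 search engine
        └── *.csv              # 13 CSV knowledge databases (~400 rows)
```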
### Updating

To update the CLI and refresh your project's AI skill files:

```bash
npm install -g crushdataai@latest

# Update a specific IDE (recommended):
crushdataai init --ai cursor --force

# Or update everything:
crushdataai init --force
```

## 🔌 Data Connections (New in v1.2)

CrushData AI now features a **Connection Manager** to securely handle your data credentials.

### 1. Add Data Sources

Run the connect command to open the management UI:

```bash
crushdataai connect
```

- **Supported Types**: CSV, MySQL, PostgreSQL, Shopify, BigQuery, Snowflake
- **Private & Secure**: Credentials are stored **locally** on your machine (`~/.crushdataai/connections.json`). They are **never** uploaded to any server or included in the npm package.

![CrushData AI data connection UI](assets/images/crushdataai-data-connection-ui.png)

> [!NOTE]
> **Persistence**: Once you add a connection, you can **close the UI** (Ctrl+C). The AI IDE reads the saved connection details directly from your local config file, so the server does NOT need to keep running.
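The README does not document the file format; as a rough sketch based on the fields the MySQL connector reads in this release (`name`, `type`, `host`, `port`, `user`, `password`, `database`), a saved entry might look like the following. The top-level shape (object keyed by connection name vs. a list) is a guess:

```json
{
  "my-shop-data": {
    "name": "my-shop-data",
    "type": "mysql",
    "host": "localhost",
    "port": 3306,
    "user": "analyst",
    "password": "********",
    "database": "shop"
  }
}
```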
### 2. View Saved Connections

```bash
crushdataai connections
```

## 📈 Data Visualization (New in v1.3)

CrushData AI generates interactive dashboards to visualize your analysis results.

### 1. View Dashboard

Run the dashboard command to open the local React-based viewer:

```bash
# Using the installed package (generally faster)
crushdataai dashboard

# OR using npx (if the package is not on your PATH)
npx crushdataai dashboard
```

![CrushData AI dashboard, advanced charts](assets/images/crushdataai-dashboard.png)
*Advanced charts visualization (Funnel, Gauge, Radar, etc.)*

![CrushData AI dashboard, standard charts](assets/images/crushdataai-dashboard-simple-charts.png)
*Standard charts visualization (Line, Bar, Pie, etc.)*

### 2. Features

- **Tier 1 Charts**: Line, Bar, Pie, Area, Scatter, Radar (via Recharts)
- **Tier 2 Charts**: Funnel, Gauge, Heatmap, Sankey, Treemap, Waterfall (via Plotly)
- **Auto-Refresh**: The dashboard updates automatically when your AI agent writes new data to `reports/dashboards/`.
- **Data Refresh**: Use the "Refresh" button 🔄 on any chart to re-run the saved SQL/Python query against your data source.

### 3. AI Workflow Example

When you ask an AI agent (like Claude or Cursor) to "create a dashboard", it follows this process:

1. **Analyzes Data**: The AI runs SQL/Python to calculate metrics and aggregates.
2. **Generates JSON**: It creates a file at `reports/dashboards/your-topic.json` using the CrushData schema (sketched below).
3. **Visualizes**: You run the dashboard command to see the rendered charts instantly.

*The AI automatically selects the best chart type (e.g., Line for trends, Bar for comparisons) based on your data.*
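The schema itself is not documented in this README. The sketch below is inferred from `dist/routes/dashboard.js` and `dist/services/query-executor.js` in this release: each dashboard holds a `charts` array, and each chart carries an `id`, an optional `query` with a `connection` name and `sql` text, and `data` as labels plus datasets. The `title` and `type` fields are hypothetical:

```json
{
  "title": "Monthly Revenue",
  "charts": [
    {
      "id": "revenue-by-month",
      "type": "line",
      "query": {
        "connection": "my-shop-data",
        "sql": "SELECT month, revenue FROM monthly_revenue ORDER BY month"
      },
      "data": {
        "labels": ["Jan", "Feb", "Mar"],
        "datasets": [{ "label": "Revenue", "values": [1200, 1350, 1500] }]
      }
    }
  ]
}
```

When the Refresh button re-runs a chart, the server rewrites `data` in place and stamps a `lastRefreshed` ISO timestamp on the chart.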
## 💻 Usage

### Step 1: Initialize

```bash
crushdataai init --ai all
```

### Step 2: Use in Your AI IDE

The skill activates automatically (Claude) or via slash command (other IDEs).

**Example Workflow:**

1. **User Request**: "Analyze the sales trends in `my-shop-data`"
2. **AI Action**: The AI checks your saved connections.
3. **AI Action**: The AI runs:
   ```bash
   npx crushdataai snippet my-shop-data --lang python
   ```
4. **Result**: The AI receives the secure code to connect to your data (read-only) and proceeds with analysis.

### Claude Code

The skill activates automatically when you request data-analysis work. Just chat naturally:

```
Analyze customer churn for my SaaS product
```

### Cursor / Windsurf / Antigravity

Use the slash command to invoke the skill:

```
/data-analyst Analyze customer churn for my SaaS product
```

### Kiro

Type `/` in chat to see available commands, then select `data-analyst`:

```
/data-analyst Analyze customer churn for my SaaS product
```

### GitHub Copilot

In VS Code with Copilot, type `/` in chat to see available prompts, then select `data-analyst`:

```
/data-analyst Analyze customer churn for my SaaS product
```

### Example Prompts

```
Analyze customer churn for my SaaS product
Create a dashboard for e-commerce analytics
Calculate MRR and ARR from subscription data
Build a cohort retention analysis
Perform A/B test analysis on conversion rates
```

### Search Directly

```bash
# Search workflows
python3 .shared/data-analyst/scripts/search.py "EDA" --domain workflow

# Search metrics
python3 .shared/data-analyst/scripts/search.py "churn" --domain metric

# Search SQL patterns
python3 .shared/data-analyst/scripts/search.py "cohort" --domain sql

# Industry-specific
python3 .shared/data-analyst/scripts/search.py "MRR" --industry saas
```

## 📊 Search Domains

| Domain | Content |
|--------|---------|
| `workflow` | Step-by-step analysis processes |
| `metric` | Metric definitions with formulas |
| `chart` | Visualization recommendations |
| `cleaning` | Data quality patterns |
| `sql` | SQL patterns (window functions, cohorts) |
| `python` | pandas/polars code snippets |
| `database` | PostgreSQL, BigQuery, Snowflake tips |
| `report` | Dashboard UX guidelines |
| `validation` | Common mistakes to avoid |

## 🏭 Industry Modules

| Industry | Key Metrics |
|----------|-------------|
| `saas` | MRR, ARR, Churn, CAC, LTV, NRR |
| `ecommerce` | Conversion, AOV, Cart Abandonment |
| `finance` | Margins, ROI, Cash Flow, Ratios |
| `marketing` | CTR, CPA, ROAS, Lead Conversion |

## 🔒 How It Works

### Context-Building Protocol

1. **Discovery** - The AI asks about business context before coding
2. **Data Profiling** - Mandatory checks before analysis
3. **Data Cleaning (ETL)** - Handles missing values and duplicates in the `etl/` folder
4. **Validation** - Verifies JOINs, aggregations, and totals
5. **Sanity Checks** - Compares results to benchmarks before delivery

### Python Environment

To prevent global conflicts, the AI is instructed to:

1. **Check**: Look for an existing `venv` or `.venv`.
2. **Create**: If missing, run `python3 -m venv venv`.
3. **Reports**: Save all validation/profiling outputs to the `reports/` folder, creating it if missing.

This prevents common AI mistakes:

- ❌ Wrong metric definitions
- ❌ Duplicate-row inflation
- ❌ Incorrect JOIN types
- ❌ Unreasonable totals
- ❌ Cluttered workspaces (scripts are organized in `analysis/` and `etl/`)

## 📝 License

Apache 2.0
package/assets/images/crushdataai-dashboard-simple-charts.png
ADDED
Binary file

package/assets/images/crushdataai-dashboard.png
ADDED
Binary file

package/assets/images/crushdataai-data-connection-ui.png
ADDED
Binary file

package/assets/images/crushdataai-landing-page.png
ADDED
Binary file
package/dist/connectors/cloud/index.d.ts
CHANGED

@@ -8,11 +8,13 @@ export declare class BigQueryConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
 export declare class SnowflakeConnector implements Connector {
     type: string;
     private createConnection;
-    private
+    private executeInternal;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
     test(connection: Connection): Promise<boolean>;
     getTables(connection: Connection): Promise<Table[]>;
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
package/dist/connectors/cloud/index.js
CHANGED

@@ -168,6 +168,18 @@ print(df.head())
         }
         return `# Language ${lang} not supported for BigQuery connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[BigQuery] executeQuery called for ${connection.name}`);
+        const bigquery = this.createClient(connection);
+        try {
+            const [rows] = await bigquery.query(query);
+            return rows;
+        }
+        catch (error) {
+            console.error(`[BigQuery] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+    }
 }
 exports.BigQueryConnector = BigQueryConnector;
 class SnowflakeConnector {

@@ -193,7 +205,7 @@ class SnowflakeConnector {
             });
         });
     }
-
+    executeInternal(conn, query) {
         return new Promise((resolve, reject) => {
             conn.execute({
                 sqlText: query,

@@ -208,6 +220,22 @@ class SnowflakeConnector {
             });
         });
     }
+    async executeQuery(connection, query) {
+        console.log(`[Snowflake] executeQuery called for ${connection.name}`);
+        let conn = null;
+        try {
+            conn = await this.createConnection(connection);
+            return await this.executeInternal(conn, query);
+        }
+        catch (error) {
+            console.error(`[Snowflake] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            if (conn)
+                conn.destroy(() => { });
+        }
+    }
     async test(connection) {
         console.log(`[Snowflake] Testing connection for ${connection.name} (Account: ${connection.account})`);
         // Validate required fields

@@ -226,7 +254,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            await this.
+            await this.executeInternal(conn, 'SELECT CURRENT_VERSION()');
             console.log(`[Snowflake] Connection test successful for ${connection.name}`);
             return true;
         }

@@ -244,7 +272,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            const rows = await this.
+            const rows = await this.executeInternal(conn, 'SHOW TABLES');
             return rows.map((row) => ({
                 name: row.name || row.TABLE_NAME,
                 type: 'table',

@@ -266,9 +294,9 @@ class SnowflakeConnector {
         try {
             conn = await this.createConnection(connection);
             const offset = (page - 1) * limit;
-            const countRows = await this.
+            const countRows = await this.executeInternal(conn, `SELECT COUNT(*) as TOTAL FROM "${tableName}"`);
             const totalRows = countRows[0]?.TOTAL || 0;
-            const rows = await this.
+            const rows = await this.executeInternal(conn, `SELECT * FROM "${tableName}" LIMIT ${limit} OFFSET ${offset}`);
             const columns = rows.length > 0 ? Object.keys(rows[0]) : [];
             const totalPages = Math.ceil(totalRows / limit) || 1;
             return {

@@ -298,7 +326,7 @@ class SnowflakeConnector {
         let conn = null;
         try {
             conn = await this.createConnection(connection);
-            const rows = await this.
+            const rows = await this.executeInternal(conn, `DESCRIBE TABLE "${tableName}"`);
             return rows.map((row) => ({
                 name: row.name || row.COLUMN_NAME,
                 type: row.type || row.DATA_TYPE,
package/dist/connectors/index.d.ts
CHANGED

@@ -28,6 +28,7 @@ export interface Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery?(connection: Connection, query: string): Promise<any[]>;
 }
 export declare class ConnectorRegistry {
     private static connectors;
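Note that `executeQuery` is optional (`?`) on the interface, so not every connector supports raw SQL. A minimal sketch of how a caller would narrow it before use, assuming the package's `Connector` and `Connection` types are in scope (the `runQuery` helper is hypothetical, not part of the package):

```typescript
// Hypothetical caller; only the Connector shape above comes from the package.
async function runQuery(connector: Connector, connection: Connection, sql: string): Promise<any[]> {
    if (!connector.executeQuery) {
        // CSV/Shopify-style connectors may not implement raw SQL execution.
        throw new Error('This connector does not support executeQuery');
    }
    return connector.executeQuery(connection, sql);
}
```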
package/dist/connectors/mysql/index.d.ts
CHANGED

@@ -7,4 +7,5 @@ export declare class MySQLConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<import('../index').ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
package/dist/connectors/mysql/index.js
CHANGED

@@ -178,5 +178,28 @@ finally:
         }
         return `# Language ${lang} not supported for MySQL connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[MySQL] executeQuery called for ${connection.name}`);
+        let conn = null;
+        try {
+            conn = await promise_1.default.createConnection({
+                host: connection.host,
+                port: connection.port || 3306,
+                user: connection.user,
+                password: connection.password || '',
+                database: connection.database
+            });
+            const [rows] = await conn.execute(query);
+            return rows;
+        }
+        catch (error) {
+            console.error(`[MySQL] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            if (conn)
+                await conn.end();
+        }
+    }
 }
 exports.MySQLConnector = MySQLConnector;
package/dist/connectors/postgresql/index.d.ts
CHANGED

@@ -8,4 +8,5 @@ export declare class PostgreSQLConnector implements Connector {
     getData(connection: Connection, tableName: string, page: number, limit: number): Promise<TableData>;
     getSchema(connection: Connection, tableName: string): Promise<import('../index').ColumnInfo[]>;
     getSnippet(connection: Connection, lang: string): string;
+    executeQuery(connection: Connection, query: string): Promise<any[]>;
 }
package/dist/connectors/postgresql/index.js
CHANGED

@@ -156,5 +156,21 @@ finally:
         }
         return `# Language ${lang} not supported for PostgreSQL connector yet.`;
     }
+    async executeQuery(connection, query) {
+        console.log(`[PostgreSQL] executeQuery called for ${connection.name}`);
+        const client = this.createClient(connection);
+        try {
+            await client.connect();
+            const result = await client.query(query);
+            return result.rows;
+        }
+        catch (error) {
+            console.error(`[PostgreSQL] executeQuery failed:`, error.message);
+            throw new Error(`Failed to execute query: ${error.message}`);
+        }
+        finally {
+            await client.end();
+        }
+    }
 }
 exports.PostgreSQLConnector = PostgreSQLConnector;
package/dist/routes/dashboard.js
CHANGED
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const express_1 = require("express");
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
+const query_executor_1 = require("../services/query-executor");
 const router = (0, express_1.Router)();
 // Get reports/dashboards directory path relative to current working directory
 function getDashboardsDir() {

@@ -82,7 +83,7 @@ router.get('/dashboards/:id', (req, res) => {
             return res.status(404).json({ error: 'Dashboard not found' });
         }
         const content = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
-        res.json(content);
+        res.json({ id, ...content });
     }
     catch (error) {
         console.error('Error reading dashboard:', error);

@@ -90,24 +91,76 @@ router.get('/dashboards/:id', (req, res) => {
     }
 });
 // Refresh a chart's data (placeholder for now - would re-run query)
-router.post('/charts/:id/refresh', (req, res) => {
+router.post('/charts/:id/refresh', async (req, res) => {
     try {
         const { id } = req.params;
-
-        //
-        //
-        //
-
-
-
-
-
-
+        const dashboardsDir = getDashboardsDir();
+        // 1. Find the chart in any dashboard
+        // We have to search all dashboards because we don't know which one calls it
+        // In a real DB we'd have a chart table, but here we scan JSONs
+        const files = fs.readdirSync(dashboardsDir).filter(file => file.endsWith('.json'));
+        let targetDashboard = null;
+        let targetDashboardFile = '';
+        let targetChart = null;
+        for (const file of files) {
+            const filePath = path.join(dashboardsDir, file);
+            const dashboard = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+            const chart = dashboard.charts.find(c => c.id === id);
+            if (chart) {
+                targetDashboard = dashboard;
+                targetDashboardFile = filePath;
+                targetChart = chart;
+                break;
+            }
+        }
+        if (!targetDashboard || !targetChart || !targetDashboardFile) {
+            return res.status(404).json({ error: 'Chart not found' });
+        }
+        // 2. Execute Query
+        if (!targetChart.query || !targetChart.query.connection) {
+            return res.status(400).json({ error: 'Chart has no query configuration' });
+        }
+        console.log(`Refreshing chart ${id} using connection ${targetChart.query.connection}...`);
+        const newData = await query_executor_1.QueryExecutor.execute(targetChart.query);
+        // 3. Update Dashboard
+        targetChart.data = newData;
+        targetChart.lastRefreshed = new Date().toISOString();
+        // Save back to disk
+        fs.writeFileSync(targetDashboardFile, JSON.stringify(targetDashboard, null, 2));
+        // 4. Return new data
+        res.json(targetChart);
     }
     catch (error) {
         console.error('Error refreshing chart:', error);
-        res.status(500).json({ error: 'Failed to refresh chart' });
+        res.status(500).json({ error: error instanceof Error ? error.message : 'Failed to refresh chart' });
+    }
+});
+// SSE Endpoint for file watching
+router.get('/events', (req, res) => {
+    // Set headers for SSE
+    res.setHeader('Content-Type', 'text/event-stream');
+    res.setHeader('Cache-Control', 'no-cache');
+    res.setHeader('Connection', 'keep-alive');
+    res.flushHeaders();
+    const dashboardsDir = getDashboardsDir();
+    if (!fs.existsSync(dashboardsDir)) {
+        return res.end();
     }
+    console.log('Client connected to SSE stream');
+    // Watch for file changes
+    const watcher = fs.watch(dashboardsDir, (eventType, filename) => {
+        if (filename && filename.endsWith('.json')) {
+            console.log(`File changed: ${filename} (${eventType})`);
+            const dashboardId = path.basename(filename, '.json');
+            // Send event
+            res.write(`data: ${JSON.stringify({ type: 'dashboard-update', id: dashboardId })}\n\n`);
+        }
+    });
+    // Cleanup on close
+    req.on('close', () => {
+        watcher.close();
+        console.log('Client disconnected from SSE stream');
+        res.end();
+    });
 });
 exports.default = router;
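For context, the new `/events` stream is standard Server-Sent Events, so the dashboard UI can consume it with a plain `EventSource`. A minimal browser-side sketch; the `/api` mount prefix is an assumption, since the router's mount path is not shown in this diff:

```typescript
// Browser-side sketch: react to dashboard file changes.
const source = new EventSource('/api/events'); // mount path assumed, not confirmed by the diff
source.onmessage = (event) => {
    // Payload shape from the server: { type: 'dashboard-update', id: '<dashboard-id>' }
    const payload = JSON.parse(event.data);
    if (payload.type === 'dashboard-update') {
        // e.g. re-fetch `/api/dashboards/${payload.id}` and re-render the charts
        console.log('Dashboard updated:', payload.id);
    }
};
```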
package/dist/services/query-executor.js
ADDED

@@ -0,0 +1,80 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.QueryExecutor = void 0;
const connections_1 = require("../connections");
const postgresql_1 = require("../connectors/postgresql");
const mysql_1 = require("../connectors/mysql");
// import { BigQueryConnector } from '../connectors/bigquery';
// import { SnowflakeConnector } from '../connectors/snowflake';
const shopify_1 = require("../connectors/shopify");
class QueryExecutor {
    static async execute(query) {
        if (!query.connection) {
            throw new Error('No connection specified in query');
        }
        const connectionConfig = (0, connections_1.getConnection)(query.connection);
        if (!connectionConfig) {
            throw new Error(`Connection "${query.connection}" not found`);
        }
        let result;
        // Execute query based on connection type
        switch (connectionConfig.type) {
            case 'postgresql': {
                const connector = new postgresql_1.PostgreSQLConnector();
                if (!query.sql)
                    throw new Error('SQL query required for Postgres');
                result = await connector.executeQuery(connectionConfig, query.sql);
                break;
            }
            case 'mysql': {
                const connector = new mysql_1.MySQLConnector();
                if (!query.sql)
                    throw new Error('SQL query required for MySQL');
                result = await connector.executeQuery(connectionConfig, query.sql);
                break;
            }
            // case 'bigquery': {
            //     const connector = new BigQueryConnector();
            //     if (!query.sql) throw new Error('SQL query required for BigQuery');
            //     result = await connector.executeQuery(connectionConfig, query.sql);
            //     break;
            // }
            // case 'snowflake': {
            //     const connector = new SnowflakeConnector();
            //     if (!query.sql) throw new Error('SQL query required for Snowflake');
            //     result = await connector.executeQuery(connectionConfig, query.sql);
            //     break;
            // }
            case 'shopify': {
                const connector = new shopify_1.ShopifyConnector();
                // For Shopify, query.sql is treated as the table name/endpoint
                const tableName = query.sql?.trim() || 'orders';
                // Fetch first page, limit 1000
                const tableData = await connector.getData(connectionConfig, tableName, 1, 1000);
                result = tableData.rows;
                break;
            }
            default:
                throw new Error(`Unsupported connection type: ${connectionConfig.type}`);
        }
        // Transform result to ChartData format
        return this.transformToChartData(result, query);
    }
    static transformToChartData(data, query) {
        if (!data || data.length === 0) {
            return { labels: [], datasets: [] };
        }
        // Auto-detect labels (first string/date column)
        const keys = Object.keys(data[0]);
        const labelKey = keys.find(k => typeof data[0][k] === 'string' || data[0][k] instanceof Date) || keys[0];
        const labels = data.map(row => String(row[labelKey]));
        // Create datasets for all numeric columns
        const valueKeys = keys.filter(k => k !== labelKey && typeof data[0][k] === 'number');
        const datasets = valueKeys.map((key, i) => ({
            label: key.charAt(0).toUpperCase() + key.slice(1).replace(/_/g, ' '),
            values: data.map(row => Number(row[key]))
        }));
        return { labels, datasets };
    }
}
exports.QueryExecutor = QueryExecutor;
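To make the column auto-detection above concrete, here is an illustrative input/output pair (sample values invented for this example, not from the package): the first string-typed column becomes the labels, and every remaining numeric column becomes a dataset with an underscore-to-space, capitalized label.

```typescript
// Sample rows as a SQL connector might return them:
const rows = [
    { month: 'Jan', revenue: 1200, order_count: 30 },
    { month: 'Feb', revenue: 1350, order_count: 34 },
];
// transformToChartData picks 'month' as labelKey (first string column) and yields:
// {
//   labels: ['Jan', 'Feb'],
//   datasets: [
//     { label: 'Revenue',     values: [1200, 1350] },
//     { label: 'Order count', values: [30, 34] }
//   ]
// }
```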
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "crushdataai",
-  "version": "1.2.
+  "version": "1.2.17",
   "description": "CLI to install CrushData AI data analyst skill for AI coding assistants",
   "main": "dist/index.js",
   "bin": {

@@ -47,6 +47,7 @@
   "files": [
     "dist",
     "assets",
+    "media",
     "ui",
     "ui-dashboard-dist"
   ],