@bonnard/cli 0.1.10 → 0.1.12

package/dist/bin/bon.mjs CHANGED
@@ -1488,10 +1488,45 @@ async function addManual(options) {
  console.log(pc.dim(`Test connection: bon datasource test ${name}`));
  }
  /**
+ * Add the Contoso demo datasource (read-only retail dataset)
+ */
+ async function addDemo(options) {
+ const name = "contoso_demo";
+ if (datasourceExists(name) && !options.force) {
+ console.log(pc.yellow(`Datasource "${name}" already exists. Use --force to overwrite.`));
+ return;
+ }
+ if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
+ addLocalDatasource({
+ name,
+ type: "postgres",
+ source: "demo",
+ config: {
+ host: "aws-1-eu-west-1.pooler.supabase.com",
+ port: "5432",
+ database: "postgres",
+ schema: "contoso"
+ },
+ credentials: {
+ username: "demo_reader.yvbfzqogtdsqqkpyztlu",
+ password: "contoso-demo-2025!"
+ }
+ });
+ console.log();
+ console.log(pc.green(`✓ Demo datasource "${name}" saved to .bon/datasources.yaml`));
+ console.log();
+ console.log(pc.dim("Contoso is a read-only retail dataset with tables like:"));
+ console.log(pc.dim(" fact_sales, dim_product, dim_store, dim_customer"));
+ console.log();
+ console.log(pc.dim(`Test connection: bon datasource test ${name}`));
+ console.log(pc.dim(`Explore tables: bon preview ${name} "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"`));
+ }
+ /**
  * Main datasource add command
  */
  async function datasourceAddCommand(options = {}) {
- if (options.fromDbt !== void 0) await importFromDbt(options);
+ if (options.demo) await addDemo(options);
+ else if (options.fromDbt !== void 0) await importFromDbt(options);
  else await addManual(options);
  }

@@ -2806,7 +2841,7 @@ program.command("login").description("Authenticate with Bonnard via your browser
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
  program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
- datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
+ datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
  datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
  datasource.command("test").description("Test data source connectivity by connecting directly to the warehouse").argument("<name>", "Data source name from .bon/datasources.yaml").option("--remote", "Test via Bonnard API instead of direct connection (requires login)").action(datasourceTestCommand);
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
@@ -12,16 +12,22 @@ confirming progress before moving on.

  ## Phase 1: Connect a Data Source

- Check if the user has dbt:
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:

  ```bash
- # Import from dbt (if they use it)
+ # Option A: Import from dbt (if they use it)
  bon datasource add --from-dbt

- # Or add manually (interactive)
+ # Option B: Add manually (interactive)
  bon datasource add
+
+ # Option C: Use demo data (no warehouse needed)
+ bon datasource add --demo
  ```

+ The demo option adds a read-only Contoso retail dataset with tables like
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
+
  Then verify the connection works:

  ```bash
@@ -35,65 +41,81 @@ If the test fails, common issues:

  ## Phase 2: Explore the Data

- Use `bon preview` to understand what tables and columns are available:
+ Use `bon preview` to understand what tables and columns are available.
+ **Always run this before creating cubes** — use the results to decide which
+ tables to model and what columns to expose.

  ```bash
- # List tables (Postgres)
+ # List tables — use the schema from the datasource config
+ # For demo data (contoso schema):
+ bon preview contoso_demo "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"
+
+ # For user's own data (typically public schema):
  bon preview <datasource> "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"

- # List tables (Snowflake)
+ # Snowflake:
  bon preview <datasource> "SHOW TABLES"

- # Sample a table
- bon preview <datasource> "SELECT * FROM orders" --limit 10
+ # Then sample the key tables to see columns and data:
+ bon preview contoso_demo "SELECT * FROM contoso.fact_sales" --limit 10
+ bon preview contoso_demo "SELECT * FROM contoso.dim_product" --limit 10
  ```

- This helps you understand the schema before writing cubes.
+ Note the table names, column names, and data types — you'll use these in Phase 3.

  ## Phase 3: Create Your First Cube

- Create a file in `bonnard/cubes/` for the most important table. A cube
- typically maps directly to a database table — define measures for the metrics
- you want to track and dimensions for the attributes you want to filter and
- group by.
+ Based on what you found in Phase 2, create a file in `bonnard/cubes/` for
+ the most important table. A cube maps directly to a database table — define
+ measures for the metrics you want to track and dimensions for the attributes
+ you want to filter and group by. **Use the actual table and column names from
+ Phase 2, not placeholder names.**

- Example — `bonnard/cubes/orders.yaml`:
+ Example using demo data — `bonnard/cubes/sales.yaml`:

  ```yaml
  cubes:
- - name: orders
- sql_table: public.orders
+ - name: sales
+ sql_table: contoso.fact_sales
+ data_source: contoso_demo

  measures:
  - name: count
  type: count
- description: Total number of orders
+ description: Total number of sales transactions

  - name: total_revenue
  type: sum
- sql: amount
- description: Sum of order amounts
+ sql: sales_amount
+ description: Sum of sales revenue
+
+ - name: total_cost
+ type: sum
+ sql: total_cost
+ description: Sum of product costs

  dimensions:
- - name: id
+ - name: sales_key
  type: number
- sql: id
+ sql: sales_key
  primary_key: true

- - name: status
- type: string
- sql: status
- description: Order status (pending, completed, cancelled)
-
- - name: created_at
+ - name: date
  type: time
- sql: created_at
- description: When the order was placed
+ sql: date_key
+ description: Sale date
+
+ - name: sales_quantity
+ type: number
+ sql: sales_quantity
+ description: Number of units sold
  ```

  Key rules:
  - Every cube needs a `primary_key` dimension
  - Every measure and dimension should have a `description`
+ - Set `data_source` to match the datasource name from Phase 1
+ - Use `sql_table` with the full `schema.table` path
  - Use `sql_table` for simple table references, `sql` for complex queries

  Use `bon docs cubes` for the full reference, `bon docs cubes.measures.types`
@@ -102,21 +124,22 @@ for all 12 measure types, `bon docs cubes.dimensions.types` for dimension types.

  ## Phase 4: Create a View

  Views expose a curated subset of measures and dimensions for consumers.
- Create a file in `bonnard/views/`:
+ Create a file in `bonnard/views/` that references the cube from Phase 3.

- Example — `bonnard/views/orders_overview.yaml`:
+ Example using demo data — `bonnard/views/sales_overview.yaml`:

  ```yaml
  views:
- - name: orders_overview
- description: High-level order metrics and attributes
+ - name: sales_overview
+ description: High-level sales metrics and attributes
  cubes:
- - join_path: orders
+ - join_path: sales
  includes:
  - count
  - total_revenue
- - status
- - created_at
+ - total_cost
+ - date
+ - sales_quantity
  ```

  Use `bon docs views` for the full reference.
@@ -154,17 +177,17 @@ credentials (encrypted) to Bonnard.

  ## Phase 7: Test with a Query

- Verify the deployment works:
+ Verify the deployment works using the cube name from Phase 3:

  ```bash
  # Simple count
- bon query '{"measures": ["orders.count"]}'
+ bon query '{"measures": ["sales.count"]}'

- # Group by a dimension
- bon query '{"measures": ["orders.count"], "dimensions": ["orders.status"]}'
+ # With a dimension
+ bon query '{"measures": ["sales.total_revenue"], "dimensions": ["sales.date"]}'

  # SQL format
- bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
+ bon query --sql "SELECT MEASURE(total_revenue) FROM sales"
  ```

  ## Phase 8: Connect AI Agents (Optional)
@@ -11,16 +11,22 @@ confirming progress before moving on.

  ## Phase 1: Connect a Data Source

- Check if the user has dbt:
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:

  ```bash
- # Import from dbt (if they use it)
+ # Option A: Import from dbt (if they use it)
  bon datasource add --from-dbt

- # Or add manually (interactive)
+ # Option B: Add manually (interactive)
  bon datasource add
+
+ # Option C: Use demo data (no warehouse needed)
+ bon datasource add --demo
  ```

+ The demo option adds a read-only Contoso retail dataset with tables like
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
+
  Then verify the connection works:

  ```bash
@@ -34,65 +40,81 @@ If the test fails, common issues:

  ## Phase 2: Explore the Data

- Use `bon preview` to understand what tables and columns are available:
+ Use `bon preview` to understand what tables and columns are available.
+ **Always run this before creating cubes** — use the results to decide which
+ tables to model and what columns to expose.

  ```bash
- # List tables (Postgres)
+ # List tables — use the schema from the datasource config
+ # For demo data (contoso schema):
+ bon preview contoso_demo "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"
+
+ # For user's own data (typically public schema):
  bon preview <datasource> "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"

- # List tables (Snowflake)
+ # Snowflake:
  bon preview <datasource> "SHOW TABLES"

- # Sample a table
- bon preview <datasource> "SELECT * FROM orders" --limit 10
+ # Then sample the key tables to see columns and data:
+ bon preview contoso_demo "SELECT * FROM contoso.fact_sales" --limit 10
+ bon preview contoso_demo "SELECT * FROM contoso.dim_product" --limit 10
  ```

- This helps you understand the schema before writing cubes.
+ Note the table names, column names, and data types — you'll use these in Phase 3.

  ## Phase 3: Create Your First Cube

- Create a file in `bonnard/cubes/` for the most important table. A cube
- typically maps directly to a database table — define measures for the metrics
- you want to track and dimensions for the attributes you want to filter and
- group by.
+ Based on what you found in Phase 2, create a file in `bonnard/cubes/` for
+ the most important table. A cube maps directly to a database table — define
+ measures for the metrics you want to track and dimensions for the attributes
+ you want to filter and group by. **Use the actual table and column names from
+ Phase 2, not placeholder names.**

- Example — `bonnard/cubes/orders.yaml`:
+ Example using demo data — `bonnard/cubes/sales.yaml`:

  ```yaml
  cubes:
- - name: orders
- sql_table: public.orders
+ - name: sales
+ sql_table: contoso.fact_sales
+ data_source: contoso_demo

  measures:
  - name: count
  type: count
- description: Total number of orders
+ description: Total number of sales transactions

  - name: total_revenue
  type: sum
- sql: amount
- description: Sum of order amounts
+ sql: sales_amount
+ description: Sum of sales revenue
+
+ - name: total_cost
+ type: sum
+ sql: total_cost
+ description: Sum of product costs

  dimensions:
- - name: id
+ - name: sales_key
  type: number
- sql: id
+ sql: sales_key
  primary_key: true

- - name: status
- type: string
- sql: status
- description: Order status (pending, completed, cancelled)
-
- - name: created_at
+ - name: date
  type: time
- sql: created_at
- description: When the order was placed
+ sql: date_key
+ description: Sale date
+
+ - name: sales_quantity
+ type: number
+ sql: sales_quantity
+ description: Number of units sold
  ```

  Key rules:
  - Every cube needs a `primary_key` dimension
  - Every measure and dimension should have a `description`
+ - Set `data_source` to match the datasource name from Phase 1
+ - Use `sql_table` with the full `schema.table` path
  - Use `sql_table` for simple table references, `sql` for complex queries

  Use `bon docs cubes` for the full reference, `bon docs cubes.measures.types`
@@ -101,21 +123,22 @@ for all 12 measure types, `bon docs cubes.dimensions.types` for dimension types.

  ## Phase 4: Create a View

  Views expose a curated subset of measures and dimensions for consumers.
- Create a file in `bonnard/views/`:
+ Create a file in `bonnard/views/` that references the cube from Phase 3.

- Example — `bonnard/views/orders_overview.yaml`:
+ Example using demo data — `bonnard/views/sales_overview.yaml`:

  ```yaml
  views:
- - name: orders_overview
- description: High-level order metrics and attributes
+ - name: sales_overview
+ description: High-level sales metrics and attributes
  cubes:
- - join_path: orders
+ - join_path: sales
  includes:
  - count
  - total_revenue
- - status
- - created_at
+ - total_cost
+ - date
+ - sales_quantity
  ```

  Use `bon docs views` for the full reference.
@@ -153,17 +176,17 @@ credentials (encrypted) to Bonnard.

  ## Phase 7: Test with a Query

- Verify the deployment works:
+ Verify the deployment works using the cube name from Phase 3:

  ```bash
  # Simple count
- bon query '{"measures": ["orders.count"]}'
+ bon query '{"measures": ["sales.count"]}'

- # Group by a dimension
- bon query '{"measures": ["orders.count"], "dimensions": ["orders.status"]}'
+ # With a dimension
+ bon query '{"measures": ["sales.total_revenue"], "dimensions": ["sales.date"]}'

  # SQL format
- bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
+ bon query --sql "SELECT MEASURE(total_revenue) FROM sales"
  ```

  ## Phase 8: Connect AI Agents (Optional)
@@ -39,12 +39,29 @@ my-project/
  └── datasources.yaml # Warehouse connections
  ```

+ ## Demo Data
+
+ No warehouse? Use the built-in demo dataset to try Bonnard:
+
+ ```bash
+ bon datasource add --demo
+ ```
+
+ This adds a read-only **Contoso** retail database (Postgres) with tables:
+ - `fact_sales` — transactions with sales_amount, unit_price, sales_quantity, date_key
+ - `dim_product` — product_name, brand_name, manufacturer, unit_cost, unit_price
+ - `dim_store` — store_name, store_type, employee_count, selling_area_size
+ - `dim_customer` — first_name, last_name, gender, yearly_income, education, occupation
+
+ All tables are in the `contoso` schema. The datasource is named `contoso_demo`.
+
  ## Quick Reference

  | Command | Purpose |
  |---------|---------|
  | `bon init` | Initialize new project |
  | `bon datasource add` | Add warehouse connection |
+ | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
  | `bon datasource add --from-dbt` | Import from dbt profiles |
  | `bon datasource test <name>` | Test connection |
  | `bon validate` | Validate YAML syntax |
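
A sketch of exploring the demo tables with `bon preview`, per the docs added above. The `product_key` join column is an assumption: the diff names the tables and some columns but not the fact table's foreign keys, so verify against `information_schema` first.

```bash
# List the demo tables (command taken verbatim from the docs above)
bon preview contoso_demo "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"

# Hypothetical rollup of revenue by brand; product_key is an assumed join column
bon preview contoso_demo "SELECT p.brand_name, SUM(s.sales_amount) AS revenue \
  FROM contoso.fact_sales s JOIN contoso.dim_product p ON p.product_key = s.product_key \
  GROUP BY 1 ORDER BY 2 DESC" --limit 10
```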
@@ -0,0 +1,16 @@
+ # Bonnard datasources configuration
+ # This file contains credentials - add to .gitignore
+ # Env vars like {{ env_var('PASSWORD') }} are resolved at deploy time
+
+ datasources:
+ - name: contoso_demo
+ type: postgres
+ source: demo
+ config:
+ host: aws-1-eu-west-1.pooler.supabase.com
+ port: "5432"
+ database: postgres
+ schema: contoso
+ credentials:
+ username: demo_reader.yvbfzqogtdsqqkpyztlu
+ password: contoso-demo-2025!
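
The template's header comment notes that `{{ env_var('NAME') }}` references are resolved at deploy time. A sketch of storing a secret that way, using only the non-interactive flags defined in the `datasource add` command above; all values (host, names) are placeholders:

```bash
# --password-env stores the password as {{ env_var('PGPASSWORD') }} rather than plaintext
bon datasource add --name my_pg --type postgres \
  --host db.example.com --port 5432 --database analytics --schema public \
  --user analyst --password-env PGPASSWORD
```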
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bonnard/cli",
- "version": "0.1.10",
+ "version": "0.1.12",
  "type": "module",
  "bin": {
  "bon": "./dist/bin/bon.mjs"
@@ -9,7 +9,7 @@
  "dist"
  ],
  "scripts": {
- "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && mkdir -p dist/docs/topics dist/docs/schemas && cp ../content/index.md dist/docs/_index.md && cp ../content/modeling/*.md dist/docs/topics/ && cp ../content/dashboards/*.md dist/docs/topics/",
+ "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && mkdir -p dist/docs/topics dist/docs/schemas && cp ../content/index.md dist/docs/_index.md && cp ../content/modeling/*.md dist/docs/topics/",
  "dev": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin --watch",
  "test": "vitest run"
  },