@bonnard/cli 0.2.1 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/bon.mjs +1415 -132
- package/dist/bin/{cubes-Bf0IPYd7.mjs → cubes-9rklhdAJ.mjs} +1 -1
- package/dist/bin/push-mZujN1Ik.mjs +35 -0
- package/dist/bin/{validate-DEh1XQnH.mjs → validate-BdqZBH2n.mjs} +1 -1
- package/dist/docs/topics/workflow.deploy.md +1 -1
- package/dist/docs/topics/workflow.md +0 -1
- package/dist/docs/topics/workflow.validate.md +1 -1
- package/dist/templates/claude/skills/bonnard-get-started/SKILL.md +13 -15
- package/dist/templates/claude/skills/bonnard-metabase-migrate/SKILL.md +258 -0
- package/dist/templates/cursor/rules/bonnard-get-started.mdc +13 -15
- package/dist/templates/cursor/rules/bonnard-metabase-migrate.mdc +257 -0
- package/dist/templates/shared/bonnard.md +4 -1
- package/package.json +1 -1

package/dist/bin/push-mZujN1Ik.mjs
@@ -0,0 +1,35 @@
+import { n as resolveEnvVarsInCredentials, r as post, t as getLocalDatasource } from "./bon.mjs";
+import pc from "picocolors";
+import { confirm } from "@inquirer/prompts";
+
+//#region src/commands/datasource/push.ts
+/**
+* Push a datasource programmatically (for use by deploy command)
+* Returns true on success, false on failure
+*/
+async function pushDatasource(name, options = {}) {
+  const datasource = getLocalDatasource(name);
+  if (!datasource) {
+    if (!options.silent) console.error(pc.red(`Datasource "${name}" not found locally`));
+    return false;
+  }
+  const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+  if (missing.length > 0) {
+    if (!options.silent) console.error(pc.red(`Missing env vars for "${name}": ${missing.join(", ")}`));
+    return false;
+  }
+  try {
+    await post("/api/datasources", {
+      name: datasource.name,
+      warehouse_type: datasource.type,
+      config: datasource.config,
+      credentials: resolved
+    });
+    return true;
+  } catch {
+    return false;
+  }
+}
+
+//#endregion
+export { pushDatasource };
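
For orientation, the new module's single export is meant to be called programmatically rather than from the command line. A minimal sketch of a caller, assuming only the signature visible above; the import specifier and exit handling are illustrative, not taken from the package:

```js
// Hypothetical caller: exercises the pushDatasource contract shown in the diff.
// The function reports failure by returning false instead of throwing.
import { pushDatasource } from "./push-mZujN1Ik.mjs";

const ok = await pushDatasource("my_warehouse", { silent: false });
if (!ok) {
  // Datasource missing locally, env vars unresolved, or the POST failed.
  process.exit(1);
}
```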

package/dist/docs/topics/workflow.deploy.md
@@ -148,7 +148,7 @@ Deploy aborted. Fix validation errors first.
 Deploy aborted. Fix connection issues:
 - Check credentials in .bon/datasources.yaml
 - Verify network access to database
-- Run: bon datasource
+- Run: bon datasource add (to reconfigure)
 ```
 
 ### Auth Errors

package/dist/docs/topics/workflow.md
@@ -146,7 +146,6 @@ bonnard/cubes/
 | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
 | `bon datasource add --from-dbt` | Import from dbt profiles |
 | `bon datasource list` | List configured sources |
-| `bon datasource test <name>` | Test connection (requires login) |
 | `bon validate` | Check cube and view syntax |
 | `bon deploy -m "message"` | Deploy to Bonnard (message required) |
 | `bon deploy --ci` | Non-interactive deploy |

package/dist/docs/topics/workflow.validate.md
@@ -116,7 +116,7 @@ measures:
 1. **Run before every deploy** — `bon validate && bon deploy`
 2. **Add to CI/CD** — validate on pull requests
 3. **Fix errors first** — don't deploy with validation errors
-4. **Test connections** —
+4. **Test connections** — connections are tested automatically during `bon deploy`
 
 ## See Also
 
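
The "Add to CI/CD" tip above can be made concrete. A minimal sketch of a pull-request check, assuming a GitHub Actions runner and that installing `@bonnard/cli` puts the `bon` binary on PATH (the workflow layout is an assumption, not from the package):

```yaml
# Hypothetical GitHub Actions workflow: fail the PR if validation fails.
name: validate-semantic-layer
on: pull_request
jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 20
      # Assumes @bonnard/cli exposes the `bon` binary on global install.
      - run: npm install -g @bonnard/cli
      - run: bon validate
```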

package/dist/templates/claude/skills/bonnard-get-started/SKILL.md
@@ -15,29 +15,27 @@ confirming progress before moving on.
 Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
 
 ```bash
-# Option A:
+# Option A: Use demo data (no warehouse needed)
+bon datasource add --demo
+
+# Option B: Import from dbt (if they use it)
 bon datasource add --from-dbt
 
-# Option
-bon datasource add
+# Option C: Add manually, non-interactive (preferred for agents)
+bon datasource add --name my_warehouse --type postgres \
+  --host db.example.com --port 5432 --database mydb --schema public \
+  --user myuser --password mypassword
 
-# Option
-bon datasource add
+# Option D: Add manually, interactive (in user's terminal)
+bon datasource add
 ```
 
+Supported types: `postgres` (also works for Redshift), `snowflake`, `bigquery`, `databricks`.
+
 The demo option adds a read-only Contoso retail dataset with tables like
 `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
 
-
-
-```bash
-bon datasource test <name>
-```
-
-If the test fails, common issues:
-- Wrong credentials — re-run `bon datasource add`
-- Network/firewall — check warehouse allows connections from this machine
-- SSL issues (Postgres) — may need `sslmode` in connection config
+The connection will be tested automatically during `bon deploy`.
 
 ## Phase 2: Explore the Data
 

package/dist/templates/claude/skills/bonnard-metabase-migrate/SKILL.md
@@ -0,0 +1,258 @@
+---
+name: bonnard-metabase-migrate
+description: Guide migration from an existing Metabase instance to a Bonnard semantic layer. Use when user says "migrate from metabase", "import metabase", "metabase to semantic layer", or has Metabase data they want to model.
+allowed-tools: Bash(bon *)
+---
+
+# Migrate from Metabase to Bonnard
+
+This skill guides you through analyzing an existing Metabase instance and
+building a semantic layer that replicates its most important metrics.
+Walk through each phase in order, confirming progress before moving on.
+
+## Phase 1: Connect to Metabase
+
+Set up a connection to the Metabase instance:
+
+```bash
+bon metabase connect
+```
+
+This prompts for the Metabase URL and API key. The API key should be created
+in Metabase under Admin > Settings > Authentication > API Keys.
+An admin-level key gives the richest analysis (permissions, schema access).
+
+## Phase 2: Analyze the Instance
+
+Generate an intelligence report that maps the entire Metabase instance:
+
+```bash
+bon metabase analyze
+```
+
+This writes a report to `.bon/metabase-analysis.md`. Read it carefully — it
+drives every decision in the remaining phases.
+
+### How to interpret each section
+
+| Report Section | What It Tells You | Action |
+|----------------|-------------------|--------|
+| **Most Referenced Tables** | Tables used most in SQL queries | Create cubes for these first — they are the core of the data model |
+| **Top Cards by Activity** | Most-viewed questions/models | `analytical` cards (GROUP BY + aggregation) map to measures; `lookup` cards indicate key filter dimensions; `display` cards can be skipped |
+| **Common Filter Variables** | Template vars (`{{var}}`) used across 3+ cards | These must be dimensions on relevant cubes |
+| **Foreign Key Relationships** | FK links between tables | Define `joins` between cubes using these relationships |
+| **Collection Structure** | How users organize content by business area | Map each top-level collection to a view (one view per business domain) |
+| **Dashboard Parameters** | Shared filters across dashboards | The most important shared dimensions — ensure they exist on relevant cubes |
+| **Table Inventory** | Field counts and classification per table | Field classification (dims/measures/time) guides each cube definition; tables with 0 refs can be deprioritized |
+| **Schema Access** | Which schemas non-admin groups can query | Focus on user-facing schemas — skip admin-only/staging schemas |
+
+## Phase 3: Connect the Data Warehouse
+
+Add a datasource pointing to the same database that Metabase queries.
+The database connection details can often be found in Metabase under
+Admin > Databases, or in the analysis report header.
+
+```bash
+# Non-interactive (preferred for agents)
+bon datasource add --name my_warehouse --type postgres \
+  --host db.example.com --port 5432 --database mydb --schema public \
+  --user myuser --password mypassword
+
+# Import from dbt if available
+bon datasource add --from-dbt
+
+# Interactive setup (in user's terminal)
+bon datasource add
+```
+
+Supported types: `postgres` (also works for Redshift), `snowflake`, `bigquery`, `databricks`.
+
+The connection will be tested automatically during `bon deploy`.
+
+## Phase 4: Explore Key Tables
+
+Before writing cubes, drill into the most important tables and cards
+identified in Phase 2. Use the explore commands to understand field types
+and existing SQL patterns:
+
+```bash
+# View table fields with type classification
+bon metabase explore table <id>
+
+# View card SQL and columns
+bon metabase explore card <id>
+
+# View schemas and tables in a database
+bon metabase explore database <id>
+
+# View cards in a collection
+bon metabase explore collection <id>
+```
+
+### How explore output maps to cube definitions
+
+| Explore Field | Cube Mapping |
+|---------------|-------------|
+| Field class `pk` | Set `primary_key: true` on dimension |
+| Field class `fk` | Join candidate — note the target table |
+| Field class `time` | Dimension with `type: time` |
+| Field class `measure` | Measure candidate — check card SQL for aggregation type |
+| Field class `dim` | Dimension with `type: string` or `type: number` |
+
+### How card SQL maps to measures
+
+Look at the SQL in `analytical` cards to determine measure types:
+
+| Card SQL Pattern | Cube Measure |
+|-----------------|-------------|
+| `SUM(amount)` | `type: sum`, `sql: amount` |
+| `COUNT(*)` | `type: count` |
+| `COUNT(DISTINCT user_id)` | `type: count_distinct`, `sql: user_id` |
+| `AVG(price)` | `type: avg`, `sql: price` |
+| `MIN(date)` / `MAX(date)` | `type: min` / `type: max`, `sql: date` |
+
+Use `bon docs cubes.measures.types` for all 12 measure types.
+
+## Phase 5: Build Cubes
+
+Create cubes for the most-referenced tables (from Phase 2). Start with the
+highest-referenced table and work down. Create one file per cube in
+`bonnard/cubes/`.
+
+For each cube:
+1. Set `sql_table` to the full `schema.table` path
+2. Set `data_source` to the datasource name from Phase 3
+3. Add a `primary_key` dimension
+4. Add time dimensions for date/datetime columns
+5. Add measures based on card SQL patterns (Phase 4)
+6. Add dimensions for columns used as filters (template vars from Phase 2)
+7. Add `description` to every measure and dimension
+
+Example — `bonnard/cubes/orders.yaml`:
+
+```yaml
+cubes:
+  - name: orders
+    sql_table: public.orders
+    data_source: my_warehouse
+    description: Order transactions
+
+    measures:
+      - name: count
+        type: count
+        description: Total number of orders
+
+      - name: total_revenue
+        type: sum
+        sql: amount
+        description: Sum of order amounts
+
+    dimensions:
+      - name: id
+        type: number
+        sql: id
+        primary_key: true
+
+      - name: created_at
+        type: time
+        sql: created_at
+        description: Order creation timestamp
+
+      - name: status
+        type: string
+        sql: status
+        description: Order status (pending, completed, cancelled)
+```
+
+### Adding joins
+
+Use FK relationships from the analysis report to define joins between cubes:
+
+```yaml
+joins:
+  - name: customers
+    sql: "{CUBE}.customer_id = {customers.id}"
+    relationship: many_to_one
+```
+
+Use `bon docs cubes.joins` for the full reference.
+
+## Phase 6: Build Views
+
+Map Metabase collections to views. Each top-level collection (business domain)
+from the analysis report becomes a view that composes the relevant cubes.
+
+Create one file per view in `bonnard/views/`.
+
+Example — `bonnard/views/sales_analytics.yaml`:
+
+```yaml
+views:
+  - name: sales_analytics
+    description: Sales metrics and dimensions for the sales team
+    cubes:
+      - join_path: orders
+        includes:
+          - count
+          - total_revenue
+          - created_at
+          - status
+
+      - join_path: orders.customers
+        prefix: true
+        includes:
+          - name
+          - region
+```
+
+Use `bon docs views` for the full reference.
+
+## Phase 7: Validate and Deploy
+
+Validate the semantic layer:
+
+```bash
+bon validate
+```
+
+Fix any errors. Common issues:
+- Missing `primary_key` dimension
+- Unknown measure/dimension types
+- Undefined cube referenced in a view join path
+- Missing `data_source`
+
+Then deploy:
+
+```bash
+bon login
+bon deploy -m "Migrate semantic layer from Metabase"
+```
+
+## Phase 8: Verify
+
+Compare results from the semantic layer against Metabase card outputs.
+Pick 3-5 important `analytical` cards from the analysis report and run
+equivalent queries:
+
+```bash
+# Run a semantic layer query
+bon query '{"measures": ["orders.total_revenue"], "dimensions": ["orders.status"]}'
+
+# SQL format
+bon query --sql "SELECT status, MEASURE(total_revenue) FROM orders GROUP BY 1"
+```
+
+Compare the numbers with the corresponding Metabase card. If they don't match:
+- Check the SQL in the card (`bon metabase explore card <id>`) for filters or transformations
+- Ensure the measure type matches the aggregation (SUM vs COUNT vs AVG)
+- Check for WHERE clauses that should be segments or pre-filters
+
+## Next Steps
+
+After the core migration is working:
+
+- Add remaining tables as cubes (work down the reference count list)
+- Add calculated measures for complex card SQL (`bon docs cubes.measures.calculated`)
+- Add segments for common WHERE clauses (`bon docs cubes.segments`)
+- Set up MCP for AI agent access (`bon mcp`)
+- Review and iterate with `bon deployments` and `bon diff <id>`
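
As a worked instance of the "How card SQL maps to measures" table in the skill above (the field and measure names here are hypothetical): a Metabase card computing `COUNT(DISTINCT customer_id)` over the orders table would become a `count_distinct` measure on the orders cube:

```yaml
# Hypothetical measure sketch; customer_id is an assumed column name.
measures:
  - name: unique_customers
    type: count_distinct
    sql: customer_id
    description: Distinct customers with at least one order
```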

package/dist/templates/cursor/rules/bonnard-get-started.mdc
@@ -14,29 +14,27 @@ confirming progress before moving on.
 Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
 
 ```bash
-# Option A:
+# Option A: Use demo data (no warehouse needed)
+bon datasource add --demo
+
+# Option B: Import from dbt (if they use it)
 bon datasource add --from-dbt
 
-# Option
-bon datasource add
+# Option C: Add manually, non-interactive (preferred for agents)
+bon datasource add --name my_warehouse --type postgres \
+  --host db.example.com --port 5432 --database mydb --schema public \
+  --user myuser --password mypassword
 
-# Option
-bon datasource add
+# Option D: Add manually, interactive (in user's terminal)
+bon datasource add
 ```
 
+Supported types: `postgres` (also works for Redshift), `snowflake`, `bigquery`, `databricks`.
+
 The demo option adds a read-only Contoso retail dataset with tables like
 `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
 
-
-
-```bash
-bon datasource test <name>
-```
-
-If the test fails, common issues:
-- Wrong credentials — re-run `bon datasource add`
-- Network/firewall — check warehouse allows connections from this machine
-- SSL issues (Postgres) — may need `sslmode` in connection config
+The connection will be tested automatically during `bon deploy`.
 
 ## Phase 2: Explore the Data
 

package/dist/templates/cursor/rules/bonnard-metabase-migrate.mdc
@@ -0,0 +1,257 @@
+---
+description: "Guide migration from an existing Metabase instance to a Bonnard semantic layer. Use when user says 'migrate from metabase', 'import metabase', 'metabase to semantic layer', or has Metabase data they want to model."
+alwaysApply: false
+---
+
+# Migrate from Metabase to Bonnard
+
+This skill guides you through analyzing an existing Metabase instance and
+building a semantic layer that replicates its most important metrics.
+Walk through each phase in order, confirming progress before moving on.
+
+## Phase 1: Connect to Metabase
+
+Set up a connection to the Metabase instance:
+
+```bash
+bon metabase connect
+```
+
+This prompts for the Metabase URL and API key. The API key should be created
+in Metabase under Admin > Settings > Authentication > API Keys.
+An admin-level key gives the richest analysis (permissions, schema access).
+
+## Phase 2: Analyze the Instance
+
+Generate an intelligence report that maps the entire Metabase instance:
+
+```bash
+bon metabase analyze
+```
+
+This writes a report to `.bon/metabase-analysis.md`. Read it carefully — it
+drives every decision in the remaining phases.
+
+### How to interpret each section
+
+| Report Section | What It Tells You | Action |
+|----------------|-------------------|--------|
+| **Most Referenced Tables** | Tables used most in SQL queries | Create cubes for these first — they are the core of the data model |
+| **Top Cards by Activity** | Most-viewed questions/models | `analytical` cards (GROUP BY + aggregation) map to measures; `lookup` cards indicate key filter dimensions; `display` cards can be skipped |
+| **Common Filter Variables** | Template vars (`{{var}}`) used across 3+ cards | These must be dimensions on relevant cubes |
+| **Foreign Key Relationships** | FK links between tables | Define `joins` between cubes using these relationships |
+| **Collection Structure** | How users organize content by business area | Map each top-level collection to a view (one view per business domain) |
+| **Dashboard Parameters** | Shared filters across dashboards | The most important shared dimensions — ensure they exist on relevant cubes |
+| **Table Inventory** | Field counts and classification per table | Field classification (dims/measures/time) guides each cube definition; tables with 0 refs can be deprioritized |
+| **Schema Access** | Which schemas non-admin groups can query | Focus on user-facing schemas — skip admin-only/staging schemas |
+
+## Phase 3: Connect the Data Warehouse
+
+Add a datasource pointing to the same database that Metabase queries.
+The database connection details can often be found in Metabase under
+Admin > Databases, or in the analysis report header.
+
+```bash
+# Non-interactive (preferred for agents)
+bon datasource add --name my_warehouse --type postgres \
+  --host db.example.com --port 5432 --database mydb --schema public \
+  --user myuser --password mypassword
+
+# Import from dbt if available
+bon datasource add --from-dbt
+
+# Interactive setup (in user's terminal)
+bon datasource add
+```
+
+Supported types: `postgres` (also works for Redshift), `snowflake`, `bigquery`, `databricks`.
+
+The connection will be tested automatically during `bon deploy`.
+
+## Phase 4: Explore Key Tables
+
+Before writing cubes, drill into the most important tables and cards
+identified in Phase 2. Use the explore commands to understand field types
+and existing SQL patterns:
+
+```bash
+# View table fields with type classification
+bon metabase explore table <id>
+
+# View card SQL and columns
+bon metabase explore card <id>
+
+# View schemas and tables in a database
+bon metabase explore database <id>
+
+# View cards in a collection
+bon metabase explore collection <id>
+```
+
+### How explore output maps to cube definitions
+
+| Explore Field | Cube Mapping |
+|---------------|-------------|
+| Field class `pk` | Set `primary_key: true` on dimension |
+| Field class `fk` | Join candidate — note the target table |
+| Field class `time` | Dimension with `type: time` |
+| Field class `measure` | Measure candidate — check card SQL for aggregation type |
+| Field class `dim` | Dimension with `type: string` or `type: number` |
+
+### How card SQL maps to measures
+
+Look at the SQL in `analytical` cards to determine measure types:
+
+| Card SQL Pattern | Cube Measure |
+|-----------------|-------------|
+| `SUM(amount)` | `type: sum`, `sql: amount` |
+| `COUNT(*)` | `type: count` |
+| `COUNT(DISTINCT user_id)` | `type: count_distinct`, `sql: user_id` |
+| `AVG(price)` | `type: avg`, `sql: price` |
+| `MIN(date)` / `MAX(date)` | `type: min` / `type: max`, `sql: date` |
+
+Use `bon docs cubes.measures.types` for all 12 measure types.
+
+## Phase 5: Build Cubes
+
+Create cubes for the most-referenced tables (from Phase 2). Start with the
+highest-referenced table and work down. Create one file per cube in
+`bonnard/cubes/`.
+
+For each cube:
+1. Set `sql_table` to the full `schema.table` path
+2. Set `data_source` to the datasource name from Phase 3
+3. Add a `primary_key` dimension
+4. Add time dimensions for date/datetime columns
+5. Add measures based on card SQL patterns (Phase 4)
+6. Add dimensions for columns used as filters (template vars from Phase 2)
+7. Add `description` to every measure and dimension
+
+Example — `bonnard/cubes/orders.yaml`:
+
+```yaml
+cubes:
+  - name: orders
+    sql_table: public.orders
+    data_source: my_warehouse
+    description: Order transactions
+
+    measures:
+      - name: count
+        type: count
+        description: Total number of orders
+
+      - name: total_revenue
+        type: sum
+        sql: amount
+        description: Sum of order amounts
+
+    dimensions:
+      - name: id
+        type: number
+        sql: id
+        primary_key: true
+
+      - name: created_at
+        type: time
+        sql: created_at
+        description: Order creation timestamp
+
+      - name: status
+        type: string
+        sql: status
+        description: Order status (pending, completed, cancelled)
+```
+
+### Adding joins
+
+Use FK relationships from the analysis report to define joins between cubes:
+
+```yaml
+joins:
+  - name: customers
+    sql: "{CUBE}.customer_id = {customers.id}"
+    relationship: many_to_one
+```
+
+Use `bon docs cubes.joins` for the full reference.
+
+## Phase 6: Build Views
+
+Map Metabase collections to views. Each top-level collection (business domain)
+from the analysis report becomes a view that composes the relevant cubes.
+
+Create one file per view in `bonnard/views/`.
+
+Example — `bonnard/views/sales_analytics.yaml`:
+
+```yaml
+views:
+  - name: sales_analytics
+    description: Sales metrics and dimensions for the sales team
+    cubes:
+      - join_path: orders
+        includes:
+          - count
+          - total_revenue
+          - created_at
+          - status
+
+      - join_path: orders.customers
+        prefix: true
+        includes:
+          - name
+          - region
+```
+
+Use `bon docs views` for the full reference.
+
+## Phase 7: Validate and Deploy
+
+Validate the semantic layer:
+
+```bash
+bon validate
+```
+
+Fix any errors. Common issues:
+- Missing `primary_key` dimension
+- Unknown measure/dimension types
+- Undefined cube referenced in a view join path
+- Missing `data_source`
+
+Then deploy:
+
+```bash
+bon login
+bon deploy -m "Migrate semantic layer from Metabase"
+```
+
+## Phase 8: Verify
+
+Compare results from the semantic layer against Metabase card outputs.
+Pick 3-5 important `analytical` cards from the analysis report and run
+equivalent queries:
+
+```bash
+# Run a semantic layer query
+bon query '{"measures": ["orders.total_revenue"], "dimensions": ["orders.status"]}'

+# SQL format
+bon query --sql "SELECT status, MEASURE(total_revenue) FROM orders GROUP BY 1"
+```
+
+Compare the numbers with the corresponding Metabase card. If they don't match:
+- Check the SQL in the card (`bon metabase explore card <id>`) for filters or transformations
+- Ensure the measure type matches the aggregation (SUM vs COUNT vs AVG)
+- Check for WHERE clauses that should be segments or pre-filters
+
+## Next Steps
+
+After the core migration is working:
+
+- Add remaining tables as cubes (work down the reference count list)
+- Add calculated measures for complex card SQL (`bon docs cubes.measures.calculated`)
+- Add segments for common WHERE clauses (`bon docs cubes.segments`)
+- Set up MCP for AI agent access (`bon mcp`)
+- Review and iterate with `bon deployments` and `bon diff <id>`