@bonnard/cli 0.1.9 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/bon.mjs CHANGED
@@ -564,23 +564,18 @@ function createAgentTemplates(cwd, env) {
564
564
  const claudeRulesDir = path.join(cwd, ".claude", "rules");
565
565
  const claudeSkillsDir = path.join(cwd, ".claude", "skills");
566
566
  fs.mkdirSync(claudeRulesDir, { recursive: true });
567
- fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-cli"), { recursive: true });
568
- fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-queries"), { recursive: true });
567
+ fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-get-started"), { recursive: true });
569
568
  writeTemplateFile(sharedBonnard, path.join(claudeRulesDir, "bonnard.md"), createdFiles);
570
- writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(claudeSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
571
- writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(claudeSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
569
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-get-started/SKILL.md"), path.join(claudeSkillsDir, "bonnard-get-started", "SKILL.md"), createdFiles);
572
570
  mergeSettingsJson(loadJsonTemplate("claude/settings.json"), path.join(cwd, ".claude", "settings.json"), createdFiles);
573
571
  const cursorRulesDir = path.join(cwd, ".cursor", "rules");
574
572
  fs.mkdirSync(cursorRulesDir, { recursive: true });
575
573
  writeTemplateFile(withCursorFrontmatter(sharedBonnard, "Bonnard semantic layer project context", true), path.join(cursorRulesDir, "bonnard.mdc"), createdFiles);
576
- writeTemplateFile(loadTemplate("cursor/rules/bonnard-cli.mdc"), path.join(cursorRulesDir, "bonnard-cli.mdc"), createdFiles);
577
- writeTemplateFile(loadTemplate("cursor/rules/bonnard-queries.mdc"), path.join(cursorRulesDir, "bonnard-queries.mdc"), createdFiles);
574
+ writeTemplateFile(loadTemplate("cursor/rules/bonnard-get-started.mdc"), path.join(cursorRulesDir, "bonnard-get-started.mdc"), createdFiles);
578
575
  const codexSkillsDir = path.join(cwd, ".agents", "skills");
579
- fs.mkdirSync(path.join(codexSkillsDir, "bonnard-cli"), { recursive: true });
580
- fs.mkdirSync(path.join(codexSkillsDir, "bonnard-queries"), { recursive: true });
576
+ fs.mkdirSync(path.join(codexSkillsDir, "bonnard-get-started"), { recursive: true });
581
577
  writeTemplateFile(sharedBonnard, path.join(cwd, "AGENTS.md"), createdFiles);
582
- writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(codexSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
583
- writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(codexSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
578
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-get-started/SKILL.md"), path.join(codexSkillsDir, "bonnard-get-started", "SKILL.md"), createdFiles);
584
579
  return createdFiles;
585
580
  }
586
581
  async function initCommand() {
@@ -1493,10 +1488,45 @@ async function addManual(options) {
1493
1488
  console.log(pc.dim(`Test connection: bon datasource test ${name}`));
1494
1489
  }
1495
1490
  /**
1491
+ * Add the Contoso demo datasource (read-only retail dataset)
1492
+ */
1493
+ async function addDemo(options) {
1494
+ const name = "contoso_demo";
1495
+ if (datasourceExists(name) && !options.force) {
1496
+ console.log(pc.yellow(`Datasource "${name}" already exists. Use --force to overwrite.`));
1497
+ return;
1498
+ }
1499
+ if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
1500
+ addLocalDatasource({
1501
+ name,
1502
+ type: "postgres",
1503
+ source: "demo",
1504
+ config: {
1505
+ host: "aws-1-eu-west-1.pooler.supabase.com",
1506
+ port: "5432",
1507
+ database: "postgres",
1508
+ schema: "contoso"
1509
+ },
1510
+ credentials: {
1511
+ username: "demo_reader.yvbfzqogtdsqqkpyztlu",
1512
+ password: "contoso-demo-2025!"
1513
+ }
1514
+ });
1515
+ console.log();
1516
+ console.log(pc.green(`✓ Demo datasource "${name}" saved to .bon/datasources.yaml`));
1517
+ console.log();
1518
+ console.log(pc.dim("Contoso is a read-only retail dataset with tables like:"));
1519
+ console.log(pc.dim(" fact_sales, dim_product, dim_store, dim_customer"));
1520
+ console.log();
1521
+ console.log(pc.dim(`Test connection: bon datasource test ${name}`));
1522
+ console.log(pc.dim(`Explore tables: bon preview ${name} "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"`));
1523
+ }
1524
+ /**
1496
1525
  * Main datasource add command
1497
1526
  */
1498
1527
  async function datasourceAddCommand(options = {}) {
1499
- if (options.fromDbt !== void 0) await importFromDbt(options);
1528
+ if (options.demo) await addDemo(options);
1529
+ else if (options.fromDbt !== void 0) await importFromDbt(options);
1500
1530
  else await addManual(options);
1501
1531
  }
1502
1532
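For orientation, the entry written by `bon datasource add --demo` should look roughly like the sketch below, assuming `.bon/datasources.yaml` mirrors the object passed to `addLocalDatasource` above (the exact file layout is not shown in this diff). As the warning in the hunk notes, keep this file out of git.

```yaml
# Hypothetical layout of the demo entry in .bon/datasources.yaml;
# the keys mirror the object passed to addLocalDatasource above.
contoso_demo:
  type: postgres
  source: demo
  config:
    host: aws-1-eu-west-1.pooler.supabase.com
    port: "5432"
    database: postgres
    schema: contoso
  credentials:
    username: demo_reader.yvbfzqogtdsqqkpyztlu
    password: "contoso-demo-2025!"
```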
 
@@ -1786,7 +1816,10 @@ async function queryPostgres(config, credentials, sql, options = {}) {
1786
1816
  try {
1787
1817
  await client.connect();
1788
1818
  const schema = options.schema || config.schema;
1789
- if (schema) await client.query(`SET search_path TO ${schema}`);
1819
+ if (schema) {
1820
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(schema)) throw new Error("Invalid schema name");
1821
+ await client.query(`SET search_path TO "${schema}"`);
1822
+ }
1790
1823
  const result = await client.query(sql);
1791
1824
  await client.end();
1792
1825
  const columns = result.fields?.map((f) => f.name) || [];
@@ -2808,7 +2841,7 @@ program.command("login").description("Authenticate with Bonnard via your browser
2808
2841
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
2809
2842
  program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
2810
2843
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
2811
- datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
2844
+ datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
2812
2845
  datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
2813
2846
  datasource.command("test").description("Test data source connectivity by connecting directly to the warehouse").argument("<name>", "Data source name from .bon/datasources.yaml").option("--remote", "Test via Bonnard API instead of direct connection (requires login)").action(datasourceTestCommand);
2814
2847
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
@@ -0,0 +1,137 @@
1
+ # dashboards.components
2
+
3
+ > Chart and display components for rendering query results in dashboards.
4
+
5
+ ## Overview
6
+
7
+ Components are self-closing HTML-style tags that render query results as charts, tables, or KPI cards. Each component takes a `data` prop referencing a named query.
8
+
9
+ ## Syntax
10
+
11
+ ```markdown
12
+ <ComponentName data={query_name} prop="value" />
13
+ ```
14
+
15
+ - Components are self-closing (`/>`)
16
+ - `data` uses curly braces: `data={query_name}`
17
+ - Other props use quotes: `x="field_name"`
18
+ - Boolean props can be shorthand: `horizontal`
19
+
20
+ ## Component Reference
21
+
22
+ ### BigValue
23
+
24
+ Displays a single KPI metric as a large number.
25
+
26
+ ```markdown
27
+ <BigValue data={total_revenue} value="total_revenue" title="Revenue" />
28
+ ```
29
+
30
+ | Prop | Type | Required | Description |
31
+ |------|------|----------|-------------|
32
+ | `data` | query ref | Yes | Query name (should return a single row) |
33
+ | `value` | string | Yes | Measure field name to display |
34
+ | `title` | string | No | Label above the value |
35
+
36
+ ### LineChart
37
+
38
+ Renders a line chart, typically for time series.
39
+
40
+ ```markdown
41
+ <LineChart data={monthly_revenue} x="created_at" y="total_revenue" title="Revenue Trend" />
42
+ ```
43
+
44
+ | Prop | Type | Required | Description |
45
+ |------|------|----------|-------------|
46
+ | `data` | query ref | Yes | Query name |
47
+ | `x` | string | Yes | Field for x-axis (typically a time dimension) |
48
+ | `y` | string | Yes | Field for y-axis (typically a measure) |
49
+ | `title` | string | No | Chart title |
50
+
51
+ ### BarChart
52
+
53
+ Renders a vertical bar chart. Add `horizontal` for horizontal bars.
54
+
55
+ ```markdown
56
+ <BarChart data={revenue_by_city} x="city" y="total_revenue" />
57
+ <BarChart data={revenue_by_city} x="city" y="total_revenue" horizontal />
58
+ ```
59
+
60
+ | Prop | Type | Required | Description |
61
+ |------|------|----------|-------------|
62
+ | `data` | query ref | Yes | Query name |
63
+ | `x` | string | Yes | Field for category axis |
64
+ | `y` | string | Yes | Field for value axis |
65
+ | `title` | string | No | Chart title |
66
+ | `horizontal` | boolean | No | Render as horizontal bar chart |
67
+
68
+ ### AreaChart
69
+
70
+ Renders a filled area chart.
71
+
72
+ ```markdown
73
+ <AreaChart data={monthly_revenue} x="created_at" y="total_revenue" />
74
+ ```
75
+
76
+ | Prop | Type | Required | Description |
77
+ |------|------|----------|-------------|
78
+ | `data` | query ref | Yes | Query name |
79
+ | `x` | string | Yes | Field for x-axis |
80
+ | `y` | string | Yes | Field for y-axis |
81
+ | `title` | string | No | Chart title |
82
+
83
+ ### PieChart
84
+
85
+ Renders a pie/donut chart.
86
+
87
+ ```markdown
88
+ <PieChart data={by_status} name="status" value="count" title="Order Status" />
89
+ ```
90
+
91
+ | Prop | Type | Required | Description |
92
+ |------|------|----------|-------------|
93
+ | `data` | query ref | Yes | Query name |
94
+ | `name` | string | Yes | Field for slice labels |
95
+ | `value` | string | Yes | Field for slice values |
96
+ | `title` | string | No | Chart title |
97
+
98
+ ### DataTable
99
+
100
+ Renders query results as a table.
101
+
102
+ ```markdown
103
+ <DataTable data={top_products} />
104
+ <DataTable data={top_products} columns="name,revenue,count" />
105
+ ```
106
+
107
+ | Prop | Type | Required | Description |
108
+ |------|------|----------|-------------|
109
+ | `data` | query ref | Yes | Query name |
110
+ | `columns` | string | No | Comma-separated list of columns to show (default: all) |
111
+ | `title` | string | No | Table title |
112
+
113
+ ## Layout: Grid
114
+
115
+ Wrap components in a `<Grid>` tag to arrange them in columns:
116
+
117
+ ```markdown
118
+ <Grid cols="3">
119
+ <BigValue data={total_orders} value="count" title="Orders" />
120
+ <BigValue data={total_revenue} value="total_revenue" title="Revenue" />
121
+ <BigValue data={avg_order} value="avg_order_value" title="Avg Order" />
122
+ </Grid>
123
+ ```
124
+
125
+ | Prop | Type | Default | Description |
126
+ |------|------|---------|-------------|
127
+ | `cols` | string | `"2"` | Number of columns in the grid |
128
+
129
+ ## Field Names
130
+
131
+ Component field names (e.g. `x="city"`, `value="total_revenue"`) use the **unqualified** measure or dimension name — the same names defined in your cube. For example, if your cube has `measures: [{ name: total_revenue, ... }]`, use `value="total_revenue"`.
132
+
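To make the correspondence concrete, here is a minimal sketch pairing a cube definition with a component that consumes it (the cube below is illustrative, not part of this package):

```yaml
# Illustrative cube: the names defined here...
cubes:
  - name: orders
    sql_table: public.orders
    measures:
      - name: total_revenue
        type: sum
        sql: amount
    dimensions:
      - name: city
        type: string
        sql: city

# ...are referenced unqualified in component props, e.g.
#   <BarChart data={revenue_by_city} x="city" y="total_revenue" />
```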
133
+ ## See Also
134
+
135
+ - dashboards.queries
136
+ - dashboards.examples
137
+ - dashboards
@@ -0,0 +1,130 @@
1
+ # dashboards.examples
2
+
3
+ > Complete dashboard examples showing common patterns.
4
+
5
+ ## Revenue Overview Dashboard
6
+
7
+ A KPI + trend + breakdown dashboard:
8
+
9
+ ````markdown
10
+ ---
11
+ title: Revenue Overview
12
+ description: Monthly revenue trends and breakdowns
13
+ ---
14
+
15
+ # Revenue Overview
16
+
17
+ ```query total_revenue
18
+ cube: orders
19
+ measures: [total_revenue]
20
+ ```
21
+
22
+ ```query order_count
23
+ cube: orders
24
+ measures: [count]
25
+ ```
26
+
27
+ ```query avg_order
28
+ cube: orders
29
+ measures: [avg_order_value]
30
+ ```
31
+
32
+ <Grid cols="3">
33
+ <BigValue data={total_revenue} value="total_revenue" title="Total Revenue" />
34
+ <BigValue data={order_count} value="count" title="Orders" />
35
+ <BigValue data={avg_order} value="avg_order_value" title="Avg Order" />
36
+ </Grid>
37
+
38
+ ## Monthly Trend
39
+
40
+ ```query monthly_revenue
41
+ cube: orders
42
+ measures: [total_revenue]
43
+ timeDimension:
44
+ dimension: created_at
45
+ granularity: month
46
+ dateRange: [2025-01-01, 2025-12-31]
47
+ ```
48
+
49
+ <LineChart data={monthly_revenue} x="created_at" y="total_revenue" title="Monthly Revenue" />
50
+
51
+ ## By Category
52
+
53
+ ```query by_category
54
+ cube: orders
55
+ measures: [total_revenue, count]
56
+ dimensions: [category]
57
+ orderBy:
58
+ total_revenue: desc
59
+ ```
60
+
61
+ <BarChart data={by_category} x="category" y="total_revenue" title="Revenue by Category" />
62
+ <DataTable data={by_category} />
63
+ ````
64
+
65
+ ## Sales Pipeline Dashboard
66
+
67
+ Filtered data with pie chart:
68
+
69
+ ````markdown
70
+ ---
71
+ title: Sales Pipeline
72
+ description: Order status breakdown and city analysis
73
+ ---
74
+
75
+ # Sales Pipeline
76
+
77
+ ```query by_status
78
+ cube: orders
79
+ measures: [count]
80
+ dimensions: [status]
81
+ ```
82
+
83
+ <PieChart data={by_status} name="status" value="count" title="Order Status" />
84
+
85
+ ## Top Cities
86
+
87
+ ```query top_cities
88
+ cube: orders
89
+ measures: [total_revenue, count]
90
+ dimensions: [city]
91
+ orderBy:
92
+ total_revenue: desc
93
+ limit: 10
94
+ ```
95
+
96
+ <BarChart data={top_cities} x="city" y="total_revenue" horizontal />
97
+ <DataTable data={top_cities} />
98
+
99
+ ## Completed Orders Over Time
100
+
101
+ ```query completed_trend
102
+ cube: orders
103
+ measures: [total_revenue]
104
+ timeDimension:
105
+ dimension: created_at
106
+ granularity: week
107
+ dateRange: [2025-01-01, 2025-06-30]
108
+ filters:
109
+ - dimension: status
110
+ operator: equals
111
+ values: [completed]
112
+ ```
113
+
114
+ <AreaChart data={completed_trend} x="created_at" y="total_revenue" title="Completed Order Revenue" />
115
+ ````
116
+
117
+ ## Tips
118
+
119
+ - **Start with KPIs**: Use `BigValue` in a `Grid` at the top for key metrics
120
+ - **One query per chart**: Each component gets its own query — keep them focused
121
+ - **Use views**: Prefer view names over cube names when available for cleaner field names
122
+ - **Name queries descriptively**: `monthly_revenue` is better than `q1`
123
+ - **Limit large datasets**: Add `limit` to dimension queries to avoid oversized charts
124
+ - **Time series**: Always use `timeDimension` with `granularity` for time-based charts
125
+
126
+ ## See Also
127
+
128
+ - dashboards
129
+ - dashboards.queries
130
+ - dashboards.components
@@ -0,0 +1,83 @@
1
+ # dashboards
2
+
3
+ > Build interactive dashboards from markdown with embedded semantic layer queries.
4
+
5
+ ## Overview
6
+
7
+ Dashboards are markdown files with YAML frontmatter, query blocks, and chart components. Write them as `.md` files, deploy with `bon dashboard deploy`, and view them in the Bonnard web app.
8
+
9
+ ## Format
10
+
11
+ A dashboard file has three parts:
12
+
13
+ 1. **Frontmatter** — YAML metadata between `---` delimiters
14
+ 2. **Query blocks** — Named data queries in `` ```query `` code fences
15
+ 3. **Content** — Markdown text and chart component tags
16
+
17
+ ## Minimal Example
18
+
19
+ ````markdown
20
+ ---
21
+ title: Order Summary
22
+ description: Key metrics for the orders pipeline
23
+ ---
24
+
25
+ # Order Summary
26
+
27
+ ```query order_count
28
+ cube: orders
29
+ measures: [count]
30
+ ```
31
+
32
+ <BigValue data={order_count} value="count" title="Total Orders" />
33
+
34
+ ```query by_status
35
+ cube: orders
36
+ measures: [count]
37
+ dimensions: [status]
38
+ ```
39
+
40
+ <BarChart data={by_status} x="status" y="count" />
41
+ ````
42
+
43
+ ## Frontmatter
44
+
45
+ The YAML frontmatter is required and must include `title`:
46
+
47
+ ```yaml
48
+ ---
49
+ title: Revenue Dashboard # Required
50
+ description: Monthly trends # Optional
51
+ slug: revenue-dashboard # Optional (derived from title if omitted)
52
+ ---
53
+ ```
54
+
55
+ | Field | Required | Description |
56
+ |-------|----------|-------------|
57
+ | `title` | Yes | Dashboard title displayed in the viewer and listings |
58
+ | `description` | No | Short description shown in dashboard listings |
59
+ | `slug` | No | URL-safe identifier. Auto-derived from title if omitted |
60
+
61
+ ## Deployment
62
+
63
+ ```bash
64
+ # Deploy a single dashboard
65
+ bon dashboard deploy revenue.md
66
+
67
+ # Deploy all dashboards in a directory
68
+ bon dashboard deploy dashboards/
69
+
70
+ # List deployed dashboards
71
+ bon dashboard list
72
+
73
+ # Remove a dashboard
74
+ bon dashboard remove revenue-dashboard
75
+ ```
76
+
77
+ Via MCP tools, agents can use `deploy_dashboard` with the markdown content as a string.
78
+
79
+ ## See Also
80
+
81
+ - dashboards.queries
82
+ - dashboards.components
83
+ - dashboards.examples
@@ -0,0 +1,117 @@
1
+ # dashboards.queries
2
+
3
+ > Define data queries in dashboard markdown using YAML code fences.
4
+
5
+ ## Overview
6
+
7
+ Query blocks fetch data from the semantic layer. Each query has a unique name and maps to a `QueryOptions` shape. Query results are referenced by chart components using `data={query_name}`.
8
+
9
+ ## Syntax
10
+
11
+ Query blocks use fenced code with the `query` language tag followed by a name:
12
+
13
+ ````markdown
14
+ ```query revenue_trend
15
+ cube: orders
16
+ measures: [total_revenue]
17
+ timeDimension:
18
+ dimension: created_at
19
+ granularity: month
20
+ dateRange: [2025-01-01, 2025-12-31]
21
+ ```
22
+ ````
23
+
24
+ ## Query Properties
25
+
26
+ | Property | Type | Required | Description |
27
+ |----------|------|----------|-------------|
28
+ | `cube` | string | Yes | The cube or view to query (e.g. `orders`) |
29
+ | `measures` | string[] | No | Measures to aggregate (e.g. `[count, total_revenue]`) |
30
+ | `dimensions` | string[] | No | Dimensions to group by (e.g. `[status, city]`) |
31
+ | `filters` | Filter[] | No | Row-level filters |
32
+ | `timeDimension` | object | No | Time-based grouping and date range |
33
+ | `orderBy` | object | No | Sort specification (e.g. `{total_revenue: desc}`) |
34
+ | `limit` | number | No | Maximum rows to return |
35
+
36
+ ### timeDimension
37
+
38
+ | Property | Type | Required | Description |
39
+ |----------|------|----------|-------------|
40
+ | `dimension` | string | Yes | Time dimension name (e.g. `created_at`) |
41
+ | `granularity` | string | No | `day`, `week`, `month`, `quarter`, or `year` |
42
+ | `dateRange` | string[] | No | `[start, end]` in `YYYY-MM-DD` format |
43
+
44
+ ### filters
45
+
46
+ Each filter is an object with:
47
+
48
+ | Property | Type | Description |
49
+ |----------|------|-------------|
50
+ | `dimension` | string | Dimension to filter on |
51
+ | `operator` | string | `equals`, `notEquals`, `contains`, `gt`, `gte`, `lt`, `lte` |
52
+ | `values` | array | Values to filter by |
53
+
54
+ ## Examples
55
+
56
+ ### Simple aggregation
57
+
58
+ ````markdown
59
+ ```query total_orders
60
+ cube: orders
61
+ measures: [count]
62
+ ```
63
+ ````
64
+
65
+ ### Grouped by dimension
66
+
67
+ ````markdown
68
+ ```query revenue_by_city
69
+ cube: orders
70
+ measures: [total_revenue]
71
+ dimensions: [city]
72
+ orderBy:
73
+ total_revenue: desc
74
+ limit: 10
75
+ ```
76
+ ````
77
+
78
+ ### Time series
79
+
80
+ ````markdown
81
+ ```query monthly_revenue
82
+ cube: orders
83
+ measures: [total_revenue]
84
+ timeDimension:
85
+ dimension: created_at
86
+ granularity: month
87
+ dateRange: [2025-01-01, 2025-12-31]
88
+ ```
89
+ ````
90
+
91
+ ### With filters
92
+
93
+ ````markdown
94
+ ```query completed_orders
95
+ cube: orders
96
+ measures: [count, total_revenue]
97
+ dimensions: [category]
98
+ filters:
99
+ - dimension: status
100
+ operator: equals
101
+ values: [completed]
102
+ ```
103
+ ````
104
+
105
+ ## Rules
106
+
107
+ - Query names must be valid identifiers (letters, numbers, `_`, `$`)
108
+ - Query names must be unique within a dashboard
109
+ - Every query must specify a `cube`
110
+ - Field names are unqualified (use `count` not `orders.count`) — the `cube` provides the context
111
+ - Components reference queries by name: `data={query_name}`
112
+
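To illustrate the unqualified-names rule, a minimal sketch of the common mistake and its fix (the wrong form appears only in comments):

```yaml
# Wrong: qualified names inside a query block
#   measures: [orders.count]
#   dimensions: [orders.status]
# Right: the cube key already provides the context
cube: orders
measures: [count]
dimensions: [status]
```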
113
+ ## See Also
114
+
115
+ - dashboards.components
116
+ - dashboards
117
+ - workflow.query
@@ -0,0 +1,195 @@
1
+ ---
2
+ name: bonnard-get-started
3
+ description: Guide a user through setting up their first semantic layer after bon init. Use when user says "get started", "what next", "help me set up", or has just run bon init.
4
+ allowed-tools: Bash(bon *)
5
+ ---
6
+
7
+ # Get Started with Bonnard
8
+
9
+ This skill guides you through building and deploying a semantic layer.
10
+ The user has already run `bon init`. Walk through each phase in order,
11
+ confirming progress before moving on.
12
+
13
+ ## Phase 1: Connect a Data Source
14
+
15
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
16
+
17
+ ```bash
18
+ # Option A: Import from dbt (if they use it)
19
+ bon datasource add --from-dbt
20
+
21
+ # Option B: Add manually (interactive)
22
+ bon datasource add
23
+
24
+ # Option C: Use demo data (no warehouse needed)
25
+ bon datasource add --demo
26
+ ```
27
+
28
+ The demo option adds a read-only Contoso retail dataset with tables like
29
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
30
+
31
+ Then verify the connection works:
32
+
33
+ ```bash
34
+ bon datasource test <name>
35
+ ```
36
+
37
+ If the test fails, common issues:
38
+ - Wrong credentials — re-run `bon datasource add`
39
+ - Network/firewall — check that the warehouse allows connections from this machine
40
+ - SSL issues (Postgres) — may need `sslmode` in connection config
41
+
42
+ ## Phase 2: Explore the Data
43
+
44
+ Use `bon preview` to understand what tables and columns are available:
45
+
46
+ ```bash
47
+ # List tables (Postgres)
48
+ bon preview <datasource> "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
49
+
50
+ # List tables (Snowflake)
51
+ bon preview <datasource> "SHOW TABLES"
52
+
53
+ # Sample a table
54
+ bon preview <datasource> "SELECT * FROM orders" --limit 10
55
+ ```
56
+
57
+ This helps you understand the schema before writing cubes.
58
+
59
+ ## Phase 3: Create Your First Cube
60
+
61
+ Create a file in `bonnard/cubes/` for the most important table. A cube
62
+ typically maps directly to a database table — define measures for the metrics
63
+ you want to track and dimensions for the attributes you want to filter and
64
+ group by.
65
+
66
+ Example — `bonnard/cubes/orders.yaml`:
67
+
68
+ ```yaml
69
+ cubes:
70
+ - name: orders
71
+ sql_table: public.orders
72
+
73
+ measures:
74
+ - name: count
75
+ type: count
76
+ description: Total number of orders
77
+
78
+ - name: total_revenue
79
+ type: sum
80
+ sql: amount
81
+ description: Sum of order amounts
82
+
83
+ dimensions:
84
+ - name: id
85
+ type: number
86
+ sql: id
87
+ primary_key: true
88
+
89
+ - name: status
90
+ type: string
91
+ sql: status
92
+ description: Order status (pending, completed, cancelled)
93
+
94
+ - name: created_at
95
+ type: time
96
+ sql: created_at
97
+ description: When the order was placed
98
+ ```
99
+
100
+ Key rules:
101
+ - Every cube needs a `primary_key` dimension
102
+ - Every measure and dimension should have a `description`
103
+ - Use `sql_table` for simple table references, `sql` for complex queries
104
+
105
+ Use `bon docs cubes` for the full reference, `bon docs cubes.measures.types`
106
+ for all 12 measure types, and `bon docs cubes.dimensions.types` for dimension types.
107
+
108
+ ## Phase 4: Create a View
109
+
110
+ Views expose a curated subset of measures and dimensions for consumers.
111
+ Create a file in `bonnard/views/`:
112
+
113
+ Example — `bonnard/views/orders_overview.yaml`:
114
+
115
+ ```yaml
116
+ views:
117
+ - name: orders_overview
118
+ description: High-level order metrics and attributes
119
+ cubes:
120
+ - join_path: orders
121
+ includes:
122
+ - count
123
+ - total_revenue
124
+ - status
125
+ - created_at
126
+ ```
127
+
128
+ Use `bon docs views` for the full reference.
129
+
130
+ ## Phase 5: Validate
131
+
132
+ Check for errors:
133
+
134
+ ```bash
135
+ bon validate
136
+ ```
137
+
138
+ Fix any errors before proceeding. Common issues:
139
+ - Missing required fields (`name`, `type`, `sql`)
140
+ - Unknown measure/dimension types (e.g., `text` should be `string`)
141
+ - Bad YAML indentation
142
+
143
+ Optionally test the datasource connection too:
144
+
145
+ ```bash
146
+ bon validate --test-connection
147
+ ```
148
+
149
+ ## Phase 6: Deploy
150
+
151
+ Log in (if not already) and deploy:
152
+
153
+ ```bash
154
+ bon login
155
+ bon deploy
156
+ ```
157
+
158
+ Deploy validates, tests connections, uploads cubes/views, and syncs datasource
159
+ credentials (encrypted) to Bonnard.
160
+
161
+ ## Phase 7: Test with a Query
162
+
163
+ Verify the deployment works:
164
+
165
+ ```bash
166
+ # Simple count
167
+ bon query '{"measures": ["orders.count"]}'
168
+
169
+ # Group by a dimension
170
+ bon query '{"measures": ["orders.count"], "dimensions": ["orders.status"]}'
171
+
172
+ # SQL format
173
+ bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
174
+ ```
175
+
176
+ ## Phase 8: Connect AI Agents (Optional)
177
+
178
+ Set up MCP so AI agents can query the semantic layer:
179
+
180
+ ```bash
181
+ bon mcp
182
+ ```
183
+
184
+ This shows setup instructions for Claude Desktop, ChatGPT, Cursor, VS Code,
185
+ and other MCP clients. The MCP URL is `https://mcp.bonnard.dev/mcp`.
186
+
187
+ ## Next Steps
188
+
189
+ After the first cube is working:
190
+
191
+ - Add more cubes for other tables
192
+ - Add joins between cubes (`bon docs cubes.joins`)
193
+ - Add calculated measures (`bon docs cubes.measures.calculated`)
194
+ - Add segments for common filters (`bon docs cubes.segments`)
195
+ - Build dashboards (`bon docs dashboards`)
@@ -0,0 +1,194 @@
1
+ ---
2
+ description: "Guide a user through setting up their first semantic layer after bon init. Use when user says 'get started', 'what next', 'help me set up', or has just run bon init."
3
+ alwaysApply: false
4
+ ---
5
+
6
+ # Get Started with Bonnard
7
+
8
+ This skill guides you through building and deploying a semantic layer.
9
+ The user has already run `bon init`. Walk through each phase in order,
10
+ confirming progress before moving on.
11
+
12
+ ## Phase 1: Connect a Data Source
13
+
14
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
15
+
16
+ ```bash
17
+ # Option A: Import from dbt (if they use it)
18
+ bon datasource add --from-dbt
19
+
20
+ # Option B: Add manually (interactive)
21
+ bon datasource add
22
+
23
+ # Option C: Use demo data (no warehouse needed)
24
+ bon datasource add --demo
25
+ ```
26
+
27
+ The demo option adds a read-only Contoso retail dataset with tables like
28
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
29
+
30
+ Then verify the connection works:
31
+
32
+ ```bash
33
+ bon datasource test <name>
34
+ ```
35
+
36
+ If the test fails, common issues:
37
+ - Wrong credentials — re-run `bon datasource add`
38
+ - Network/firewall — check that the warehouse allows connections from this machine
39
+ - SSL issues (Postgres) — may need `sslmode` in connection config
40
+
41
+ ## Phase 2: Explore the Data
42
+
43
+ Use `bon preview` to understand what tables and columns are available:
44
+
45
+ ```bash
46
+ # List tables (Postgres)
47
+ bon preview <datasource> "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
48
+
49
+ # List tables (Snowflake)
50
+ bon preview <datasource> "SHOW TABLES"
51
+
52
+ # Sample a table
53
+ bon preview <datasource> "SELECT * FROM orders" --limit 10
54
+ ```
55
+
56
+ This helps you understand the schema before writing cubes.
57
+
58
+ ## Phase 3: Create Your First Cube
59
+
60
+ Create a file in `bonnard/cubes/` for the most important table. A cube
61
+ typically maps directly to a database table — define measures for the metrics
62
+ you want to track and dimensions for the attributes you want to filter and
63
+ group by.
64
+
65
+ Example — `bonnard/cubes/orders.yaml`:
66
+
67
+ ```yaml
68
+ cubes:
69
+ - name: orders
70
+ sql_table: public.orders
71
+
72
+ measures:
73
+ - name: count
74
+ type: count
75
+ description: Total number of orders
76
+
77
+ - name: total_revenue
78
+ type: sum
79
+ sql: amount
80
+ description: Sum of order amounts
81
+
82
+ dimensions:
83
+ - name: id
84
+ type: number
85
+ sql: id
86
+ primary_key: true
87
+
88
+ - name: status
89
+ type: string
90
+ sql: status
91
+ description: Order status (pending, completed, cancelled)
92
+
93
+ - name: created_at
94
+ type: time
95
+ sql: created_at
96
+ description: When the order was placed
97
+ ```
98
+
99
+ Key rules:
100
+ - Every cube needs a `primary_key` dimension
101
+ - Every measure and dimension should have a `description`
102
+ - Use `sql_table` for simple table references, `sql` for complex queries
103
+
104
+ Use `bon docs cubes` for the full reference, `bon docs cubes.measures.types`
105
+ for all 12 measure types, and `bon docs cubes.dimensions.types` for dimension types.
106
+
107
+ ## Phase 4: Create a View
108
+
109
+ Views expose a curated subset of measures and dimensions for consumers.
110
+ Create a file in `bonnard/views/`:
111
+
112
+ Example — `bonnard/views/orders_overview.yaml`:
113
+
114
+ ```yaml
115
+ views:
116
+ - name: orders_overview
117
+ description: High-level order metrics and attributes
118
+ cubes:
119
+ - join_path: orders
120
+ includes:
121
+ - count
122
+ - total_revenue
123
+ - status
124
+ - created_at
125
+ ```
126
+
127
+ Use `bon docs views` for the full reference.
128
+
129
+ ## Phase 5: Validate
130
+
131
+ Check for errors:
132
+
133
+ ```bash
134
+ bon validate
135
+ ```
136
+
137
+ Fix any errors before proceeding. Common issues:
138
+ - Missing required fields (`name`, `type`, `sql`)
139
+ - Unknown measure/dimension types (e.g., `text` should be `string`)
140
+ - Bad YAML indentation
141
+
142
+ Optionally test the datasource connection too:
143
+
144
+ ```bash
145
+ bon validate --test-connection
146
+ ```
147
+
148
+ ## Phase 6: Deploy
149
+
150
+ Log in (if not already) and deploy:
151
+
152
+ ```bash
153
+ bon login
154
+ bon deploy
155
+ ```
156
+
157
+ Deploy validates, tests connections, uploads cubes/views, and syncs datasource
158
+ credentials (encrypted) to Bonnard.
159
+
160
+ ## Phase 7: Test with a Query
161
+
162
+ Verify the deployment works:
163
+
164
+ ```bash
165
+ # Simple count
166
+ bon query '{"measures": ["orders.count"]}'
167
+
168
+ # Group by a dimension
169
+ bon query '{"measures": ["orders.count"], "dimensions": ["orders.status"]}'
170
+
171
+ # SQL format
172
+ bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
173
+ ```
174
+
175
+ ## Phase 8: Connect AI Agents (Optional)
176
+
177
+ Set up MCP so AI agents can query the semantic layer:
178
+
179
+ ```bash
180
+ bon mcp
181
+ ```
182
+
183
+ This shows setup instructions for Claude Desktop, ChatGPT, Cursor, VS Code,
184
+ and other MCP clients. The MCP URL is `https://mcp.bonnard.dev/mcp`.
185
+
186
+ ## Next Steps
187
+
188
+ After the first cube is working:
189
+
190
+ - Add more cubes for other tables
191
+ - Add joins between cubes (`bon docs cubes.joins`)
192
+ - Add calculated measures (`bon docs cubes.measures.calculated`)
193
+ - Add segments for common filters (`bon docs cubes.segments`)
194
+ - Build dashboards (`bon docs dashboards`)
@@ -39,12 +39,29 @@ my-project/
39
39
  └── datasources.yaml # Warehouse connections
40
40
  ```
41
41
 
42
+ ## Demo Data
43
+
44
+ No warehouse? Use the built-in demo dataset to try Bonnard:
45
+
46
+ ```bash
47
+ bon datasource add --demo
48
+ ```
49
+
50
+ This adds a read-only **Contoso** retail database (Postgres) with tables:
51
+ - `fact_sales` — transactions with sales_amount, unit_price, sales_quantity, date_key
52
+ - `dim_product` — product_name, brand_name, manufacturer, unit_cost, unit_price
53
+ - `dim_store` — store_name, store_type, employee_count, selling_area_size
54
+ - `dim_customer` — first_name, last_name, gender, yearly_income, education, occupation
55
+
56
+ All tables are in the `contoso` schema. The datasource is named `contoso_demo`.
57
+
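A natural next step after adding the demo datasource is a first cube over `fact_sales`. A sketch using the column names listed above (types are assumptions; confirm them with `bon preview contoso_demo` before deploying):

```yaml
# Sketch of a starter cube for the demo data; column types are guesses,
# and a primary_key dimension should be added once the key column is confirmed.
cubes:
  - name: fact_sales
    sql_table: contoso.fact_sales

    measures:
      - name: count
        type: count
        description: Number of sales rows
      - name: total_sales
        type: sum
        sql: sales_amount
        description: Sum of sales_amount

    dimensions:
      - name: date_key
        type: number
        sql: date_key
        description: Date key for the sale (type assumed)
```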
42
58
  ## Quick Reference
43
59
 
44
60
  | Command | Purpose |
45
61
  |---------|---------|
46
62
  | `bon init` | Initialize new project |
47
63
  | `bon datasource add` | Add warehouse connection |
64
+ | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
48
65
  | `bon datasource add --from-dbt` | Import from dbt profiles |
49
66
  | `bon datasource test <name>` | Test connection |
50
67
  | `bon validate` | Validate YAML syntax |
@@ -78,5 +95,4 @@ Topics follow dot notation (e.g., `cubes.dimensions.time`). Use `--recursive` to
78
95
  4. **Validate** — `bon validate --test-connection`
79
96
  5. **Deploy** — `bon login` then `bon deploy`
80
97
 
81
- For CLI details: `/bonnard-cli`
82
- For YAML patterns: `/bonnard-queries`
98
+ For a guided walkthrough: `/bonnard-get-started`
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bonnard/cli",
3
- "version": "0.1.9",
3
+ "version": "0.1.11",
4
4
  "type": "module",
5
5
  "bin": {
6
6
  "bon": "./dist/bin/bon.mjs"
@@ -1,59 +0,0 @@
1
- ---
2
- name: bonnard-cli
3
- description: Bonnard CLI reference. Use when user needs help with bon commands, data sources, or deployments.
4
- allowed-tools: Bash(bon *)
5
- ---
6
-
7
- # Bonnard CLI
8
-
9
- ## Commands
10
-
11
- ### Project Setup
12
- ```bash
13
- bon init # Initialize new project
14
- bon login # Authenticate with Bonnard
15
- bon logout # Clear credentials
16
- ```
17
-
18
- ### Data Sources
19
- ```bash
20
- bon datasource add # Add new data source (interactive)
21
- bon datasource list # List all data sources
22
- bon datasource test <name> # Test connection
23
- bon datasource remove <name> # Remove data source
24
- ```
25
-
26
- ### Development
27
- ```bash
28
- bon validate # Validate cubes and views
29
- bon deploy # Deploy to Bonnard
30
- ```
31
-
32
- ### Data Exploration (Local)
33
- ```bash
34
- bon preview <datasource> "<sql>" # Preview data with raw SQL
35
- bon preview <datasource> "<sql>" --format json # JSON output
36
- bon preview <datasource> "<sql>" --limit 100 # Limit rows
37
- ```
38
-
39
- ### Semantic Layer Queries (Deployed)
40
- ```bash
41
- # JSON format (default)
42
- bon query '{"measures": ["orders.count"]}'
43
- bon query '{"measures": ["orders.total_revenue"], "dimensions": ["orders.status"]}'
44
-
45
- # SQL format
46
- bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
47
- bon query --sql "SELECT city, SUM(amount) FROM orders GROUP BY 1" --limit 10
48
- ```
49
-
50
- ### Documentation
51
- ```bash
52
- bon docs # Show all topics
53
- bon docs <topic> # View topic (e.g., cubes.measures)
54
- bon docs <topic> --recursive # Topic + all children
55
- bon docs --search "<query>" # Search all docs
56
- bon docs <topic> --format json # Structured output
57
- ```
58
-
59
- **Key topics:** `cubes`, `cubes.measures`, `cubes.dimensions`, `cubes.joins`, `views`, `pre-aggregations`, `syntax`
@@ -1,68 +0,0 @@
1
- ---
2
- name: bonnard-queries
3
- description: How to write queries and work with cubes and views. Use when user asks about metrics, dimensions, or data modeling.
4
- ---
5
-
6
- # Bonnard Query Patterns
7
-
8
- ## Cube Structure (YAML)
9
-
10
- ```yaml
11
- cubes:
12
- - name: orders
13
- sql: SELECT * FROM public.orders
14
-
15
- measures:
16
- - name: count
17
- type: count
18
- description: Total number of orders
19
-
20
- - name: total_amount
21
- type: sum
22
- sql: amount
23
- description: Sum of order amounts in dollars
24
-
25
- dimensions:
26
- - name: status
27
- type: string
28
- sql: status
29
- description: Order status (pending, completed, cancelled)
30
-
31
- - name: created_at
32
- type: time
33
- sql: created_at
34
- description: When the order was placed
35
- ```
36
-
37
- ## View Structure (YAML)
38
-
39
- ```yaml
40
- views:
41
- - name: orders_overview
42
- cubes:
43
- - join_path: orders
44
- includes:
45
- - count
46
- - total_amount
47
- - status
48
- ```
49
-
50
- ## Workflow
51
-
52
- 1. Define cubes in `bonnard/cubes/*.yaml`
53
- 2. Define views in `bonnard/views/*.yaml`
54
- 3. Run `bon validate` to check syntax
55
- 4. Run `bon deploy` to publish
56
-
57
- ## Learn More
58
-
59
- Use `bon docs` for comprehensive documentation:
60
-
61
- ```bash
62
- bon docs cubes.measures.types # All 12 measure types
63
- bon docs cubes.dimensions.types # All 6 dimension types
64
- bon docs cubes.joins # Relationship types
65
- bon docs cubes.segments # Predefined filters
66
- bon docs views.cubes # View composition
67
- bon docs --search "rolling" # Search for concepts
68
- ```
@@ -1,47 +0,0 @@
1
- ---
2
- description: "Bonnard CLI reference. Use when user needs help with bon commands, data sources, or deployments."
3
- alwaysApply: false
4
- ---
5
-
6
- # Bonnard CLI
7
-
8
- ## Commands
9
-
10
- ### Project Setup
11
- ```bash
12
- bon init # Initialize new project
13
- bon login # Authenticate with Bonnard
14
- bon logout # Clear credentials
15
- ```
16
-
17
- ### Data Sources
18
- ```bash
19
- bon datasource add # Add new data source (interactive)
20
- bon datasource list # List all data sources
21
- bon datasource test <name> # Test connection
22
- bon datasource remove <name> # Remove data source
23
- ```
24
-
25
- ### Development
26
- ```bash
27
- bon validate # Validate cubes and views
28
- bon deploy # Deploy to Bonnard
29
- ```
30
-
31
- ### Data Exploration (Local)
32
- ```bash
33
- bon preview <datasource> "<sql>" # Preview data with raw SQL
34
- bon preview <datasource> "<sql>" --format json # JSON output
35
- bon preview <datasource> "<sql>" --limit 100 # Limit rows
36
- ```
37
-
38
- ### Semantic Layer Queries (Deployed)
39
- ```bash
40
- # JSON format (default)
41
- bon query '{"measures": ["orders.count"]}'
42
- bon query '{"measures": ["orders.total_revenue"], "dimensions": ["orders.status"]}'
43
-
44
- # SQL format
45
- bon query --sql "SELECT status, MEASURE(count) FROM orders GROUP BY 1"
46
- bon query --sql "SELECT city, SUM(amount) FROM orders GROUP BY 1" --limit 10
47
- ```
@@ -1,49 +0,0 @@
1
- ---
2
- description: "How to write queries and work with cubes and views. Use when user asks about metrics, dimensions, or data modeling."
3
- alwaysApply: false
4
- ---
5
-
6
- # Bonnard Query Patterns
7
-
8
- ## Cube Structure (YAML)
9
-
10
- ```yaml
11
- cubes:
12
- - name: orders
13
- sql: SELECT * FROM public.orders
14
-
15
- measures:
16
- - name: count
17
- type: count
18
- - name: total_amount
19
- type: sum
20
- sql: amount
21
-
22
- dimensions:
23
- - name: status
24
- type: string
25
- sql: status
26
- - name: created_at
27
- type: time
28
- sql: created_at
29
- ```
30
-
31
- ## View Structure (YAML)
32
-
33
- ```yaml
34
- views:
35
- - name: orders_overview
36
- cubes:
37
- - join_path: orders
38
- includes:
39
- - count
40
- - total_amount
41
- - status
42
- ```
43
-
44
- ## Workflow
45
-
46
- 1. Define cubes in `bonnard/cubes/*.yaml`
47
- 2. Define views in `bonnard/views/*.yaml`
48
- 3. Run `bon validate` to check syntax
49
- 4. Run `bon deploy` to publish