@bonnard/cli 0.1.10 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/bon.mjs CHANGED
@@ -1488,10 +1488,45 @@ async function addManual(options) {
1488
1488
  console.log(pc.dim(`Test connection: bon datasource test ${name}`));
1489
1489
  }
1490
1490
  /**
1491
+ * Add the Contoso demo datasource (read-only retail dataset)
1492
+ */
1493
+ async function addDemo(options) {
1494
+ const name = "contoso_demo";
1495
+ if (datasourceExists(name) && !options.force) {
1496
+ console.log(pc.yellow(`Datasource "${name}" already exists. Use --force to overwrite.`));
1497
+ return;
1498
+ }
1499
+ if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
1500
+ addLocalDatasource({
1501
+ name,
1502
+ type: "postgres",
1503
+ source: "demo",
1504
+ config: {
1505
+ host: "aws-1-eu-west-1.pooler.supabase.com",
1506
+ port: "5432",
1507
+ database: "postgres",
1508
+ schema: "contoso"
1509
+ },
1510
+ credentials: {
1511
+ username: "demo_reader.yvbfzqogtdsqqkpyztlu",
1512
+ password: "contoso-demo-2025!"
1513
+ }
1514
+ });
1515
+ console.log();
1516
+ console.log(pc.green(`✓ Demo datasource "${name}" saved to .bon/datasources.yaml`));
1517
+ console.log();
1518
+ console.log(pc.dim("Contoso is a read-only retail dataset with tables like:"));
1519
+ console.log(pc.dim(" fact_sales, dim_product, dim_store, dim_customer"));
1520
+ console.log();
1521
+ console.log(pc.dim(`Test connection: bon datasource test ${name}`));
1522
+ console.log(pc.dim(`Explore tables: bon preview ${name} "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"`));
1523
+ }
1524
+ /**
1491
1525
  * Main datasource add command
1492
1526
  */
1493
1527
  async function datasourceAddCommand(options = {}) {
1494
- if (options.fromDbt !== void 0) await importFromDbt(options);
1528
+ if (options.demo) await addDemo(options);
1529
+ else if (options.fromDbt !== void 0) await importFromDbt(options);
1495
1530
  else await addManual(options);
1496
1531
  }
1497
1532
 
@@ -2806,7 +2841,7 @@ program.command("login").description("Authenticate with Bonnard via your browser
2806
2841
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
2807
2842
  program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
2808
2843
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
2809
- datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
2844
+ datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
2810
2845
  datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
2811
2846
  datasource.command("test").description("Test data source connectivity by connecting directly to the warehouse").argument("<name>", "Data source name from .bon/datasources.yaml").option("--remote", "Test via Bonnard API instead of direct connection (requires login)").action(datasourceTestCommand);
2812
2847
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
@@ -12,16 +12,22 @@ confirming progress before moving on.
12
12
 
13
13
  ## Phase 1: Connect a Data Source
14
14
 
15
- Check if the user has dbt:
15
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
16
16
 
17
17
  ```bash
18
- # Import from dbt (if they use it)
18
+ # Option A: Import from dbt (if they use it)
19
19
  bon datasource add --from-dbt
20
20
 
21
- # Or add manually (interactive)
21
+ # Option B: Add manually (interactive)
22
22
  bon datasource add
23
+
24
+ # Option C: Use demo data (no warehouse needed)
25
+ bon datasource add --demo
23
26
  ```
24
27
 
28
+ The demo option adds a read-only Contoso retail dataset with tables like
29
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
30
+
25
31
  Then verify the connection works:
26
32
 
27
33
  ```bash
@@ -11,16 +11,22 @@ confirming progress before moving on.
11
11
 
12
12
  ## Phase 1: Connect a Data Source
13
13
 
14
- Check if the user has dbt:
14
+ Ask the user if they have a warehouse to connect, or want to try a demo dataset first:
15
15
 
16
16
  ```bash
17
- # Import from dbt (if they use it)
17
+ # Option A: Import from dbt (if they use it)
18
18
  bon datasource add --from-dbt
19
19
 
20
- # Or add manually (interactive)
20
+ # Option B: Add manually (interactive)
21
21
  bon datasource add
22
+
23
+ # Option C: Use demo data (no warehouse needed)
24
+ bon datasource add --demo
22
25
  ```
23
26
 
27
+ The demo option adds a read-only Contoso retail dataset with tables like
28
+ `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
29
+
24
30
  Then verify the connection works:
25
31
 
26
32
  ```bash
@@ -39,12 +39,29 @@ my-project/
39
39
  └── datasources.yaml # Warehouse connections
40
40
  ```
41
41
 
42
+ ## Demo Data
43
+
44
+ No warehouse? Use the built-in demo dataset to try Bonnard:
45
+
46
+ ```bash
47
+ bon datasource add --demo
48
+ ```
49
+
50
+ This adds a read-only **Contoso** retail database (Postgres) with tables:
51
+ - `fact_sales` — transactions with sales_amount, unit_price, sales_quantity, date_key
52
+ - `dim_product` — product_name, brand_name, manufacturer, unit_cost, unit_price
53
+ - `dim_store` — store_name, store_type, employee_count, selling_area_size
54
+ - `dim_customer` — first_name, last_name, gender, yearly_income, education, occupation
55
+
56
+ All tables are in the `contoso` schema. The datasource is named `contoso_demo`.
57
+
42
58
  ## Quick Reference
43
59
 
44
60
  | Command | Purpose |
45
61
  |---------|---------|
46
62
  | `bon init` | Initialize new project |
47
63
  | `bon datasource add` | Add warehouse connection |
64
+ | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
48
65
  | `bon datasource add --from-dbt` | Import from dbt profiles |
49
66
  | `bon datasource test <name>` | Test connection |
50
67
  | `bon validate` | Validate YAML syntax |
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bonnard/cli",
3
- "version": "0.1.10",
3
+ "version": "0.1.11",
4
4
  "type": "module",
5
5
  "bin": {
6
6
  "bon": "./dist/bin/bon.mjs"
@@ -9,7 +9,7 @@
9
9
  "dist"
10
10
  ],
11
11
  "scripts": {
12
- "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && mkdir -p dist/docs/topics dist/docs/schemas && cp ../content/index.md dist/docs/_index.md && cp ../content/modeling/*.md dist/docs/topics/ && cp ../content/dashboards/*.md dist/docs/topics/",
12
+ "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && mkdir -p dist/docs/topics dist/docs/schemas && cp ../content/index.md dist/docs/_index.md && cp ../content/modeling/*.md dist/docs/topics/",
13
13
  "dev": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin --watch",
14
14
  "test": "vitest run"
15
15
  },