@bonnard/cli 0.2.2 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/bon.mjs CHANGED
@@ -10,7 +10,6 @@ import os from "node:os";
  import http from "node:http";
  import crypto from "node:crypto";
  import { execFileSync } from "node:child_process";
- import { confirm } from "@inquirer/prompts";
  import { encode } from "@toon-format/toon";

  //#region rolldown:runtime
@@ -82,6 +81,7 @@ function mapDbtType(dbtType) {
  snowflake: "snowflake",
  postgres: "postgres",
  postgresql: "postgres",
+ redshift: "redshift",
  bigquery: "bigquery",
  databricks: "databricks"
  }[dbtType.toLowerCase()] ?? null;
@@ -338,6 +338,7 @@ function extractWarehouseFromEnv(cwd) {
  const type = {
  snowflake: "snowflake",
  postgres: "postgres",
+ redshift: "redshift",
  bigquery: "bigquery",
  databricks: "databricks"
  }[cubeDbType[1].trim().toLowerCase()];
@@ -787,7 +788,6 @@ async function logoutCommand() {
  var api_exports = /* @__PURE__ */ __exportAll({
  del: () => del,
  get: () => get,
- getRemoteDatasources: () => getRemoteDatasources,
  post: () => post
  });
  const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
@@ -829,12 +829,6 @@ function post(path, body) {
  function del(path) {
  return request("DELETE", path);
  }
- /**
- * Fetch remote datasources from Bonnard server
- */
- async function getRemoteDatasources() {
- return (await get("/api/datasources")).dataSources || [];
- }

  //#endregion
  //#region src/commands/whoami.ts
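With `getRemoteDatasources` gone, the CLI's API surface here is just `get`, `post`, and `del` against `APP_URL`. For reference, a hedged reconstruction of the sync request that `post("/api/datasources", ...)` issues — the path and body fields are taken from the `pushDatasource` chunk later in this diff, while the bearer-token header is an assumption, since `request()` and its auth scheme are not shown:

```bash
# Sketch only: BON_TOKEN and the Authorization scheme are assumptions;
# the URL, path, and body fields appear elsewhere in this diff.
curl -X POST "${BON_APP_URL:-https://app.bonnard.dev}/api/datasources" \
  -H "Authorization: Bearer $BON_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
        "name": "my_warehouse",
        "warehouse_type": "postgres",
        "config": {"host": "db.example.com", "port": "5432", "database": "mydb", "schema": "public"},
        "credentials": {"username": "myuser", "password": "mypassword"}
      }'
```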
@@ -1221,6 +1215,43 @@ const WAREHOUSE_CONFIGS = [
  required: true
  }]
  },
+ {
+ value: "redshift",
+ label: "Redshift",
+ configFields: [
+ {
+ name: "host",
+ message: "Host (cluster endpoint)",
+ required: true
+ },
+ {
+ name: "port",
+ message: "Port",
+ default: "5439"
+ },
+ {
+ name: "database",
+ message: "Database name",
+ required: true
+ },
+ {
+ name: "schema",
+ message: "Schema",
+ default: "public"
+ }
+ ],
+ credentialFields: [{
+ name: "username",
+ flag: "user",
+ message: "Username",
+ required: true
+ }, {
+ name: "password",
+ message: "Password",
+ secret: true,
+ required: true
+ }]
+ },
  {
  value: "bigquery",
  label: "BigQuery",
@@ -1490,7 +1521,7 @@ async function addManual(options) {
  console.log();
  console.log(pc.green(`✓ Datasource "${name}" saved to .bon/datasources.yaml`));
  console.log();
- console.log(pc.dim(`Test connection: bon datasource test ${name}`));
+ console.log(pc.dim(`Connection will be tested during \`bon deploy\``));
  }
  /**
  * Add the Contoso demo datasource (read-only retail dataset)
@@ -1523,7 +1554,7 @@ async function addDemo(options) {
  console.log(pc.dim("Contoso is a read-only retail dataset with tables like:"));
  console.log(pc.dim(" fact_sales, dim_product, dim_store, dim_customer"));
  console.log();
- console.log(pc.dim(`Test connection: bon datasource test ${name}`));
+ console.log(pc.dim(`Connection will be tested during \`bon deploy\``));
  }
  /**
  * Main datasource add command
@@ -1630,34 +1661,6 @@ async function datasourceListCommand(options = {}) {
  if (showRemote) await listRemoteDatasources();
  }

- //#endregion
- //#region src/commands/datasource/test.ts
- async function datasourceTestCommand(name) {
- if (!loadCredentials()) {
- console.log(pc.red("Not logged in. Run `bon login` to test datasources."));
- process.exit(1);
- }
- console.log(pc.dim(`Testing ${name} via remote API...`));
- console.log();
- try {
- const result = await post("/api/datasources/test", { name });
- if (result.success) {
- console.log(pc.green(result.message));
- if (result.details) {
- if (result.details.warehouse) console.log(pc.dim(` Warehouse: ${result.details.warehouse}`));
- if (result.details.account) console.log(pc.dim(` Account: ${result.details.account}`));
- if (result.details.latencyMs != null) console.log(pc.dim(` Latency: ${result.details.latencyMs}ms`));
- }
- } else {
- console.log(pc.red(result.message));
- process.exit(1);
- }
- } catch (err) {
- console.error(pc.red(`Failed to test data source: ${err.message}`));
- process.exit(1);
- }
- }
-
  //#endregion
  //#region src/commands/datasource/remove.ts
  async function datasourceRemoveCommand(name, options = {}) {
@@ -1692,93 +1695,6 @@ async function removeRemote(name) {
  }
  }

- //#endregion
- //#region src/commands/datasource/push.ts
- var push_exports = /* @__PURE__ */ __exportAll({
- datasourcePushCommand: () => datasourcePushCommand,
- pushDatasource: () => pushDatasource
- });
- /**
- * Push a local datasource to Bonnard server
- */
- async function datasourcePushCommand(name, options = {}) {
- if (!loadCredentials()) {
- console.error(pc.red("Not logged in. Run `bon login` first."));
- process.exit(1);
- }
- const datasource = getLocalDatasource(name);
- if (!datasource) {
- console.error(pc.red(`Datasource "${name}" not found in .bon/datasources.yaml`));
- console.log(pc.dim("Run `bon datasource list --local` to see available datasources."));
- process.exit(1);
- }
- const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
- if (missing.length > 0) {
- console.error(pc.red(`Missing environment variables: ${missing.join(", ")}`));
- console.log(pc.dim("Set them in your environment or use plain values in .bon/datasources.yaml"));
- process.exit(1);
- }
- try {
- if ((await getRemoteDatasources()).some((ds) => ds.name === name) && !options.force) {
- if (!await confirm({
- message: `Datasource "${name}" already exists on remote. Overwrite?`,
- default: false
- })) {
- console.log(pc.dim("Aborted."));
- process.exit(0);
- }
- }
- } catch (err) {
- console.log(pc.dim(`Note: Could not check remote datasources: ${err.message}`));
- }
- console.log(pc.dim(`Pushing "${name}"...`));
- try {
- await post("/api/datasources", {
- name: datasource.name,
- warehouse_type: datasource.type,
- config: datasource.config,
- credentials: resolved
- });
- console.log(pc.green(`✓ Datasource "${name}" pushed to Bonnard`));
- } catch (err) {
- const message = err.message;
- if (message.includes("already exists")) {
- console.error(pc.red(`Datasource "${name}" already exists on remote.`));
- console.log(pc.dim("Use --force to overwrite."));
- process.exit(1);
- }
- console.error(pc.red(`Failed to push datasource: ${message}`));
- process.exit(1);
- }
- }
- /**
- * Push a datasource programmatically (for use by deploy command)
- * Returns true on success, false on failure
- */
- async function pushDatasource(name, options = {}) {
- const datasource = getLocalDatasource(name);
- if (!datasource) {
- if (!options.silent) console.error(pc.red(`Datasource "${name}" not found locally`));
- return false;
- }
- const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
- if (missing.length > 0) {
- if (!options.silent) console.error(pc.red(`Missing env vars for "${name}": ${missing.join(", ")}`));
- return false;
- }
- try {
- await post("/api/datasources", {
- name: datasource.name,
- warehouse_type: datasource.type,
- config: datasource.config,
- credentials: resolved
- });
- return true;
- } catch {
- return false;
- }
- }
-
  //#endregion
  //#region src/commands/validate.ts
  async function validateCommand() {
@@ -1788,7 +1704,7 @@ async function validateCommand() {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
- const { validate } = await import("./validate-DEh1XQnH.mjs");
+ const { validate } = await import("./validate-BdqZBH2n.mjs");
  const result = await validate(cwd);
  if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
  console.log(pc.yellow(`No cube or view files found in ${BONNARD_DIR}/cubes/ or ${BONNARD_DIR}/views/.`));
@@ -1854,7 +1770,7 @@ async function deployCommand(options = {}) {
  process.exit(1);
  }
  console.log(pc.dim("Validating cubes and views..."));
- const { validate } = await import("./validate-DEh1XQnH.mjs");
+ const { validate } = await import("./validate-BdqZBH2n.mjs");
  const result = await validate(cwd);
  if (!result.valid) {
  console.log(pc.red("Validation failed:\n"));
@@ -1923,9 +1839,9 @@ async function deployCommand(options = {}) {
  * Returns true if any connection failed (strict mode)
  */
  async function testAndSyncDatasources(cwd, options = {}) {
- const { extractDatasourcesFromCubes } = await import("./cubes-Bf0IPYd7.mjs");
+ const { extractDatasourcesFromCubes } = await import("./cubes-9rklhdAJ.mjs");
  const { loadLocalDatasources } = await Promise.resolve().then(() => local_exports);
- const { pushDatasource } = await Promise.resolve().then(() => push_exports);
+ const { pushDatasource } = await import("./push-Bv9AFGc2.mjs");
  const references = extractDatasourcesFromCubes(cwd);
  if (references.length === 0) return false;
  console.log();
@@ -1948,51 +1864,16 @@ async function testAndSyncDatasources(cwd, options = {}) {
  console.log(pc.red("Missing datasources. Fix issues before deploying."));
  return true;
  }
- console.log(pc.dim("Checking remote datasources..."));
- let remoteDatasources;
- try {
- remoteDatasources = await getRemoteDatasources();
- } catch (err) {
- console.log(pc.red(`Failed to fetch remote datasources: ${err.message}`));
- return true;
+ console.log(pc.dim("Syncing datasources..."));
+ for (const name of foundDatasources) if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ ${name} synced`));
+ else {
+ console.log(pc.red(`✗ Failed to sync "${name}"`));
+ failed = true;
  }
- const remoteNames = new Set(remoteDatasources.map((ds) => ds.name));
- const missingRemote = foundDatasources.filter((name) => !remoteNames.has(name));
- if (missingRemote.length > 0) {
- console.log();
- console.log(pc.yellow(`⚠ Missing remote datasource${missingRemote.length > 1 ? "s" : ""}: ${missingRemote.join(", ")}`));
+ if (failed) {
  console.log();
- if (options.ci) {
- console.log(pc.red("Deploy aborted (--ci mode)."));
- console.log(pc.dim(`Run: bon datasource push <name>`));
- return true;
- }
- if (options.pushDatasources) for (const name of missingRemote) {
- console.log(pc.dim(`Pushing "${name}"...`));
- if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
- else {
- console.log(pc.red(`✗ Failed to push "${name}"`));
- return true;
- }
- }
- else {
- if (!await confirm({
- message: `Push ${missingRemote.length > 1 ? "these datasources" : `"${missingRemote[0]}"`} to Bonnard? (credentials will be encrypted)`,
- default: true
- })) {
- console.log(pc.dim("Deploy aborted."));
- return true;
- }
- console.log();
- for (const name of missingRemote) {
- console.log(pc.dim(`Pushing "${name}"...`));
- if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
- else {
- console.log(pc.red(`✗ Failed to push "${name}"`));
- return true;
- }
- }
- }
+ console.log(pc.red("Datasource sync failed. Check .bon/datasources.yaml and credentials."));
+ return true;
  }
  console.log();
  console.log(pc.dim("Testing datasource connections..."));
@@ -3931,11 +3812,9 @@ program.command("whoami").description("Show current login status").option("--ver
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
  datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
  datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
- datasource.command("test").description("Test data source connectivity via Bonnard API (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").action(datasourceTestCommand);
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
- datasource.command("push").description("Push a local data source to Bonnard server (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").option("--force", "Overwrite if already exists on remote").action(datasourcePushCommand);
  program.command("validate").description("Validate YAML syntax in bonnard/cubes/ and bonnard/views/").action(validateCommand);
- program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").requiredOption("-m, --message <text>", "Deploy message describing your changes").action(deployCommand);
+ program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode").requiredOption("-m, --message <text>", "Deploy message describing your changes").action(deployCommand);
  program.command("deployments").description("List deployment history").option("--all", "Show all deployments (default: last 10)").option("--format <format>", "Output format: table or json", "table").action(deploymentsCommand);
  program.command("diff").description("Show changes in a deployment").argument("<id>", "Deployment ID").option("--format <format>", "Output format: table or json", "table").option("--breaking", "Show only breaking changes").action(diffCommand);
  program.command("annotate").description("Annotate deployment changes with reasoning").argument("<id>", "Deployment ID").option("--data <json>", "Annotations JSON").action(annotateCommand);
@@ -3949,4 +3828,4 @@ metabase.command("analyze").description("Analyze Metabase instance and generate
  program.parse();

  //#endregion
- export { getProjectPaths as t };
+ export { getProjectPaths as i, resolveEnvVarsInCredentials as n, post as r, getLocalDatasource as t };
@@ -1,4 +1,4 @@
- import { t as getProjectPaths } from "./bon.mjs";
+ import { i as getProjectPaths } from "./bon.mjs";
  import fs from "node:fs";
  import path from "node:path";
  import YAML from "yaml";
@@ -0,0 +1,35 @@
+ import { n as resolveEnvVarsInCredentials, r as post, t as getLocalDatasource } from "./bon.mjs";
+ import pc from "picocolors";
+ import "@inquirer/prompts";
+
+ //#region src/commands/datasource/push.ts
+ /**
+ * Push a datasource programmatically (for use by deploy command)
+ * Returns true on success, false on failure
+ */
+ async function pushDatasource(name, options = {}) {
+ const datasource = getLocalDatasource(name);
+ if (!datasource) {
+ if (!options.silent) console.error(pc.red(`Datasource "${name}" not found locally`));
+ return false;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+ if (missing.length > 0) {
+ if (!options.silent) console.error(pc.red(`Missing env vars for "${name}": ${missing.join(", ")}`));
+ return false;
+ }
+ try {
+ await post("/api/datasources", {
+ name: datasource.name,
+ warehouse_type: datasource.type,
+ config: datasource.config,
+ credentials: resolved
+ });
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ //#endregion
+ export { pushDatasource };
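This new chunk shows that `pushDatasource` resolves `{{ env_var('NAME') }}` references via `resolveEnvVarsInCredentials` before POSTing the `name`/`type`/`config`/`credentials` it read with `getLocalDatasource`. A hedged sketch of what a matching `.bon/datasources.yaml` entry might look like; the field names mirror the code above, but the exact YAML nesting is an assumption, since the file format itself is not shown in this diff:

```bash
# Hypothetical layout; normally this file is produced by `bon datasource add`.
cat > .bon/datasources.yaml <<'EOF'
datasources:
  - name: analytics
    type: redshift
    config:
      host: my-cluster.example.us-east-1.redshift.amazonaws.com
      port: "5439"
      database: analytics
      schema: public
    credentials:
      username: bon_user
      password: "{{ env_var('REDSHIFT_PASSWORD') }}"
EOF
```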
@@ -1,4 +1,4 @@
- import { t as getProjectPaths } from "./bon.mjs";
+ import { i as getProjectPaths } from "./bon.mjs";
  import fs from "node:fs";
  import path from "node:path";
  import YAML from "yaml";
@@ -35,10 +35,10 @@ bon docs cubes.measures # Read modeling docs in terminal
  Deploy from GitHub Actions, GitLab CI, or any pipeline:

  ```bash
- bon deploy --ci --push-datasources -m "CI deploy"
+ bon deploy --ci -m "CI deploy"
  ```

- Non-interactive mode with automatic datasource sync. Fails fast if anything is misconfigured.
+ Non-interactive mode. Datasources are synced automatically. Fails fast if anything is misconfigured.

  ## Deployment versioning

@@ -19,15 +19,16 @@ A `-m` message is **required** — it describes what changed in this deployment.
  | Flag | Description |
  |------|-------------|
  | `-m "message"` | **Required.** Deployment description |
- | `--ci` | Non-interactive mode (fails on missing datasources) |
- | `--push-datasources` | Auto-push missing datasources to Bonnard |
+ | `--ci` | Non-interactive mode |
+
+ Datasources are always synced automatically during deploy.

  ### CI/CD

- For automated pipelines, combine `--ci` with `--push-datasources`:
+ For automated pipelines, use `--ci` for non-interactive mode:

  ```bash
- bon deploy --ci --push-datasources -m "CI deploy"
+ bon deploy --ci -m "CI deploy"
  ```

  ## Prerequisites
@@ -148,7 +149,7 @@ Deploy aborted. Fix validation errors first.
  Deploy aborted. Fix connection issues:
  - Check credentials in .bon/datasources.yaml
  - Verify network access to database
- - Run: bon datasource test analytics
+ - Run: bon datasource add (to reconfigure)
  ```

  ### Auth Errors
@@ -146,7 +146,6 @@ bonnard/cubes/
  | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
  | `bon datasource add --from-dbt` | Import from dbt profiles |
  | `bon datasource list` | List configured sources |
- | `bon datasource test <name>` | Test connection (requires login) |
  | `bon validate` | Check cube and view syntax |
  | `bon deploy -m "message"` | Deploy to Bonnard (message required) |
  | `bon deploy --ci` | Non-interactive deploy |
@@ -116,7 +116,7 @@ measures:
  1. **Run before every deploy** — `bon validate && bon deploy`
  2. **Add to CI/CD** — validate on pull requests
  3. **Fix errors first** — don't deploy with validation errors
- 4. **Test connections** — use `bon datasource test <name>` to check connectivity
+ 4. **Test connections** — connections are tested automatically during `bon deploy`

  ## See Also

@@ -15,29 +15,27 @@ confirming progress before moving on.
  Ask the user if they have a warehouse to connect, or want to try a demo dataset first:

  ```bash
- # Option A: Import from dbt (if they use it)
+ # Option A: Use demo data (no warehouse needed)
+ bon datasource add --demo
+
+ # Option B: Import from dbt (if they use it)
  bon datasource add --from-dbt

- # Option B: Add manually (interactive)
- bon datasource add
+ # Option C: Add manually, non-interactive (preferred for agents)
+ bon datasource add --name my_warehouse --type postgres \
+ --host db.example.com --port 5432 --database mydb --schema public \
+ --user myuser --password mypassword

- # Option C: Use demo data (no warehouse needed)
- bon datasource add --demo
+ # Option D: Add manually, interactive (in user's terminal)
+ bon datasource add
  ```

+ Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
+
  The demo option adds a read-only Contoso retail dataset with tables like
  `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.

- Then verify the connection works:
-
- ```bash
- bon datasource test <name>
- ```
-
- If the test fails, common issues:
- - Wrong credentials — re-run `bon datasource add`
- - Network/firewall — check warehouse allows connections from this machine
- - SSL issues (Postgres) — may need `sslmode` in connection config
+ The connection will be tested automatically during `bon deploy`.

  ## Phase 2: Explore the Data

@@ -48,24 +48,26 @@ drives every decision in the remaining phases.

  ## Phase 3: Connect the Data Warehouse

- Add a datasource pointing to the same database that Metabase queries:
+ Add a datasource pointing to the same database that Metabase queries.
+ The database connection details can often be found in Metabase under
+ Admin > Databases, or in the analysis report header.

  ```bash
- # Interactive setup
- bon datasource add
+ # Non-interactive (preferred for agents)
+ bon datasource add --name my_warehouse --type postgres \
+ --host db.example.com --port 5432 --database mydb --schema public \
+ --user myuser --password mypassword

- # Or import from dbt if available
+ # Import from dbt if available
  bon datasource add --from-dbt
- ```
-
- Then verify the connection:

- ```bash
- bon datasource test <name>
+ # Interactive setup (in user's terminal)
+ bon datasource add
  ```

- The database connection details can often be found in Metabase under
- Admin > Databases, or in the analysis report header.
+ Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
+
+ The connection will be tested automatically during `bon deploy`.

  ## Phase 4: Explore Key Tables

@@ -14,29 +14,27 @@ confirming progress before moving on.
  Ask the user if they have a warehouse to connect, or want to try a demo dataset first:

  ```bash
- # Option A: Import from dbt (if they use it)
+ # Option A: Use demo data (no warehouse needed)
+ bon datasource add --demo
+
+ # Option B: Import from dbt (if they use it)
  bon datasource add --from-dbt

- # Option B: Add manually (interactive)
- bon datasource add
+ # Option C: Add manually, non-interactive (preferred for agents)
+ bon datasource add --name my_warehouse --type postgres \
+ --host db.example.com --port 5432 --database mydb --schema public \
+ --user myuser --password mypassword

- # Option C: Use demo data (no warehouse needed)
- bon datasource add --demo
+ # Option D: Add manually, interactive (in user's terminal)
+ bon datasource add
  ```

+ Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
+
  The demo option adds a read-only Contoso retail dataset with tables like
  `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.

- Then verify the connection works:
-
- ```bash
- bon datasource test <name>
- ```
-
- If the test fails, common issues:
- - Wrong credentials — re-run `bon datasource add`
- - Network/firewall — check warehouse allows connections from this machine
- - SSL issues (Postgres) — may need `sslmode` in connection config
+ The connection will be tested automatically during `bon deploy`.

  ## Phase 2: Explore the Data

@@ -47,24 +47,26 @@ drives every decision in the remaining phases.

  ## Phase 3: Connect the Data Warehouse

- Add a datasource pointing to the same database that Metabase queries:
+ Add a datasource pointing to the same database that Metabase queries.
+ The database connection details can often be found in Metabase under
+ Admin > Databases, or in the analysis report header.

  ```bash
- # Interactive setup
- bon datasource add
+ # Non-interactive (preferred for agents)
+ bon datasource add --name my_warehouse --type postgres \
+ --host db.example.com --port 5432 --database mydb --schema public \
+ --user myuser --password mypassword

- # Or import from dbt if available
+ # Import from dbt if available
  bon datasource add --from-dbt
- ```
-
- Then verify the connection:

- ```bash
- bon datasource test <name>
+ # Interactive setup (in user's terminal)
+ bon datasource add
  ```

- The database connection details can often be found in Metabase under
- Admin > Databases, or in the analysis report header.
+ Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
+
+ The connection will be tested automatically during `bon deploy`.

  ## Phase 4: Explore Key Tables

@@ -63,10 +63,9 @@ All tables are in the `contoso` schema. The datasource is named `contoso_demo`.
  | `bon datasource add` | Add warehouse connection |
  | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
  | `bon datasource add --from-dbt` | Import from dbt profiles |
- | `bon datasource test <name>` | Test connection (requires login) |
  | `bon validate` | Validate YAML syntax, warn on missing descriptions and `data_source` |
  | `bon deploy -m "message"` | Deploy to Bonnard (requires login, message required) |
- | `bon deploy --ci` | Non-interactive deploy (fails on missing datasources) |
+ | `bon deploy --ci` | Non-interactive deploy |
  | `bon deployments` | List recent deployments (add `--all` for full history) |
  | `bon diff <deployment-id>` | Show changes in a deployment (`--breaking` for breaking only) |
  | `bon annotate <deployment-id>` | Add reasoning/context to deployment changes |
@@ -118,7 +117,7 @@ Every deploy creates a versioned deployment with change detection:
  - **Diff**: `bon diff <id>` shows all changes; `bon diff <id> --breaking` filters to breaking only
  - **Annotate**: `bon annotate <id> --data '{"object": "note"}'` adds context to changes

- For CI/CD pipelines, use `bon deploy --ci -m "message"` (non-interactive, fails on issues) or `bon deploy --push-datasources -m "message"` to auto-push missing datasources.
+ For CI/CD pipelines, use `bon deploy --ci -m "message"` (non-interactive, fails on issues). Datasources are always synced automatically during deploy.

  ## Best Practices

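The doc examples above pass `--password mypassword` as a literal for brevity. The `datasource add` command also registers `--password-env <varName>`, which stores a `{{ env_var('NAME') }}` reference instead of the plain value; a hedged variant of the same example using it (all values are placeholders):

```bash
# Keeps the secret out of .bon/datasources.yaml as a plain value;
# WAREHOUSE_PASSWORD is a placeholder name.
export WAREHOUSE_PASSWORD='s3cret'
bon datasource add --name my_warehouse --type postgres \
  --host db.example.com --port 5432 --database mydb --schema public \
  --user myuser --password-env WAREHOUSE_PASSWORD
```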
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@bonnard/cli",
- "version": "0.2.2",
+ "version": "0.2.4",
  "type": "module",
  "bin": {
  "bon": "./dist/bin/bon.mjs"