@bonnard/cli 0.2.3 → 0.2.4
This diff shows the published contents of two public package versions as they appear in their registries, and is provided for informational purposes only.
- package/dist/bin/bon.mjs +49 -53
- package/dist/bin/{push-mZujN1Ik.mjs → push-Bv9AFGc2.mjs} +1 -1
- package/dist/docs/topics/features.cli.md +2 -2
- package/dist/docs/topics/workflow.deploy.md +5 -4
- package/dist/templates/claude/skills/bonnard-get-started/SKILL.md +1 -1
- package/dist/templates/claude/skills/bonnard-metabase-migrate/SKILL.md +1 -1
- package/dist/templates/cursor/rules/bonnard-get-started.mdc +1 -1
- package/dist/templates/cursor/rules/bonnard-metabase-migrate.mdc +1 -1
- package/dist/templates/shared/bonnard.md +2 -2
- package/package.json +1 -1
package/dist/bin/bon.mjs
CHANGED
@@ -10,7 +10,6 @@ import os from "node:os";
 import http from "node:http";
 import crypto from "node:crypto";
 import { execFileSync } from "node:child_process";
-import { confirm } from "@inquirer/prompts";
 import { encode } from "@toon-format/toon";
 
 //#region rolldown:runtime
@@ -82,6 +81,7 @@ function mapDbtType(dbtType) {
   snowflake: "snowflake",
   postgres: "postgres",
   postgresql: "postgres",
+  redshift: "redshift",
   bigquery: "bigquery",
   databricks: "databricks"
 }[dbtType.toLowerCase()] ?? null;
@@ -338,6 +338,7 @@ function extractWarehouseFromEnv(cwd) {
   const type = {
     snowflake: "snowflake",
     postgres: "postgres",
+    redshift: "redshift",
     bigquery: "bigquery",
     databricks: "databricks"
   }[cubeDbType[1].trim().toLowerCase()];
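Both mapping tables above (`mapDbtType` and the one inside `extractWarehouseFromEnv`) resolve a warehouse type by lowercasing the adapter name and looking it up in a plain object literal; the change simply adds a `redshift` key to each. A minimal standalone sketch of that lookup pattern (`WAREHOUSE_TYPES` and `mapAdapterType` are illustrative names, not exports of the bundle):

```js
// Illustrative sketch of the lookup pattern used in mapDbtType above.
// Unknown adapter names fall through to null via the nullish-coalescing operator.
const WAREHOUSE_TYPES = {
  snowflake: "snowflake",
  postgres: "postgres",
  postgresql: "postgres",
  redshift: "redshift",
  bigquery: "bigquery",
  databricks: "databricks"
};

function mapAdapterType(adapterType) {
  return WAREHOUSE_TYPES[adapterType.toLowerCase()] ?? null;
}

mapAdapterType("Redshift"); // "redshift"
mapAdapterType("duckdb");   // null
```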
@@ -787,7 +788,6 @@ async function logoutCommand() {
 var api_exports = /* @__PURE__ */ __exportAll({
   del: () => del,
   get: () => get,
-  getRemoteDatasources: () => getRemoteDatasources,
   post: () => post
 });
 const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
@@ -829,12 +829,6 @@ function post(path, body) {
 function del(path) {
   return request("DELETE", path);
 }
-/**
-* Fetch remote datasources from Bonnard server
-*/
-async function getRemoteDatasources() {
-  return (await get("/api/datasources")).dataSources || [];
-}
 
 //#endregion
 //#region src/commands/whoami.ts
@@ -1221,6 +1215,43 @@ const WAREHOUSE_CONFIGS = [
     required: true
   }]
 },
+{
+  value: "redshift",
+  label: "Redshift",
+  configFields: [
+    {
+      name: "host",
+      message: "Host (cluster endpoint)",
+      required: true
+    },
+    {
+      name: "port",
+      message: "Port",
+      default: "5439"
+    },
+    {
+      name: "database",
+      message: "Database name",
+      required: true
+    },
+    {
+      name: "schema",
+      message: "Schema",
+      default: "public"
+    }
+  ],
+  credentialFields: [{
+    name: "username",
+    flag: "user",
+    message: "Username",
+    required: true
+  }, {
+    name: "password",
+    message: "Password",
+    secret: true,
+    required: true
+  }]
+},
 {
   value: "bigquery",
   label: "BigQuery",
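The new `WAREHOUSE_CONFIGS` entry gives Redshift the same shape as the other warehouses: non-secret `configFields` with defaults for port (`5439`) and schema (`public`), plus `credentialFields` for username and password, the latter marked `secret`. A hedged sketch of how such an entry could drive an interactive prompt loop; the `ask` helper, `collectFields`, and the returned object shape are illustrative assumptions rather than the CLI's actual prompt code:

```js
// Illustrative only: `ask` stands in for whatever prompt helper the CLI uses.
async function collectFields(fields, ask) {
  const values = {};
  for (const field of fields) {
    const answer = await ask({
      message: field.message,
      required: field.required ?? false,
      secret: field.secret ?? false,
      default: field.default
    });
    // Empty answers fall back to the declared default (e.g. port "5439", schema "public").
    values[field.name] = answer || field.default;
  }
  return values;
}

// Usage sketch against the Redshift entry added above:
// const config = await collectFields(redshiftEntry.configFields, ask);
// const credentials = await collectFields(redshiftEntry.credentialFields, ask);
```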
@@ -1810,7 +1841,7 @@ async function deployCommand(options = {}) {
 async function testAndSyncDatasources(cwd, options = {}) {
   const { extractDatasourcesFromCubes } = await import("./cubes-9rklhdAJ.mjs");
   const { loadLocalDatasources } = await Promise.resolve().then(() => local_exports);
-  const { pushDatasource } = await import("./push-mZujN1Ik.mjs");
+  const { pushDatasource } = await import("./push-Bv9AFGc2.mjs");
   const references = extractDatasourcesFromCubes(cwd);
   if (references.length === 0) return false;
   console.log();
@@ -1833,51 +1864,16 @@ async function testAndSyncDatasources(cwd, options = {}) {
     console.log(pc.red("Missing datasources. Fix issues before deploying."));
     return true;
   }
-  console.log(pc.dim("
-
-
-
-
-    console.log(pc.red(`Failed to fetch remote datasources: ${err.message}`));
-    return true;
+  console.log(pc.dim("Syncing datasources..."));
+  for (const name of foundDatasources) if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ ${name} synced`));
+  else {
+    console.log(pc.red(`✗ Failed to sync "${name}"`));
+    failed = true;
   }
-
-  const missingRemote = foundDatasources.filter((name) => !remoteNames.has(name));
-  if (missingRemote.length > 0) {
-    console.log();
-    console.log(pc.yellow(`⚠ Missing remote datasource${missingRemote.length > 1 ? "s" : ""}: ${missingRemote.join(", ")}`));
+  if (failed) {
     console.log();
-
-
-    console.log(pc.dim(`Use --push-datasources to auto-push missing datasources`));
-    return true;
-  }
-  if (options.pushDatasources) for (const name of missingRemote) {
-    console.log(pc.dim(`Pushing "${name}"...`));
-    if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
-    else {
-      console.log(pc.red(`✗ Failed to push "${name}"`));
-      return true;
-    }
-  }
-  else {
-    if (!await confirm({
-      message: `Push ${missingRemote.length > 1 ? "these datasources" : `"${missingRemote[0]}"`} to Bonnard? (credentials will be encrypted)`,
-      default: true
-    })) {
-      console.log(pc.dim("Deploy aborted."));
-      return true;
-    }
-    console.log();
-    for (const name of missingRemote) {
-      console.log(pc.dim(`Pushing "${name}"...`));
-      if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
-      else {
-        console.log(pc.red(`✗ Failed to push "${name}"`));
-        return true;
-      }
-    }
-  }
+    console.log(pc.red("Datasource sync failed. Check .bon/datasources.yaml and credentials."));
+    return true;
   }
   console.log();
   console.log(pc.dim("Testing datasource connections..."));
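For readability, the reshaped sync step in the hunk above can be unrolled into block form. The `pc`, `foundDatasources`, and `pushDatasource` names come straight from the diff; the wrapper function, the `let failed = false` declaration, and the meaning of the boolean return are assumptions inferred from context:

```js
// Unrolled sketch of the sync loop added above (inside testAndSyncDatasources).
async function syncFoundDatasources(foundDatasources, pushDatasource, pc) {
  console.log(pc.dim("Syncing datasources..."));
  let failed = false; // assumed: declared earlier in the unchanged part of the real function
  for (const name of foundDatasources) {
    if (await pushDatasource(name, { silent: true })) {
      console.log(pc.green(`✓ ${name} synced`));
    } else {
      console.log(pc.red(`✗ Failed to sync "${name}"`));
      failed = true;
    }
  }
  if (failed) {
    console.log();
    console.log(pc.red("Datasource sync failed. Check .bon/datasources.yaml and credentials."));
    return true; // the caller presumably treats `true` as "abort the deploy"
  }
  return false; // illustrative: continue with connection testing
}
```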
@@ -3818,7 +3814,7 @@ datasource.command("add").description("Add a data source to .bon/datasources.yam
 datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
 datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
 program.command("validate").description("Validate YAML syntax in bonnard/cubes/ and bonnard/views/").action(validateCommand);
-program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode
+program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode").requiredOption("-m, --message <text>", "Deploy message describing your changes").action(deployCommand);
 program.command("deployments").description("List deployment history").option("--all", "Show all deployments (default: last 10)").option("--format <format>", "Output format: table or json", "table").action(deploymentsCommand);
 program.command("diff").description("Show changes in a deployment").argument("<id>", "Deployment ID").option("--format <format>", "Output format: table or json", "table").option("--breaking", "Show only breaking changes").action(diffCommand);
 program.command("annotate").description("Annotate deployment changes with reasoning").argument("<id>", "Deployment ID").option("--data <json>", "Annotations JSON").action(annotateCommand);
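The deploy registration above now chains `.requiredOption("-m, --message <text>", ...)`. Assuming the command wiring is commander, as the `.command()/.option()/.action()` chaining suggests, omitting the message makes the CLI report the missing required option and exit before the action runs. A minimal isolated sketch of that behavior, not the bundled CLI itself:

```js
// Minimal sketch assuming commander.
import { Command } from "commander";

const program = new Command("bon");

program
  .command("deploy")
  .description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources")
  .option("--ci", "Non-interactive mode")
  .requiredOption("-m, --message <text>", "Deploy message describing your changes")
  .action((options) => {
    // options.message is always set here: commander reports a missing
    // required option and exits before invoking the action.
    console.log(options.ci ? "ci deploy:" : "deploy:", options.message);
  });

program.parse();
```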
@@ -35,10 +35,10 @@ bon docs cubes.measures # Read modeling docs in terminal
 Deploy from GitHub Actions, GitLab CI, or any pipeline:
 
 ```bash
-bon deploy --ci
+bon deploy --ci -m "CI deploy"
 ```
 
-Non-interactive mode
+Non-interactive mode. Datasources are synced automatically. Fails fast if anything is misconfigured.
 
 ## Deployment versioning
 
@@ -19,15 +19,16 @@ A `-m` message is **required** — it describes what changed in this deployment.
 | Flag | Description |
 |------|-------------|
 | `-m "message"` | **Required.** Deployment description |
-| `--ci` | Non-interactive mode
-
+| `--ci` | Non-interactive mode |
+
+Datasources are always synced automatically during deploy.
 
 ### CI/CD
 
-For automated pipelines,
+For automated pipelines, use `--ci` for non-interactive mode:
 
 ```bash
-bon deploy --ci
+bon deploy --ci -m "CI deploy"
 ```
 
 ## Prerequisites
@@ -30,7 +30,7 @@ bon datasource add --name my_warehouse --type postgres \
 bon datasource add
 ```
 
-Supported types: `postgres`
+Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
 
 The demo option adds a read-only Contoso retail dataset with tables like
 `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
@@ -65,7 +65,7 @@ bon datasource add --from-dbt
 bon datasource add
 ```
 
-Supported types: `postgres`
+Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
 
 The connection will be tested automatically during `bon deploy`.
 
@@ -29,7 +29,7 @@ bon datasource add --name my_warehouse --type postgres \
 bon datasource add
 ```
 
-Supported types: `postgres`
+Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
 
 The demo option adds a read-only Contoso retail dataset with tables like
 `fact_sales`, `dim_product`, `dim_store`, and `dim_customer`.
@@ -64,7 +64,7 @@ bon datasource add --from-dbt
 bon datasource add
 ```
 
-Supported types: `postgres`
+Supported types: `postgres`, `redshift`, `snowflake`, `bigquery`, `databricks`.
 
 The connection will be tested automatically during `bon deploy`.
 
@@ -65,7 +65,7 @@ All tables are in the `contoso` schema. The datasource is named `contoso_demo`.
 | `bon datasource add --from-dbt` | Import from dbt profiles |
 | `bon validate` | Validate YAML syntax, warn on missing descriptions and `data_source` |
 | `bon deploy -m "message"` | Deploy to Bonnard (requires login, message required) |
-| `bon deploy --ci` | Non-interactive deploy
+| `bon deploy --ci` | Non-interactive deploy |
 | `bon deployments` | List recent deployments (add `--all` for full history) |
 | `bon diff <deployment-id>` | Show changes in a deployment (`--breaking` for breaking only) |
 | `bon annotate <deployment-id>` | Add reasoning/context to deployment changes |
@@ -117,7 +117,7 @@ Every deploy creates a versioned deployment with change detection:
 - **Diff**: `bon diff <id>` shows all changes; `bon diff <id> --breaking` filters to breaking only
 - **Annotate**: `bon annotate <id> --data '{"object": "note"}'` adds context to changes
 
-For CI/CD pipelines, use `bon deploy --ci -m "message"` (non-interactive, fails on issues)
+For CI/CD pipelines, use `bon deploy --ci -m "message"` (non-interactive, fails on issues). Datasources are always synced automatically during deploy.
 
 ## Best Practices
 