@bonnard/cli 0.2.10 → 0.2.12
- package/README.md +166 -51
- package/dist/bin/api-B7cdKn9j.mjs +3 -0
- package/dist/bin/api-DqgY-30K.mjs +75 -0
- package/dist/bin/bon.mjs +243 -282
- package/dist/bin/{cubes-9rklhdAJ.mjs → cubes-BvtwNBUG.mjs} +1 -1
- package/dist/bin/local-BkK5XL7T.mjs +3 -0
- package/dist/bin/local-ByvuW3eV.mjs +149 -0
- package/dist/bin/project-Dj085D_B.mjs +27 -0
- package/dist/bin/{push-Bv9AFGc2.mjs → push-BOkUmRL8.mjs} +2 -1
- package/dist/bin/{validate-Bc8zGNw7.mjs → validate-C4W_Vto2.mjs} +1 -1
- package/dist/docs/topics/dashboards.examples.md +59 -76
- package/dist/docs/topics/dashboards.inputs.md +17 -23
- package/dist/docs/topics/dashboards.md +5 -7
- package/dist/docs/topics/dashboards.queries.md +17 -24
- package/dist/docs/topics/querying.sdk.md +3 -4
- package/package.json +1 -1
package/dist/bin/bon.mjs
CHANGED
@@ -1,56 +1,90 @@
 #!/usr/bin/env node
+import { n as getProjectPaths, t as BONNARD_DIR } from "./project-Dj085D_B.mjs";
+import { a as loadCredentials, i as clearCredentials, n as get, o as saveCredentials, r as post, t as del } from "./api-DqgY-30K.mjs";
+import { i as ensureBonDir, n as addLocalDatasource, o as loadLocalDatasources, r as datasourceExists, s as removeLocalDatasource, t as isDatasourcesTrackedByGit } from "./local-ByvuW3eV.mjs";
 import { createRequire } from "node:module";
 import { program } from "commander";
 import fs from "node:fs";
 import path from "node:path";
-import
+import os from "node:os";
 import pc from "picocolors";
+import { fileURLToPath } from "node:url";
 import YAML from "yaml";
-import os from "node:os";
 import http from "node:http";
 import crypto from "node:crypto";
-import { execFileSync } from "node:child_process";
 import { encode } from "@toon-format/toon";
 
 //#region \0rolldown/runtime.js
-var __defProp = Object.defineProperty;
-var __exportAll = (all, no_symbols) => {
-  let target = {};
-  for (var name in all) {
-    __defProp(target, name, {
-      get: all[name],
-      enumerable: true
-    });
-  }
-  if (!no_symbols) {
-    __defProp(target, Symbol.toStringTag, { value: "Module" });
-  }
-  return target;
-};
 var __require = /* @__PURE__ */ createRequire(import.meta.url);
 
 //#endregion
-//#region src/lib/
-
-
-
-
-
-
+//#region src/lib/update-check.ts
+const CACHE_DIR = path.join(os.homedir(), ".config", "bon");
+const CACHE_FILE = path.join(CACHE_DIR, "update-check.json");
+const CHECK_INTERVAL_MS = 1440 * 60 * 1e3;
+const REGISTRY_URL = "https://registry.npmjs.org/@bonnard/cli/latest";
+const FETCH_TIMEOUT_MS = 3e3;
+function readCache() {
+  try {
+    const raw = fs.readFileSync(CACHE_FILE, "utf-8");
+    return JSON.parse(raw);
+  } catch {
+    return null;
+  }
+}
+function writeCache(data) {
+  try {
+    fs.mkdirSync(CACHE_DIR, { recursive: true });
+    fs.writeFileSync(CACHE_FILE, JSON.stringify(data));
+  } catch {}
+}
+function isNewer(latest, current) {
+  const l = latest.split(".").map(Number);
+  const c = current.split(".").map(Number);
+  for (let i = 0; i < 3; i++) {
+    if ((l[i] ?? 0) > (c[i] ?? 0)) return true;
+    if ((l[i] ?? 0) < (c[i] ?? 0)) return false;
+  }
+  return false;
+}
+async function fetchLatestVersion() {
+  try {
+    const controller = new AbortController();
+    const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
+    const res = await fetch(REGISTRY_URL, { signal: controller.signal });
+    clearTimeout(timeout);
+    if (!res.ok) return null;
+    return (await res.json()).version ?? null;
+  } catch {
+    return null;
+  }
+}
 /**
- *
- *
+ * Start a background version check. Returns a function that,
+ * when called, prints an update notice if a newer version exists.
+ * The check is cached for 24 hours and never blocks the CLI.
  */
-function
-const
-
-
-
-
-
+function startUpdateCheck(currentVersion) {
+  const cached = readCache();
+  const now = Date.now();
+  if (cached && now - cached.lastCheck < CHECK_INTERVAL_MS) return async () => {
+    if (isNewer(cached.latestVersion, currentVersion)) printUpdateNotice(cached.latestVersion, currentVersion);
+  };
+  const fetchPromise = fetchLatestVersion();
+  return async () => {
+    const latest = await fetchPromise;
+    if (latest) {
+      writeCache({
+        lastCheck: now,
+        latestVersion: latest
+      });
+      if (isNewer(latest, currentVersion)) printUpdateNotice(latest, currentVersion);
+    }
   };
 }
+function printUpdateNotice(latest, current) {
+  console.error(`\nUpdate available: ${pc.yellow(latest)} (currently installed ${current})\nRun ${pc.cyan("npm install -g @bonnard/cli")} to update\n`);
+}
 
 //#endregion
 //#region src/lib/dbt/profiles.ts
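
For orientation: isNewer() above is a plain numeric major/minor/patch comparison with no pre-release handling. A few illustrative calls (a sketch, not part of the package):

    // Behavior of the isNewer() comparator added in the hunk above:
    isNewer("0.2.12", "0.2.10"); // true:  patch bump
    isNewer("0.2.10", "0.2.10"); // false: versions are equal
    isNewer("0.10.0", "0.9.9");  // true:  numeric, not lexicographic, comparison
    isNewer("1.0", "1.0.1");     // false: missing components are treated as 0
    // writeCache() persists { lastCheck: <epoch ms>, latestVersion: "x.y.z" }
    // to ~/.config/bon/update-check.json, which readCache() reads back.
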
@@ -83,7 +117,8 @@ function mapDbtType(dbtType) {
     postgresql: "postgres",
     redshift: "redshift",
     bigquery: "bigquery",
-    databricks: "databricks"
+    databricks: "databricks",
+    duckdb: "duckdb"
   }[dbtType.toLowerCase()] ?? null;
 }
 /**
@@ -156,6 +191,8 @@ const PYTHON_PACKAGES = {
   "dbt-postgres": "dbt",
   "dbt-bigquery": "dbt",
   "dbt-databricks": "dbt",
+  "dbt-duckdb": "dbt",
+  duckdb: "duckdb",
   dagster: "dagster",
   sqlmesh: "sqlmesh",
   "apache-airflow": "airflow",
@@ -340,7 +377,8 @@ function extractWarehouseFromEnv(cwd) {
       postgres: "postgres",
       redshift: "redshift",
       bigquery: "bigquery",
-      databricks: "databricks"
+      databricks: "databricks",
+      duckdb: "duckdb"
     }[cubeDbType[1].trim().toLowerCase()];
     if (type) return {
       type,
@@ -361,6 +399,11 @@ function extractWarehouseFromEnv(cwd) {
       source: "env",
       config: {}
     };
+    if (content.match(/^MOTHERDUCK_TOKEN=/m) || content.match(/^CUBEJS_DB_DUCKDB_DATABASE_PATH=/m)) return {
+      type: "duckdb",
+      source: "env",
+      config: {}
+    };
   } catch {}
   return null;
 }
@@ -625,41 +668,14 @@ async function initCommand() {
   }
 }
 
-//#endregion
-//#region src/lib/credentials.ts
-const CREDENTIALS_DIR = path.join(os.homedir(), ".config", "bon");
-const CREDENTIALS_FILE = path.join(CREDENTIALS_DIR, "credentials.json");
-function saveCredentials(credentials) {
-  fs.mkdirSync(CREDENTIALS_DIR, {
-    recursive: true,
-    mode: 448
-  });
-  fs.writeFileSync(CREDENTIALS_FILE, JSON.stringify(credentials, null, 2), { mode: 384 });
-}
-function loadCredentials() {
-  try {
-    const raw = fs.readFileSync(CREDENTIALS_FILE, "utf-8");
-    const parsed = JSON.parse(raw);
-    if (parsed.token && parsed.email) return parsed;
-    return null;
-  } catch {
-    return null;
-  }
-}
-function clearCredentials() {
-  try {
-    fs.unlinkSync(CREDENTIALS_FILE);
-  } catch {}
-}
-
 //#endregion
 //#region src/commands/login.ts
-const APP_URL
+const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
 const TIMEOUT_MS = 120 * 1e3;
 async function loginCommand() {
   const state = crypto.randomUUID();
   const { port, close } = await startCallbackServer(state);
-  const url = `${APP_URL
+  const url = `${APP_URL}/auth/device?state=${state}&port=${port}`;
   console.log(pc.dim(`Opening browser to ${url}`));
   const open = (await import("open")).default;
   await open(url);
@@ -788,53 +804,6 @@ async function logoutCommand() {
   console.log(pc.green("Logged out"));
 }
 
-//#endregion
-//#region src/lib/api.ts
-var api_exports = /* @__PURE__ */ __exportAll({
-  del: () => del,
-  get: () => get,
-  post: () => post
-});
-const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
-const VERCEL_BYPASS = process.env.VERCEL_AUTOMATION_BYPASS_SECRET;
-function getToken() {
-  const creds = loadCredentials();
-  if (!creds) {
-    console.error(pc.red("Not logged in. Run `bon login` first."));
-    process.exit(1);
-  }
-  return creds.token;
-}
-async function request(method, path, body) {
-  const token = getToken();
-  const url = `${APP_URL}${path}`;
-  const headers = {
-    Authorization: `Bearer ${token}`,
-    "Content-Type": "application/json"
-  };
-  if (VERCEL_BYPASS) headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;
-  const res = await fetch(url, {
-    method,
-    headers,
-    body: body ? JSON.stringify(body) : void 0
-  });
-  const data = await res.json();
-  if (!res.ok) {
-    const message = data.error || res.statusText;
-    throw new Error(message);
-  }
-  return data;
-}
-function get(path) {
-  return request("GET", path);
-}
-function post(path, body) {
-  return request("POST", path, body);
-}
-function del(path) {
-  return request("DELETE", path);
-}
-
 //#endregion
 //#region src/commands/whoami.ts
 async function whoamiCommand(options = {}) {
@@ -859,163 +828,6 @@ async function whoamiCommand(options = {}) {
   }
 }
 
-//#endregion
-//#region src/lib/local/datasources.ts
-/**
- * Local datasource storage (.bon/datasources.yaml)
- *
- * Single file containing both config and credentials.
- * Credentials may contain:
- * - Plain values: "my_password"
- * - dbt env var syntax: "{{ env_var('MY_PASSWORD') }}"
- *
- * Env vars are resolved at deploy time, not import time.
- */
-const BON_DIR$2 = ".bon";
-const DATASOURCES_FILE$1 = "datasources.yaml";
-function getBonDir(cwd = process.cwd()) {
-  return path.join(cwd, BON_DIR$2);
-}
-function getDatasourcesPath$1(cwd = process.cwd()) {
-  return path.join(getBonDir(cwd), DATASOURCES_FILE$1);
-}
-/**
- * Ensure .bon directory exists
- */
-function ensureBonDir(cwd = process.cwd()) {
-  const bonDir = getBonDir(cwd);
-  if (!fs.existsSync(bonDir)) fs.mkdirSync(bonDir, { recursive: true });
-}
-/**
- * Load all local datasources
- */
-function loadLocalDatasources(cwd = process.cwd()) {
-  const filePath = getDatasourcesPath$1(cwd);
-  if (!fs.existsSync(filePath)) return [];
-  try {
-    const content = fs.readFileSync(filePath, "utf-8");
-    return YAML.parse(content)?.datasources ?? [];
-  } catch {
-    return [];
-  }
-}
-/**
- * Save all local datasources (with secure permissions since it contains credentials)
- */
-function saveLocalDatasources(datasources, cwd = process.cwd()) {
-  ensureBonDir(cwd);
-  const filePath = getDatasourcesPath$1(cwd);
-  const file = { datasources };
-  const content = `# Bonnard datasources configuration
-# This file contains credentials - add to .gitignore
-# Env vars like {{ env_var('PASSWORD') }} are resolved at deploy time
-
-` + YAML.stringify(file, { indent: 2 });
-  fs.writeFileSync(filePath, content, { mode: 384 });
-}
-/**
- * Add a single datasource (updates existing or appends new)
- */
-function addLocalDatasource(datasource, cwd = process.cwd()) {
-  const existing = loadLocalDatasources(cwd);
-  const index = existing.findIndex((ds) => ds.name === datasource.name);
-  if (index >= 0) existing[index] = datasource;
-  else existing.push(datasource);
-  saveLocalDatasources(existing, cwd);
-}
-/**
- * Remove a datasource by name
- */
-function removeLocalDatasource(name, cwd = process.cwd()) {
-  const existing = loadLocalDatasources(cwd);
-  const filtered = existing.filter((ds) => ds.name !== name);
-  if (filtered.length === existing.length) return false;
-  saveLocalDatasources(filtered, cwd);
-  return true;
-}
-/**
- * Get a single datasource by name
- */
-function getLocalDatasource(name, cwd = process.cwd()) {
-  return loadLocalDatasources(cwd).find((ds) => ds.name === name) ?? null;
-}
-/**
- * Check if a datasource name already exists locally
- */
-function datasourceExists(name, cwd = process.cwd()) {
-  return getLocalDatasource(name, cwd) !== null;
-}
-/**
- * Resolve {{ env_var('VAR_NAME') }} patterns in credentials
- * Used at deploy time to resolve env vars before uploading
- */
-function resolveEnvVarsInCredentials(credentials) {
-  const resolved = {};
-  const missing = [];
-  const envVarPattern = /\{\{\s*env_var\(['"]([\w_]+)['"]\)\s*\}\}/;
-  for (const [key, value] of Object.entries(credentials)) {
-    const match = value.match(envVarPattern);
-    if (match) {
-      const varName = match[1];
-      const envValue = process.env[varName];
-      if (envValue !== void 0) resolved[key] = envValue;
-      else {
-        missing.push(varName);
-        resolved[key] = value;
-      }
-    } else resolved[key] = value;
-  }
-  return {
-    resolved,
-    missing
-  };
-}
-
-//#endregion
-//#region src/lib/local/credentials.ts
-/**
- * Credential utilities (git tracking check)
- */
-const BON_DIR$1 = ".bon";
-const DATASOURCES_FILE = "datasources.yaml";
-function getDatasourcesPath(cwd = process.cwd()) {
-  return path.join(cwd, BON_DIR$1, DATASOURCES_FILE);
-}
-/**
- * Check if datasources file is tracked by git (it shouldn't be - contains credentials)
- */
-function isDatasourcesTrackedByGit(cwd = process.cwd()) {
-  const filePath = getDatasourcesPath(cwd);
-  if (!fs.existsSync(filePath)) return false;
-  try {
-    execFileSync("git", [
-      "ls-files",
-      "--error-unmatch",
-      filePath
-    ], {
-      cwd,
-      stdio: "pipe"
-    });
-    return true;
-  } catch {
-    return false;
-  }
-}
-
-//#endregion
-//#region src/lib/local/index.ts
-var local_exports = /* @__PURE__ */ __exportAll({
-  addLocalDatasource: () => addLocalDatasource,
-  datasourceExists: () => datasourceExists,
-  ensureBonDir: () => ensureBonDir,
-  getLocalDatasource: () => getLocalDatasource,
-  isDatasourcesTrackedByGit: () => isDatasourcesTrackedByGit,
-  loadLocalDatasources: () => loadLocalDatasources,
-  removeLocalDatasource: () => removeLocalDatasource,
-  resolveEnvVarsInCredentials: () => resolveEnvVarsInCredentials,
-  saveLocalDatasources: () => saveLocalDatasources
-});
-
 //#endregion
 //#region src/lib/dbt/mapping.ts
 /**
@@ -1104,6 +916,19 @@ function mapDatabricks(config) {
   };
 }
 /**
+ * Map DuckDB dbt config to Bonnard format
+ */
+function mapDuckDB(config) {
+  const dbPath = getString(config, "path") || getString(config, "database");
+  return {
+    config: {
+      ...dbPath && { database_path: dbPath },
+      ...getString(config, "schema") && { schema: getString(config, "schema") }
+    },
+    credentials: { ...getString(config, "motherduck_token") && { motherduck_token: getString(config, "motherduck_token") } }
+  };
+}
+/**
  * Map a parsed dbt connection to Bonnard format
  * Values are copied as-is, including {{ env_var(...) }} patterns
  */
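
To see what the new mapper produces, here is a sketch of mapDuckDB() applied to an illustrative parsed dbt duckdb target (values are made up; per the doc comment in the hunk above, env_var patterns are copied as-is):

    // Input stands in for a parsed profiles.yml target (illustrative values):
    const mapped = mapDuckDB({
      path: "md:analytics", // `path` takes precedence over `database`
      schema: "main",
      motherduck_token: "{{ env_var('MOTHERDUCK_TOKEN') }}"
    });
    // => {
    //   config: { database_path: "md:analytics", schema: "main" },
    //   credentials: { motherduck_token: "{{ env_var('MOTHERDUCK_TOKEN') }}" }
    // }
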
@@ -1123,6 +948,9 @@ function mapDbtConnection(connection) {
     case "databricks":
       mapped = mapDatabricks(config);
       break;
+    case "duckdb":
+      mapped = mapDuckDB(config);
+      break;
     default: throw new Error(`Unsupported warehouse type: ${type}`);
   }
   return { datasource: {
@@ -1317,6 +1145,26 @@ const WAREHOUSE_CONFIGS = [
     secret: true,
     required: true
   }]
+},
+{
+  value: "duckdb",
+  label: "DuckDB",
+  configFields: [{
+    name: "database_path",
+    flag: "databasePath",
+    message: "Database path (file path, :memory:, or md:db_name for MotherDuck)",
+    required: true
+  }, {
+    name: "schema",
+    message: "Schema name",
+    default: "main"
+  }],
+  credentialFields: [{
+    name: "motherduck_token",
+    flag: "motherduckToken",
+    message: "MotherDuck token (required for md: paths)",
+    secret: true
+  }]
 }
 ];
 /**
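
With this WAREHOUSE_CONFIGS entry, a DuckDB source can be added non-interactively via the flags registered near the end of this diff (--database-path, --motherduck-token, --motherduck-token-env). A hypothetical resulting datasource object follows; the name/type/config/credentials shape is an assumption, since the on-disk YAML is not shown in this diff:

    // e.g. bon datasource add --name local_duck --type duckdb \
    //        --database-path md:analytics --motherduck-token-env MOTHERDUCK_TOKEN
    const duckdbDatasource = {
      name: "local_duck",   // hypothetical name
      type: "duckdb",
      config: { database_path: "md:analytics", schema: "main" }, // schema defaults to "main"
      credentials: { motherduck_token: "{{ env_var('MOTHERDUCK_TOKEN') }}" } // resolved at deploy time
    };
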
@@ -1332,8 +1180,10 @@ function formatType$1(type) {
   return {
     snowflake: "Snowflake",
     postgres: "Postgres",
+    redshift: "Redshift",
     bigquery: "BigQuery",
-    databricks: "Databricks"
+    databricks: "Databricks",
+    duckdb: "DuckDB"
   }[type] || type;
 }
 /**
@@ -1367,7 +1217,7 @@ async function importFromDbt(options) {
   }
   if (connections.length === 0) {
     console.log(pc.yellow("No supported connections found in dbt profiles."));
-    console.log(pc.dim("Supported types: snowflake, postgres, bigquery, databricks"));
+    console.log(pc.dim("Supported types: snowflake, postgres, redshift, bigquery, databricks, duckdb"));
     process.exit(0);
   }
   if (typeof options.fromDbt === "string") {
@@ -1483,7 +1333,7 @@ async function addManual(options) {
   const warehouseConfig = WAREHOUSE_CONFIGS.find((w) => w.value === warehouseType);
   if (!warehouseConfig) {
     console.error(pc.red(`Invalid warehouse type: ${warehouseType}`));
-    console.log(pc.dim("Valid types: snowflake, postgres, bigquery, databricks"));
+    console.log(pc.dim("Valid types: snowflake, postgres, redshift, bigquery, databricks, duckdb"));
     process.exit(1);
   }
   const config = {};
@@ -1507,6 +1357,7 @@ async function addManual(options) {
   let value;
   if (field.name === "password" && options.passwordEnv) value = envVarRef(options.passwordEnv);
   else if (field.name === "token" && options.tokenEnv) value = envVarRef(options.tokenEnv);
+  else if (field.name === "motherduck_token" && options.motherduckTokenEnv) value = envVarRef(options.motherduckTokenEnv);
   else value = getOptionValue(options, field);
   if (!value && !nonInteractive) if (field.secret) value = await password({ message: field.message + ":" });
   else value = await input({ message: field.message + ":" });
@@ -1630,7 +1481,7 @@ async function listRemoteDatasources() {
     return;
   }
   try {
-    const { get } = await
+    const { get } = await import("./api-B7cdKn9j.mjs");
     const result = await get("/api/datasources");
     if (result.dataSources.length === 0) {
       console.log(pc.dim("No remote data sources found."));
@@ -1691,7 +1542,7 @@ async function removeRemote(name) {
     process.exit(1);
   }
   try {
-    const { del } = await
+    const { del } = await import("./api-B7cdKn9j.mjs");
     await del(`/api/datasources/${encodeURIComponent(name)}`);
     console.log(pc.green(`✓ Removed "${name}" from remote server`));
   } catch (err) {
@@ -1709,7 +1560,7 @@ async function validateCommand() {
     console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
     process.exit(1);
   }
-  const { validate } = await import("./validate-Bc8zGNw7.mjs");
+  const { validate } = await import("./validate-C4W_Vto2.mjs");
   const result = await validate(cwd);
   if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
     console.log(pc.yellow(`No cube or view files found in ${BONNARD_DIR}/cubes/ or ${BONNARD_DIR}/views/.`));
@@ -1783,7 +1634,7 @@ async function deployCommand(options = {}) {
     process.exit(1);
   }
   console.log(pc.dim("Validating cubes and views..."));
-  const { validate } = await import("./validate-Bc8zGNw7.mjs");
+  const { validate } = await import("./validate-C4W_Vto2.mjs");
   const result = await validate(cwd);
   if (!result.valid) {
     console.log(pc.red("Validation failed:\n"));
@@ -1852,9 +1703,9 @@
  * Returns true if any connection failed (strict mode)
  */
 async function testAndSyncDatasources(cwd, options = {}) {
-  const { extractDatasourcesFromCubes } = await import("./cubes-9rklhdAJ.mjs");
-  const { loadLocalDatasources } = await
-  const { pushDatasource } = await import("./push-Bv9AFGc2.mjs");
+  const { extractDatasourcesFromCubes } = await import("./cubes-BvtwNBUG.mjs");
+  const { loadLocalDatasources } = await import("./local-BkK5XL7T.mjs");
+  const { pushDatasource } = await import("./push-BOkUmRL8.mjs");
   const references = extractDatasourcesFromCubes(cwd);
   if (references.length === 0) return false;
   console.log();
@@ -3891,6 +3742,110 @@ async function metabaseAnalyzeCommand(options) {
   console.log(pc.dim(`Full report: ${outputPath}`));
 }
 
+//#endregion
+//#region src/commands/keys/list.ts
+function formatDate(dateStr) {
+  if (!dateStr) return "—";
+  return new Date(dateStr).toLocaleDateString("en-US", {
+    month: "short",
+    day: "numeric",
+    year: "numeric"
+  });
+}
+function printKeyTable(keys, title, description) {
+  console.log(pc.bold(title));
+  console.log(pc.dim(description));
+  console.log();
+  if (keys.length === 0) {
+    console.log(pc.dim(" No keys."));
+    return;
+  }
+  const maxNameLen = Math.max(...keys.map((k) => k.name.length), 4);
+  const maxPrefixLen = Math.max(...keys.map((k) => k.key_prefix.length + 3), 3);
+  const header = ` ${"NAME".padEnd(maxNameLen)} ${"KEY".padEnd(maxPrefixLen)} ${"CREATED".padEnd(14)} LAST USED`;
+  console.log(pc.dim(header));
+  console.log(pc.dim(" " + "─".repeat(header.length - 2)));
+  for (const k of keys) {
+    const name = k.name.padEnd(maxNameLen);
+    const prefix = (k.key_prefix + "...").padEnd(maxPrefixLen);
+    const created = formatDate(k.created_at).padEnd(14);
+    const lastUsed = formatDate(k.last_used_at);
+    console.log(` ${pc.bold(name)} ${pc.dim(prefix)} ${created} ${lastUsed}`);
+  }
+}
+async function keysListCommand() {
+  try {
+    const result = await get("/api/web/keys");
+    printKeyTable(result.publishableKeys, "Publishable Keys", "Client-side, read-only access");
+    console.log();
+    printKeyTable(result.secretKeys, "Secret Keys", "Server-side, full access");
+    const total = result.publishableKeys.length + result.secretKeys.length;
+    console.log();
+    console.log(pc.dim(`${total} key${total !== 1 ? "s" : ""} total`));
+  } catch (err) {
+    console.error(pc.red(`Error: ${err.message}`));
+    process.exit(1);
+  }
+}
+
+//#endregion
+//#region src/commands/keys/create.ts
+async function keysCreateCommand(options) {
+  const { name, type } = options;
+  if (type !== "publishable" && type !== "secret") {
+    console.error(pc.red("Error: --type must be 'publishable' or 'secret'"));
+    process.exit(1);
+  }
+  try {
+    const result = await post("/api/web/keys", {
+      name,
+      type
+    });
+    console.log(pc.green("Key created successfully."));
+    console.log();
+    console.log(pc.bold(" Name: ") + result.name);
+    console.log(pc.bold(" Type: ") + type);
+    console.log(pc.bold(" Key: ") + result.key);
+    console.log();
+    console.log(pc.yellow("⚠ Save this key now — it won't be shown again."));
+  } catch (err) {
+    console.error(pc.red(`Error: ${err.message}`));
+    process.exit(1);
+  }
+}
+
+//#endregion
+//#region src/commands/keys/revoke.ts
+async function keysRevokeCommand(nameOrPrefix) {
+  try {
+    const result = await get("/api/web/keys");
+    let match;
+    let type;
+    for (const k of result.publishableKeys) if (k.name === nameOrPrefix || k.key_prefix.startsWith(nameOrPrefix)) {
+      match = k;
+      type = "publishable";
+      break;
+    }
+    if (!match) {
+      for (const k of result.secretKeys) if (k.name === nameOrPrefix || k.key_prefix.startsWith(nameOrPrefix)) {
+        match = k;
+        type = "secret";
+        break;
+      }
+    }
+    if (!match || !type) {
+      console.error(pc.red(`No key found matching "${nameOrPrefix}".`));
+      console.error(pc.dim("Use `bon keys list` to see available keys."));
+      process.exit(1);
+    }
+    await del(`/api/web/keys/${match.id}?type=${type}`);
+    console.log(pc.green(`Revoked ${type} key "${match.name}" (${match.key_prefix}...).`));
+  } catch (err) {
+    console.error(pc.red(`Error: ${err.message}`));
+    process.exit(1);
+  }
+}
+
 //#endregion
 //#region src/bin/bon.ts
 const { version } = createRequire(import.meta.url)("../../package.json");
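
The three keys commands above all read GET /api/web/keys. A sketch of the response shape they consume; field names are taken from the code, values are illustrative:

    const result = {
      publishableKeys: [{
        id: "key_123",                      // revoked via DELETE /api/web/keys/:id?type=publishable
        name: "Production SDK",
        key_prefix: "pk_abc",               // printed as "pk_abc..."
        created_at: "2025-01-15T00:00:00Z", // formatted "Jan 15, 2025"
        last_used_at: null                  // rendered as "—"
      }],
      secretKeys: []                        // same shape, type "secret"
    };
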
@@ -3900,7 +3855,7 @@ program.command("login").description("Authenticate with Bonnard via your browser
 program.command("logout").description("Remove stored credentials").action(logoutCommand);
 program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
 const datasource = program.command("datasource").description("Manage warehouse data source connections");
-datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
+datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--demo", "Add a read-only demo datasource (Contoso retail dataset) for testing").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, redshift, bigquery, databricks, duckdb (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--database-path <databasePath>", "Database path: file path, :memory:, or md:db_name for MotherDuck (DuckDB)").option("--motherduck-token <token>", "MotherDuck token (DuckDB, for md: paths)").option("--motherduck-token-env <varName>", "Env var name for MotherDuck token, stores as {{ env_var('NAME') }}").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
 datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
 datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
 program.command("validate").description("Validate YAML syntax in bonnard/cubes/ and bonnard/views/").action(validateCommand);
@@ -3911,11 +3866,17 @@ program.command("annotate").description("Annotate deployment changes with reason
 program.command("mcp").description("MCP connection info and setup instructions").action(mcpCommand).command("test").description("Test MCP server connectivity").action(mcpTestCommand);
 program.command("query").description("Execute a query against the deployed semantic layer").argument("<query>", "JSON query or SQL (with --sql flag)").option("--sql", "Use SQL API instead of JSON format").option("--limit <limit>", "Max rows to return").option("--format <format>", "Output format: toon or json", "toon").action(cubeQueryCommand);
 program.command("docs").description("Browse documentation for building cubes and views").argument("[topic]", "Topic to display (e.g., cubes, cubes.measures)").option("-r, --recursive", "Show topic and all child topics").option("-s, --search <query>", "Search topics for a keyword").option("-f, --format <format>", "Output format: markdown or json", "markdown").action(docsCommand).command("schema").description("Show JSON schema for a type (cube, view, measure, etc.)").argument("<type>", "Schema type to display").action(docsSchemaCommand);
+const keys = program.command("keys").description("Manage API keys for the Bonnard SDK");
+keys.command("list").description("List all API keys for your organization").action(keysListCommand);
+keys.command("create").description("Create a new API key").requiredOption("--name <name>", "Key name (e.g. 'Production SDK')").requiredOption("--type <type>", "Key type: publishable or secret").action(keysCreateCommand);
+keys.command("revoke").description("Revoke an API key by name or prefix").argument("<name-or-prefix>", "Key name or key prefix to revoke").action(keysRevokeCommand);
 const metabase = program.command("metabase").description("Connect to and explore Metabase content");
 metabase.command("connect").description("Configure Metabase API connection").option("--url <url>", "Metabase instance URL").option("--api-key <key>", "Metabase API key").option("--force", "Overwrite existing configuration").action(metabaseConnectCommand);
 metabase.command("explore").description("Browse Metabase databases, collections, cards, and dashboards").argument("[resource]", "databases, collections, cards, dashboards, card, dashboard, database, table, collection").argument("[id]", "Resource ID (e.g. card <id>, dashboard <id>, database <id>, table <id>, collection <id>)").action(metabaseExploreCommand);
 metabase.command("analyze").description("Analyze Metabase instance and generate a structured report for semantic layer planning").option("--output <path>", "Output file path", ".bon/metabase-analysis.md").option("--top-cards <n>", "Number of top cards to include in report", "50").action(metabaseAnalyzeCommand);
-
+const showUpdateNotice = startUpdateCheck(version);
+await program.parseAsync();
+await showUpdateNotice();
 
 //#endregion
-export {
+export { };