@bonnard/cli 0.1.2 → 0.1.4

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (49)
  1. package/dist/bin/bon.mjs +1892 -97
  2. package/dist/bin/models-IsV2sX74.mjs +76 -0
  3. package/dist/bin/{validate-Bd1D39Bj.mjs → validate-C4EHvJzJ.mjs} +47 -4
  4. package/dist/docs/README.md +82 -0
  5. package/dist/docs/_index.md +69 -0
  6. package/dist/docs/topics/cubes.data-source.md +96 -0
  7. package/dist/docs/topics/cubes.dimensions.format.md +199 -0
  8. package/dist/docs/topics/cubes.dimensions.md +188 -0
  9. package/dist/docs/topics/cubes.dimensions.primary-key.md +110 -0
  10. package/dist/docs/topics/cubes.dimensions.sub-query.md +178 -0
  11. package/dist/docs/topics/cubes.dimensions.time.md +115 -0
  12. package/dist/docs/topics/cubes.dimensions.types.md +111 -0
  13. package/dist/docs/topics/cubes.extends.md +153 -0
  14. package/dist/docs/topics/cubes.hierarchies.md +178 -0
  15. package/dist/docs/topics/cubes.joins.md +119 -0
  16. package/dist/docs/topics/cubes.md +121 -0
  17. package/dist/docs/topics/cubes.measures.calculated.md +103 -0
  18. package/dist/docs/topics/cubes.measures.drill-members.md +162 -0
  19. package/dist/docs/topics/cubes.measures.filters.md +90 -0
  20. package/dist/docs/topics/cubes.measures.format.md +157 -0
  21. package/dist/docs/topics/cubes.measures.md +166 -0
  22. package/dist/docs/topics/cubes.measures.rolling.md +123 -0
  23. package/dist/docs/topics/cubes.measures.types.md +126 -0
  24. package/dist/docs/topics/cubes.public.md +176 -0
  25. package/dist/docs/topics/cubes.refresh-key.md +157 -0
  26. package/dist/docs/topics/cubes.segments.md +125 -0
  27. package/dist/docs/topics/cubes.sql.md +65 -0
  28. package/dist/docs/topics/pre-aggregations.md +130 -0
  29. package/dist/docs/topics/pre-aggregations.rollup.md +166 -0
  30. package/dist/docs/topics/syntax.context-variables.md +157 -0
  31. package/dist/docs/topics/syntax.md +137 -0
  32. package/dist/docs/topics/syntax.references.md +178 -0
  33. package/dist/docs/topics/views.cubes.md +166 -0
  34. package/dist/docs/topics/views.folders.md +158 -0
  35. package/dist/docs/topics/views.includes.md +143 -0
  36. package/dist/docs/topics/views.md +142 -0
  37. package/dist/docs/topics/workflow.deploy.md +132 -0
  38. package/dist/docs/topics/workflow.md +151 -0
  39. package/dist/docs/topics/workflow.query.md +203 -0
  40. package/dist/docs/topics/workflow.validate.md +156 -0
  41. package/dist/templates/claude/rules/bonnard.md +15 -0
  42. package/dist/templates/claude/settings.json +7 -0
  43. package/dist/templates/claude/skills/bonnard-cli/SKILL.md +59 -0
  44. package/dist/templates/claude/skills/bonnard-queries/SKILL.md +68 -0
  45. package/dist/templates/cursor/rules/bonnard-cli.mdc +47 -0
  46. package/dist/templates/cursor/rules/bonnard-queries.mdc +49 -0
  47. package/dist/templates/cursor/rules/bonnard.mdc +20 -0
  48. package/dist/templates/shared/bonnard.md +81 -0
  49. package/package.json +8 -3
package/dist/bin/bon.mjs CHANGED
@@ -1,19 +1,131 @@
  #!/usr/bin/env node
+ import { createRequire } from "node:module";
  import { program } from "commander";
  import fs from "node:fs";
  import path from "node:path";
+ import { fileURLToPath } from "node:url";
  import pc from "picocolors";
  import http from "node:http";
  import crypto from "node:crypto";
  import os from "node:os";
+ import YAML from "yaml";
+ import { execFileSync } from "node:child_process";
+ import { confirm } from "@inquirer/prompts";
  import { encode } from "@toon-format/toon";

+ //#region rolldown:runtime
+ var __defProp = Object.defineProperty;
+ var __exportAll = (all, symbols) => {
+   let target = {};
+   for (var name in all) {
+     __defProp(target, name, {
+       get: all[name],
+       enumerable: true
+     });
+   }
+   if (symbols) {
+     __defProp(target, Symbol.toStringTag, { value: "Module" });
+   }
+   return target;
+ };
+ var __require = /* @__PURE__ */ createRequire(import.meta.url);
+
+ //#endregion
  //#region src/commands/init.ts
+ const __filename$1 = fileURLToPath(import.meta.url);
+ const __dirname$1 = path.dirname(__filename$1);
+ const TEMPLATES_DIR = path.join(__dirname$1, "..", "templates");
  const BON_YAML_TEMPLATE = (projectName) => `project:
    name: ${projectName}
  `;
  const GITIGNORE_TEMPLATE = `.bon/
  `;
+ /**
+  * Load a template file from the templates directory
+  */
+ function loadTemplate(relativePath) {
+   const templatePath = path.join(TEMPLATES_DIR, relativePath);
+   return fs.readFileSync(templatePath, "utf-8");
+ }
+ /**
+  * Load a JSON template file
+  */
+ function loadJsonTemplate(relativePath) {
+   const content = loadTemplate(relativePath);
+   return JSON.parse(content);
+ }
+ /**
+  * Write a template file, appending if target exists and doesn't already have Bonnard content
+  */
+ function writeTemplateFile(content, targetPath, createdFiles) {
+   if (fs.existsSync(targetPath)) {
+     if (!fs.readFileSync(targetPath, "utf-8").includes("# Bonnard")) {
+       fs.appendFileSync(targetPath, `\n\n${content}`);
+       createdFiles.push(`${path.relative(process.cwd(), targetPath)} (appended)`);
+     }
+   } else {
+     fs.writeFileSync(targetPath, content);
+     createdFiles.push(path.relative(process.cwd(), targetPath));
+   }
+ }
+ /**
+  * Merge settings.json, preserving existing settings
+  */
+ function mergeSettingsJson(templateSettings, targetPath, createdFiles) {
+   if (fs.existsSync(targetPath)) {
+     const existingContent = JSON.parse(fs.readFileSync(targetPath, "utf-8"));
+     const templatePerms = templateSettings.permissions;
+     if (templatePerms?.allow) {
+       existingContent.permissions = existingContent.permissions || {};
+       existingContent.permissions.allow = existingContent.permissions.allow || [];
+       for (const permission of templatePerms.allow) if (!existingContent.permissions.allow.includes(permission)) existingContent.permissions.allow.push(permission);
+     }
+     fs.writeFileSync(targetPath, JSON.stringify(existingContent, null, 2) + "\n");
+     createdFiles.push(`${path.relative(process.cwd(), targetPath)} (merged)`);
+   } else {
+     fs.writeFileSync(targetPath, JSON.stringify(templateSettings, null, 2) + "\n");
+     createdFiles.push(path.relative(process.cwd(), targetPath));
+   }
+ }
+ /**
+  * Add Cursor frontmatter to shared content
+  */
+ function withCursorFrontmatter(content, description, alwaysApply) {
+   return `---
+ description: "${description}"
+ alwaysApply: ${alwaysApply}
+ ---
+
+ ` + content;
+ }
+ /**
+  * Create agent templates (Claude Code, Cursor, and Codex)
+  */
+ function createAgentTemplates(cwd) {
+   const createdFiles = [];
+   const sharedBonnard = loadTemplate("shared/bonnard.md");
+   const claudeRulesDir = path.join(cwd, ".claude", "rules");
+   const claudeSkillsDir = path.join(cwd, ".claude", "skills");
+   fs.mkdirSync(claudeRulesDir, { recursive: true });
+   fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-cli"), { recursive: true });
+   fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-queries"), { recursive: true });
+   writeTemplateFile(sharedBonnard, path.join(claudeRulesDir, "bonnard.md"), createdFiles);
+   writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(claudeSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
+   writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(claudeSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
+   mergeSettingsJson(loadJsonTemplate("claude/settings.json"), path.join(cwd, ".claude", "settings.json"), createdFiles);
+   const cursorRulesDir = path.join(cwd, ".cursor", "rules");
+   fs.mkdirSync(cursorRulesDir, { recursive: true });
+   writeTemplateFile(withCursorFrontmatter(sharedBonnard, "Bonnard semantic layer project context", true), path.join(cursorRulesDir, "bonnard.mdc"), createdFiles);
+   writeTemplateFile(loadTemplate("cursor/rules/bonnard-cli.mdc"), path.join(cursorRulesDir, "bonnard-cli.mdc"), createdFiles);
+   writeTemplateFile(loadTemplate("cursor/rules/bonnard-queries.mdc"), path.join(cursorRulesDir, "bonnard-queries.mdc"), createdFiles);
+   const codexSkillsDir = path.join(cwd, ".agents", "skills");
+   fs.mkdirSync(path.join(codexSkillsDir, "bonnard-cli"), { recursive: true });
+   fs.mkdirSync(path.join(codexSkillsDir, "bonnard-queries"), { recursive: true });
+   writeTemplateFile(sharedBonnard, path.join(cwd, "AGENTS.md"), createdFiles);
+   writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(codexSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
+   writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(codexSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
+   return createdFiles;
+ }
  async function initCommand() {
    const cwd = process.cwd();
    const projectName = path.basename(cwd);
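A note on the merge semantics above: writeTemplateFile only appends when the target lacks a "# Bonnard" marker, and mergeSettingsJson deduplicates permission entries rather than clobbering the file. A minimal sketch of the settings merge (the object shapes here are illustrative assumptions, not the shipped template):

    // Hypothetical existing .claude/settings.json on disk
    const existing = { permissions: { allow: ["Bash(ls:*)"] } };
    // Hypothetical template from dist/templates/claude/settings.json
    const template = { permissions: { allow: ["Bash(ls:*)", "Bash(bon:*)"] } };
    // mergeSettingsJson keeps the existing entry and appends only the new one:
    // => { permissions: { allow: ["Bash(ls:*)", "Bash(bon:*)"] } }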
@@ -26,13 +138,20 @@ async function initCommand() {
    fs.mkdirSync(path.join(cwd, ".bon"), { recursive: true });
    fs.writeFileSync(path.join(cwd, "bon.yaml"), BON_YAML_TEMPLATE(projectName));
    fs.writeFileSync(path.join(cwd, ".gitignore"), GITIGNORE_TEMPLATE);
+   const agentFiles = createAgentTemplates(cwd);
    console.log(pc.green(`Initialised Bonnard project "${projectName}"`));
    console.log();
+   console.log(pc.bold("Core files:"));
    console.log(` ${pc.dim("bon.yaml")} project config`);
    console.log(` ${pc.dim("models/")} model definitions`);
    console.log(` ${pc.dim("views/")} view definitions`);
    console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
    console.log(` ${pc.dim(".gitignore")} git ignore rules`);
+   if (agentFiles.length > 0) {
+     console.log();
+     console.log(pc.bold("Agent support:"));
+     for (const file of agentFiles) console.log(` ${pc.dim(file)}`);
+   }
  }

  //#endregion
@@ -64,7 +183,7 @@ function clearCredentials() {

  //#endregion
  //#region src/commands/login.ts
- const APP_URL$1 = process.env.BON_APP_URL || "http://localhost:3000";
+ const APP_URL$1 = process.env.BON_APP_URL || "https://app.bonnard.dev";
  const TIMEOUT_MS = 120 * 1e3;
  async function loginCommand() {
    const state = crypto.randomUUID();
@@ -200,7 +319,14 @@ async function logoutCommand() {

  //#endregion
  //#region src/lib/api.ts
- const APP_URL = process.env.BON_APP_URL || "http://localhost:3000";
+ var api_exports = /* @__PURE__ */ __exportAll({
+   del: () => del,
+   get: () => get,
+   getRemoteDatasources: () => getRemoteDatasources,
+   post: () => post
+ });
+ const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
+ const VERCEL_BYPASS = process.env.VERCEL_AUTOMATION_BYPASS_SECRET;
  function getToken() {
    const creds = loadCredentials();
    if (!creds) {
@@ -212,12 +338,14 @@ function getToken() {
  async function request(method, path, body) {
    const token = getToken();
    const url = `${APP_URL}${path}`;
+   const headers = {
+     Authorization: `Bearer ${token}`,
+     "Content-Type": "application/json"
+   };
+   if (VERCEL_BYPASS) headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;
    const res = await fetch(url, {
      method,
-     headers: {
-       Authorization: `Bearer ${token}`,
-       "Content-Type": "application/json"
-     },
+     headers,
      body: body ? JSON.stringify(body) : void 0
    });
    const data = await res.json();
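The headers are now built in one object so the optional Vercel protection bypass can be injected into every request. A minimal usage sketch of the helpers (the endpoint and response shape are taken from getRemoteDatasources below):

    // Lists datasources; Authorization comes from the stored credentials.
    // If VERCEL_AUTOMATION_BYPASS_SECRET is set in the environment, the
    // x-vercel-protection-bypass header is sent automatically.
    const { dataSources } = await get("/api/datasources");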
@@ -236,13 +364,383 @@ function post(path, body) {
  function del(path) {
    return request("DELETE", path);
  }
+ /**
+  * Fetch remote datasources from Bonnard server
+  */
+ async function getRemoteDatasources() {
+   return (await get("/api/datasources")).dataSources || [];
+ }
+
+ //#endregion
+ //#region src/commands/whoami.ts
+ async function whoamiCommand(options = {}) {
+   const credentials = loadCredentials();
+   if (!credentials) {
+     console.log(pc.yellow("Not logged in."));
+     console.log(pc.dim("Run `bon login` to authenticate."));
+     process.exit(1);
+   }
+   if (options.verify) try {
+     const result = await get("/api/cli/whoami");
+     console.log(pc.green(`Logged in as ${result.email}`));
+     if (result.orgName) console.log(pc.dim(`Organization: ${result.orgName}`));
+   } catch (err) {
+     console.log(pc.red("Session expired or invalid."));
+     console.log(pc.dim("Run `bon login` to re-authenticate."));
+     process.exit(1);
+   }
+   else {
+     console.log(pc.green(`Logged in as ${credentials.email}`));
+     console.log(pc.dim("Use --verify to check if session is still valid."));
+   }
+ }
+
+ //#endregion
+ //#region src/lib/local/datasources.ts
+ /**
+  * Local datasource storage (.bon/datasources.yaml)
+  *
+  * Single file containing both config and credentials.
+  * Credentials may contain:
+  * - Plain values: "my_password"
+  * - dbt env var syntax: "{{ env_var('MY_PASSWORD') }}"
+  *
+  * Env vars are resolved at deploy time, not import time.
+  */
+ const BON_DIR$1 = ".bon";
+ const DATASOURCES_FILE$1 = "datasources.yaml";
+ function getBonDir(cwd = process.cwd()) {
+   return path.join(cwd, BON_DIR$1);
+ }
+ function getDatasourcesPath$1(cwd = process.cwd()) {
+   return path.join(getBonDir(cwd), DATASOURCES_FILE$1);
+ }
+ /**
+  * Ensure .bon directory exists
+  */
+ function ensureBonDir(cwd = process.cwd()) {
+   const bonDir = getBonDir(cwd);
+   if (!fs.existsSync(bonDir)) fs.mkdirSync(bonDir, { recursive: true });
+ }
+ /**
+  * Load all local datasources
+  */
+ function loadLocalDatasources(cwd = process.cwd()) {
+   const filePath = getDatasourcesPath$1(cwd);
+   if (!fs.existsSync(filePath)) return [];
+   try {
+     const content = fs.readFileSync(filePath, "utf-8");
+     return YAML.parse(content)?.datasources ?? [];
+   } catch {
+     return [];
+   }
+ }
+ /**
+  * Save all local datasources (with secure permissions since it contains credentials)
+  */
+ function saveLocalDatasources(datasources, cwd = process.cwd()) {
+   ensureBonDir(cwd);
+   const filePath = getDatasourcesPath$1(cwd);
+   const file = { datasources };
+   const content = `# Bonnard datasources configuration
+ # This file contains credentials - add to .gitignore
+ # Env vars like {{ env_var('PASSWORD') }} are resolved at deploy time
+
+ ` + YAML.stringify(file, { indent: 2 });
+   fs.writeFileSync(filePath, content, { mode: 384 });
+ }
+ /**
+  * Add a single datasource (updates existing or appends new)
+  */
+ function addLocalDatasource(datasource, cwd = process.cwd()) {
+   const existing = loadLocalDatasources(cwd);
+   const index = existing.findIndex((ds) => ds.name === datasource.name);
+   if (index >= 0) existing[index] = datasource;
+   else existing.push(datasource);
+   saveLocalDatasources(existing, cwd);
+ }
+ /**
+  * Remove a datasource by name
+  */
+ function removeLocalDatasource(name, cwd = process.cwd()) {
+   const existing = loadLocalDatasources(cwd);
+   const filtered = existing.filter((ds) => ds.name !== name);
+   if (filtered.length === existing.length) return false;
+   saveLocalDatasources(filtered, cwd);
+   return true;
+ }
+ /**
+  * Get a single datasource by name
+  */
+ function getLocalDatasource(name, cwd = process.cwd()) {
+   return loadLocalDatasources(cwd).find((ds) => ds.name === name) ?? null;
+ }
+ /**
+  * Check if a datasource name already exists locally
+  */
+ function datasourceExists(name, cwd = process.cwd()) {
+   return getLocalDatasource(name, cwd) !== null;
+ }
+ /**
+  * Resolve {{ env_var('VAR_NAME') }} patterns in credentials
+  * Used at deploy time to resolve env vars before uploading
+  */
+ function resolveEnvVarsInCredentials(credentials) {
+   const resolved = {};
+   const missing = [];
+   const envVarPattern = /\{\{\s*env_var\(['"]([\w_]+)['"]\)\s*\}\}/;
+   for (const [key, value] of Object.entries(credentials)) {
+     const match = value.match(envVarPattern);
+     if (match) {
+       const varName = match[1];
+       const envValue = process.env[varName];
+       if (envValue !== void 0) resolved[key] = envValue;
+       else {
+         missing.push(varName);
+         resolved[key] = value;
+       }
+     } else resolved[key] = value;
+   }
+   return {
+     resolved,
+     missing
+   };
+ }
+
+ //#endregion
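The resolver leaves unresolved references in place and reports them, so callers can fail with a useful message instead of uploading a literal template string. A quick sketch, assuming PGPASS is exported and SNOWFLAKE_PW is not (both variable names hypothetical):

    const { resolved, missing } = resolveEnvVarsInCredentials({
      password: "{{ env_var('PGPASS') }}",    // becomes process.env.PGPASS
      token: "{{ env_var('SNOWFLAKE_PW') }}", // kept as-is, reported in missing
      username: "analytics"                   // plain values pass through
    });
    // missing === ["SNOWFLAKE_PW"]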
+ //#region src/lib/local/credentials.ts
+ /**
+  * Credential utilities (git tracking check)
+  */
+ const BON_DIR = ".bon";
+ const DATASOURCES_FILE = "datasources.yaml";
+ function getDatasourcesPath(cwd = process.cwd()) {
+   return path.join(cwd, BON_DIR, DATASOURCES_FILE);
+ }
+ /**
+  * Check if datasources file is tracked by git (it shouldn't be - contains credentials)
+  */
+ function isDatasourcesTrackedByGit(cwd = process.cwd()) {
+   const filePath = getDatasourcesPath(cwd);
+   if (!fs.existsSync(filePath)) return false;
+   try {
+     execFileSync("git", [
+       "ls-files",
+       "--error-unmatch",
+       filePath
+     ], {
+       cwd,
+       stdio: "pipe"
+     });
+     return true;
+   } catch {
+     return false;
+   }
+ }
+
+ //#endregion
+ //#region src/lib/local/index.ts
+ var local_exports = /* @__PURE__ */ __exportAll({
+   addLocalDatasource: () => addLocalDatasource,
+   datasourceExists: () => datasourceExists,
+   ensureBonDir: () => ensureBonDir,
+   getLocalDatasource: () => getLocalDatasource,
+   isDatasourcesTrackedByGit: () => isDatasourcesTrackedByGit,
+   loadLocalDatasources: () => loadLocalDatasources,
+   removeLocalDatasource: () => removeLocalDatasource,
+   resolveEnvVarsInCredentials: () => resolveEnvVarsInCredentials,
+   saveLocalDatasources: () => saveLocalDatasources
+ });
+
+ //#endregion
+ //#region src/lib/dbt/profiles.ts
+ /**
+  * dbt profiles.yml parser
+  *
+  * Parses ~/.dbt/profiles.yml and extracts connection configs.
+  * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
+  */
+ const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
+ /**
+  * Check if dbt profiles.yml exists
+  */
+ function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
+   return fs.existsSync(profilesPath);
+ }
+ /**
+  * Get the default dbt profiles path
+  */
+ function getDefaultProfilesPath() {
+   return DBT_PROFILES_PATH;
+ }
+ /**
+  * Map dbt type to Bonnard warehouse type
+  */
+ function mapDbtType(dbtType) {
+   return {
+     snowflake: "snowflake",
+     postgres: "postgres",
+     postgresql: "postgres",
+     bigquery: "bigquery",
+     databricks: "databricks"
+   }[dbtType.toLowerCase()] ?? null;
+ }
+ /**
+  * Parse dbt profiles.yml and return all connections
+  * Config values are kept as-is (including {{ env_var(...) }} patterns)
+  */
+ function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
+   if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
+   const content = fs.readFileSync(profilesPath, "utf-8");
+   const profiles = YAML.parse(content);
+   if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
+   const connections = [];
+   for (const [profileName, profile] of Object.entries(profiles)) {
+     if (profileName === "config") continue;
+     if (!profile.outputs || typeof profile.outputs !== "object") continue;
+     const defaultTarget = profile.target || "dev";
+     for (const [targetName, target] of Object.entries(profile.outputs)) {
+       if (!target || typeof target !== "object" || !target.type) continue;
+       const warehouseType = mapDbtType(target.type);
+       if (!warehouseType) continue;
+       connections.push({
+         profileName,
+         targetName,
+         isDefaultTarget: targetName === defaultTarget,
+         type: warehouseType,
+         config: target
+       });
+     }
+   }
+   return connections;
+ }
+
+ //#endregion
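For reference, a profiles.yml shaped like the comment below (all names hypothetical) yields one connection entry per supported output, with config values left untouched:

    // ~/.dbt/profiles.yml (hypothetical):
    //   analytics:
    //     target: dev
    //     outputs:
    //       dev:  { type: postgres,  host: localhost, ... }
    //       prod: { type: snowflake, account: xy12345, ... }
    const connections = parseDbtProfiles();
    // [
    //   { profileName: "analytics", targetName: "dev",  isDefaultTarget: true,  type: "postgres",  config: {...} },
    //   { profileName: "analytics", targetName: "prod", isDefaultTarget: false, type: "snowflake", config: {...} }
    // ]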
+ //#region src/lib/dbt/mapping.ts
+ /**
+  * Generate a datasource name from profile/target
+  */
+ function generateDatasourceName(profileName, targetName) {
+   return `${profileName}-${targetName}`.toLowerCase().replace(/[^a-z0-9-]/g, "-");
+ }
+ /**
+  * Extract string value from config, handling numbers
+  */
+ function getString(config, key) {
+   const value = config[key];
+   if (value === void 0 || value === null) return void 0;
+   return String(value);
+ }
+ /**
+  * Map Snowflake dbt config to Bonnard format
+  */
+ function mapSnowflake(config) {
+   return {
+     config: {
+       ...getString(config, "account") && { account: getString(config, "account") },
+       ...getString(config, "database") && { database: getString(config, "database") },
+       ...getString(config, "warehouse") && { warehouse: getString(config, "warehouse") },
+       ...getString(config, "schema") && { schema: getString(config, "schema") },
+       ...getString(config, "role") && { role: getString(config, "role") }
+     },
+     credentials: {
+       ...getString(config, "user") && { username: getString(config, "user") },
+       ...getString(config, "password") && { password: getString(config, "password") }
+     }
+   };
+ }
+ /**
+  * Map Postgres dbt config to Bonnard format
+  */
+ function mapPostgres(config) {
+   const database = getString(config, "dbname") || getString(config, "database");
+   return {
+     config: {
+       ...getString(config, "host") && { host: getString(config, "host") },
+       ...getString(config, "port") && { port: getString(config, "port") },
+       ...database && { database },
+       ...getString(config, "schema") && { schema: getString(config, "schema") },
+       ...getString(config, "sslmode") && { sslmode: getString(config, "sslmode") }
+     },
+     credentials: {
+       ...getString(config, "user") && { username: getString(config, "user") },
+       ...getString(config, "password") && { password: getString(config, "password") }
+     }
+   };
+ }
+ /**
+  * Map BigQuery dbt config to Bonnard format
+  */
+ function mapBigQuery(config) {
+   const credentials = {};
+   if (config.keyfile && typeof config.keyfile === "string") try {
+     credentials.service_account_json = __require("node:fs").readFileSync(config.keyfile, "utf-8");
+   } catch {
+     credentials.keyfile_path = config.keyfile;
+   }
+   else if (config.keyfile_json) credentials.service_account_json = JSON.stringify(config.keyfile_json);
+   return {
+     config: {
+       ...getString(config, "project") && { project_id: getString(config, "project") },
+       ...getString(config, "dataset") && { dataset: getString(config, "dataset") },
+       ...getString(config, "location") && { location: getString(config, "location") }
+     },
+     credentials
+   };
+ }
+ /**
+  * Map Databricks dbt config to Bonnard format
+  */
+ function mapDatabricks(config) {
+   return {
+     config: {
+       ...getString(config, "host") && { hostname: getString(config, "host") },
+       ...getString(config, "http_path") && { http_path: getString(config, "http_path") },
+       ...getString(config, "catalog") && { catalog: getString(config, "catalog") },
+       ...getString(config, "schema") && { schema: getString(config, "schema") }
+     },
+     credentials: { ...getString(config, "token") && { token: getString(config, "token") } }
+   };
+ }
+ /**
+  * Map a parsed dbt connection to Bonnard format
+  * Values are copied as-is, including {{ env_var(...) }} patterns
+  */
+ function mapDbtConnection(connection) {
+   const { profileName, targetName, type, config } = connection;
+   let mapped;
+   switch (type) {
+     case "snowflake":
+       mapped = mapSnowflake(config);
+       break;
+     case "postgres":
+       mapped = mapPostgres(config);
+       break;
+     case "bigquery":
+       mapped = mapBigQuery(config);
+       break;
+     case "databricks":
+       mapped = mapDatabricks(config);
+       break;
+     default: throw new Error(`Unsupported warehouse type: ${type}`);
+   }
+   return { datasource: {
+     name: generateDatasourceName(profileName, targetName),
+     type,
+     source: "dbt",
+     dbtProfile: profileName,
+     dbtTarget: targetName,
+     config: mapped.config,
+     credentials: mapped.credentials
+   } };
+ }

  //#endregion
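Putting the mapping together, one parsed connection becomes a local datasource record, with the name slugified by generateDatasourceName. A sketch with hypothetical values:

    const { datasource } = mapDbtConnection({
      profileName: "Analytics",
      targetName: "prod",
      type: "snowflake",
      config: { account: "xy12345", user: "LOADER", password: "{{ env_var('SF_PW') }}" }
    });
    // datasource.name === "analytics-prod" (lowercased; chars outside [a-z0-9-] become "-")
    // datasource.credentials.password keeps the env_var reference until deploy time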
  //#region src/commands/datasource/add.ts
  async function prompts() {
    return import("@inquirer/prompts");
  }
- const WAREHOUSE_TYPES = [
+ const WAREHOUSE_CONFIGS = [
    {
      value: "snowflake",
      label: "Snowflake",
@@ -274,11 +772,14 @@ const WAREHOUSE_TYPES = [
      ],
      credentialFields: [{
        name: "username",
-       message: "Username"
+       flag: "user",
+       message: "Username",
+       required: true
      }, {
        name: "password",
        message: "Password",
-       secret: true
+       secret: true,
+       required: true
      }]
    },
    {
@@ -292,7 +793,8 @@ const WAREHOUSE_TYPES = [
      },
      {
        name: "port",
-       message: "Port (default: 5432)"
+       message: "Port",
+       default: "5432"
      },
      {
        name: "database",
@@ -301,16 +803,20 @@ const WAREHOUSE_TYPES = [
      },
      {
        name: "schema",
-       message: "Schema (default: public)"
+       message: "Schema",
+       default: "public"
      }
      ],
      credentialFields: [{
        name: "username",
-       message: "Username"
+       flag: "user",
+       message: "Username",
+       required: true
      }, {
        name: "password",
        message: "Password",
-       secret: true
+       secret: true,
+       required: true
      }]
    },
    {
@@ -319,6 +825,7 @@ const WAREHOUSE_TYPES = [
      configFields: [
      {
        name: "project_id",
+       flag: "projectId",
        message: "GCP Project ID",
        required: true
      },
@@ -334,7 +841,12 @@ const WAREHOUSE_TYPES = [
      ],
      credentialFields: [{
        name: "service_account_json",
-       message: "Service account JSON (paste or path)"
+       flag: "serviceAccountJson",
+       message: "Service account JSON"
+     }, {
+       name: "keyfile_path",
+       flag: "keyfile",
+       message: "Path to service account key file"
      }]
    },
    {
@@ -348,96 +860,799 @@ const WAREHOUSE_TYPES = [
      },
      {
        name: "http_path",
+       flag: "httpPath",
        message: "HTTP path",
        required: true
      },
      {
        name: "catalog",
        message: "Catalog name"
+     },
+     {
+       name: "schema",
+       message: "Schema name"
      }
      ],
      credentialFields: [{
        name: "token",
        message: "Personal access token",
-       secret: true
+       secret: true,
+       required: true
      }]
    }
  ];
- async function datasourceAddCommand() {
-   const { input, select, password } = await prompts();
-   let name;
-   while (true) {
-     name = await input({ message: "Name for this data source:" });
-     const { dataSources } = await get("/api/datasources");
-     if (dataSources.some((ds) => ds.name === name)) {
-       console.log(pc.red(`A data source named "${name}" already exists. Choose a different name.`));
+ /**
+  * Convert env var name to dbt-style reference
+  */
+ function envVarRef(varName) {
+   return `{{ env_var('${varName}') }}`;
+ }
+ /**
+  * Format warehouse type for display
+  */
+ function formatType$1(type) {
+   return {
+     snowflake: "Snowflake",
+     postgres: "Postgres",
+     bigquery: "BigQuery",
+     databricks: "Databricks"
+   }[type] || type;
+ }
+ /**
+  * Get value from options, checking both direct and flag name
+  */
+ function getOptionValue(options, field) {
+   return options[(field.flag || field.name).replace(/_([a-z])/g, (_, c) => c.toUpperCase())];
+ }
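getOptionValue bridges snake_case field names and camelCase commander option keys; the flag property wins when present. For example (values hypothetical):

    // field { name: "http_path", flag: "httpPath" } reads options.httpPath;
    // a field with only name: "project_id" would read options.projectId.
    getOptionValue({ httpPath: "/sql/1.0/warehouses/abc" }, { name: "http_path", flag: "httpPath" });
    // => "/sql/1.0/warehouses/abc"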
+ /**
+  * Check if running in non-interactive mode (name and type provided via flags)
+  */
+ function isNonInteractive(options) {
+   return !!(options.name && options.type);
+ }
+ /**
+  * Import datasources from dbt profiles.yml
+  */
+ async function importFromDbt(options) {
+   const profilesPath = getDefaultProfilesPath();
+   if (!dbtProfilesExist(profilesPath)) {
+     console.error(pc.red(`dbt profiles not found at ${profilesPath}`));
+     console.log(pc.dim("Make sure dbt is configured with ~/.dbt/profiles.yml"));
+     process.exit(1);
+   }
+   let connections;
+   try {
+     connections = parseDbtProfiles(profilesPath);
+   } catch (err) {
+     console.error(pc.red(`Failed to parse dbt profiles: ${err.message}`));
+     process.exit(1);
+   }
+   if (connections.length === 0) {
+     console.log(pc.yellow("No supported connections found in dbt profiles."));
+     console.log(pc.dim("Supported types: snowflake, postgres, bigquery, databricks"));
+     process.exit(0);
+   }
+   if (typeof options.fromDbt === "string") {
+     const parts = options.fromDbt.split("/");
+     const profileName = parts[0];
+     const targetName = options.target || parts[1];
+     const filtered = connections.filter((c) => {
+       if (c.profileName !== profileName) return false;
+       if (targetName) return c.targetName === targetName;
+       return c.isDefaultTarget;
+     });
+     if (filtered.length === 0) {
+       console.error(pc.red(`Profile "${profileName}"${targetName ? `/${targetName}` : ""} not found`));
+       process.exit(1);
+     }
+     await importConnections(filtered);
+     return;
+   }
+   if (options.all) {
+     await importConnections(connections);
+     return;
+   }
+   if (options.defaultTargets) {
+     await importConnections(connections.filter((c) => c.isDefaultTarget));
+     return;
+   }
+   const { checkbox } = await prompts();
+   console.log();
+   console.log(pc.bold(`Found ${connections.length} connections in ~/.dbt/profiles.yml:`));
+   console.log();
+   const selected = await checkbox({
+     message: "Select connections to import:",
+     choices: connections.map((conn) => {
+       const name = `${conn.profileName}/${conn.targetName}`;
+       const typeLabel = formatType$1(conn.type);
+       const defaultLabel = conn.isDefaultTarget ? pc.cyan(" (default)") : "";
+       return {
+         name: `${name.padEnd(30)} ${typeLabel}${defaultLabel}`,
+         value: conn,
+         checked: conn.isDefaultTarget
+       };
+     }),
+     pageSize: 15
+   });
+   if (selected.length === 0) {
+     console.log(pc.yellow("No connections selected."));
+     return;
+   }
+   await importConnections(selected);
+ }
+ /**
+  * Import selected connections
+  */
+ async function importConnections(connections) {
+   console.log();
+   if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
+   let imported = 0;
+   let skipped = 0;
+   for (const conn of connections) {
+     const { profileName, targetName } = conn;
+     const name = generateDatasourceName(profileName, targetName);
+     if (datasourceExists(name)) {
+       console.log(pc.dim(`• ${profileName}/${targetName} → ${name} (already exists, skipped)`));
+       skipped++;
        continue;
      }
-     break;
+     try {
+       addLocalDatasource(mapDbtConnection(conn).datasource);
+       console.log(pc.green(`✓ ${profileName}/${targetName} → ${name} (${conn.type})`));
+       imported++;
+     } catch (err) {
+       console.log(pc.red(`✗ ${profileName}/${targetName}: ${err.message}`));
+       skipped++;
+     }
    }
- const warehouseType = await select({
+   console.log();
+   if (imported > 0) {
+     console.log(pc.green(`Imported ${imported} datasource${imported !== 1 ? "s" : ""}`));
+     console.log(pc.dim(" .bon/datasources.yaml"));
+   }
+   if (skipped > 0) console.log(pc.dim(`Skipped ${skipped} connection${skipped !== 1 ? "s" : ""}`));
+ }
+ /**
+  * Add datasource manually (with flags and/or interactive prompts)
+  */
+ async function addManual(options) {
+   const { input, select, password, confirm } = await prompts();
+   const nonInteractive = isNonInteractive(options);
+   if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
+   let name = options.name;
+   if (!name) name = await input({ message: "Datasource name:" });
+   if (datasourceExists(name)) {
+     if (options.force) console.log(pc.dim(`Overwriting existing datasource "${name}"`));
+     else if (nonInteractive) {
+       console.error(pc.red(`Datasource "${name}" already exists. Use --force to overwrite.`));
+       process.exit(1);
+     } else if (!await confirm({
+       message: `Datasource "${name}" already exists. Overwrite?`,
+       default: false
+     })) {
+       console.log(pc.yellow("Cancelled."));
+       return;
+     }
+   }
+   let warehouseType = options.type;
+   if (!warehouseType) warehouseType = await select({
      message: "Warehouse type:",
-     choices: WAREHOUSE_TYPES.map((w) => ({
+     choices: WAREHOUSE_CONFIGS.map((w) => ({
        name: w.label,
        value: w.value
      }))
    });
-   const wt = WAREHOUSE_TYPES.find((w) => w.value === warehouseType);
+   const warehouseConfig = WAREHOUSE_CONFIGS.find((w) => w.value === warehouseType);
+   if (!warehouseConfig) {
+     console.error(pc.red(`Invalid warehouse type: ${warehouseType}`));
+     console.log(pc.dim("Valid types: snowflake, postgres, bigquery, databricks"));
+     process.exit(1);
+   }
    const config = {};
-   for (const field of wt.configFields) {
-     const value = await input({
-       message: field.message,
-       required: field.required
-     });
+   for (const field of warehouseConfig.configFields) {
+     let value = getOptionValue(options, field);
+     if (!value && !nonInteractive) {
+       const defaultHint = field.default ? ` (default: ${field.default})` : "";
+       value = await input({
+         message: field.message + defaultHint + ":",
+         default: field.default
+       });
+     }
      if (value) config[field.name] = value;
+     else if (field.required) {
+       console.error(pc.red(`Missing required field: ${field.name}`));
+       process.exit(1);
+     }
    }
    const credentials = {};
-   for (const field of wt.credentialFields) {
-     const value = field.secret ? await password({ message: field.message }) : await input({ message: field.message });
+   for (const field of warehouseConfig.credentialFields) {
+     let value;
+     if (field.name === "password" && options.passwordEnv) value = envVarRef(options.passwordEnv);
+     else if (field.name === "token" && options.tokenEnv) value = envVarRef(options.tokenEnv);
+     else value = getOptionValue(options, field);
+     if (!value && !nonInteractive) if (field.secret) value = await password({ message: field.message + ":" });
+     else value = await input({ message: field.message + ":" });
      if (value) credentials[field.name] = value;
+     else if (field.required) {
+       console.error(pc.red(`Missing required credential: ${field.name}`));
+       process.exit(1);
+     }
    }
-   try {
-     const result = await post("/api/datasources", {
-       name,
-       warehouse_type: warehouseType,
-       config,
-       credentials
-     });
-     console.log(pc.green(`Data source "${result.dataSource.name}" created (${result.dataSource.id})`));
-   } catch (err) {
-     console.error(pc.red(`Failed to create data source: ${err.message}`));
-     process.exit(1);
-   }
+   addLocalDatasource({
+     name,
+     type: warehouseType,
+     source: "manual",
+     config,
+     credentials
+   });
+   console.log();
+   console.log(pc.green(`✓ Datasource "${name}" saved to .bon/datasources.yaml`));
+   console.log();
+   console.log(pc.dim(`Test connection: bon datasource test ${name}`));
+ }
+ /**
+  * Main datasource add command
+  */
+ async function datasourceAddCommand(options = {}) {
+   if (options.fromDbt !== void 0) await importFromDbt(options);
+   else await addManual(options);
  }

  //#endregion
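The add command is now driveable without prompts once name and type are supplied; other flags fill the config and credential fields through the mapping above. A sketch of the two programmatic entry points (option keys follow the flag mapping; values hypothetical):

    // Non-interactive manual add; --password-env stores an env_var reference
    // rather than a plaintext secret.
    await datasourceAddCommand({
      name: "warehouse",
      type: "postgres",
      host: "db.internal",
      database: "analytics",
      user: "bon",
      passwordEnv: "PGPASSWORD"
    });
    // Import a single dbt profile/target instead of prompting:
    await datasourceAddCommand({ fromDbt: "analytics", target: "prod" });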
  //#region src/commands/datasource/list.ts
- async function datasourceListCommand() {
+ /**
+  * Format warehouse type for display
+  */
+ function formatType(type) {
+   return {
+     snowflake: "Snowflake",
+     postgres: "Postgres",
+     bigquery: "BigQuery",
+     databricks: "Databricks"
+   }[type] || type;
+ }
+ /**
+  * Format source for display
+  */
+ function formatSource(source) {
+   return {
+     dbt: "dbt",
+     manual: "manual",
+     mcp: "mcp"
+   }[source] || source;
+ }
+ /**
+  * List local datasources
+  */
+ function listLocalDatasources() {
+   const datasources = loadLocalDatasources();
+   if (datasources.length === 0) {
+     console.log(pc.dim("No local data sources found."));
+     console.log(pc.dim("Run `bon datasource add` or `bon datasource add --from-dbt` to create one."));
+     return;
+   }
+   console.log(pc.bold("Local Data Sources") + pc.dim(" (.bon/datasources.yaml)"));
+   console.log();
+   const maxNameLen = Math.max(...datasources.map((ds) => ds.name.length), 4);
+   const maxTypeLen = Math.max(...datasources.map((ds) => formatType(ds.type).length), 4);
+   const header = ` ${"NAME".padEnd(maxNameLen)} ${"TYPE".padEnd(maxTypeLen)} SOURCE ORIGIN`;
+   console.log(pc.dim(header));
+   console.log(pc.dim(" " + "─".repeat(header.length - 2)));
+   for (const ds of datasources) {
+     const name = ds.name.padEnd(maxNameLen);
+     const type = formatType(ds.type).padEnd(maxTypeLen);
+     const source = formatSource(ds.source).padEnd(10);
+     let origin = "";
+     if (ds.source === "dbt" && ds.dbtProfile) origin = `${ds.dbtProfile}/${ds.dbtTarget}`;
+     console.log(` ${pc.bold(name)} ${type} ${source} ${pc.dim(origin)}`);
+   }
+   console.log();
+   console.log(pc.dim(`${datasources.length} datasource${datasources.length !== 1 ? "s" : ""}`));
+ }
+ /**
+  * List remote datasources (requires login)
+  */
+ async function listRemoteDatasources() {
+   if (!loadCredentials()) {
+     console.log(pc.dim("Not logged in. Run `bon login` to see remote data sources."));
+     return;
+   }
    try {
+     const { get } = await Promise.resolve().then(() => api_exports);
      const result = await get("/api/datasources");
      if (result.dataSources.length === 0) {
-       console.log(pc.dim("No data sources found. Run `bon datasource add` to create one."));
+       console.log(pc.dim("No remote data sources found."));
        return;
      }
-     console.log(pc.bold("Data Sources\n"));
+     console.log(pc.bold("Remote Data Sources") + pc.dim(" (Bonnard server)"));
+     console.log();
+     const maxNameLen = Math.max(...result.dataSources.map((ds) => ds.name.length), 4);
+     const maxTypeLen = Math.max(...result.dataSources.map((ds) => ds.warehouse_type.length), 4);
+     const header = ` ${"NAME".padEnd(maxNameLen)} ${"TYPE".padEnd(maxTypeLen)} STATUS`;
+     console.log(pc.dim(header));
+     console.log(pc.dim(" " + "─".repeat(header.length - 2)));
      for (const ds of result.dataSources) {
+       const name = ds.name.padEnd(maxNameLen);
+       const type = ds.warehouse_type.padEnd(maxTypeLen);
        const statusColor = ds.status === "active" ? pc.green : ds.status === "error" ? pc.red : pc.yellow;
-       console.log(` ${pc.bold(ds.name)}`);
-       console.log(` ID: ${pc.dim(ds.id)}`);
-       console.log(` Type: ${ds.warehouse_type}`);
-       console.log(` Status: ${statusColor(ds.status)}`);
-       console.log(` Created: ${new Date(ds.created_at).toLocaleDateString()}`);
-       console.log();
+       console.log(` ${pc.bold(name)} ${type} ${statusColor(ds.status)}`);
      }
+     console.log();
+     console.log(pc.dim(`${result.dataSources.length} datasource${result.dataSources.length !== 1 ? "s" : ""}`));
    } catch (err) {
-     console.error(pc.red(`Failed to list data sources: ${err.message}`));
-     process.exit(1);
+     console.log(pc.yellow(`Could not fetch remote sources: ${err.message}`));
+   }
+ }
+ /**
+  * Main list command
+  */
+ async function datasourceListCommand(options = {}) {
+   const showLocal = options.local || !options.local && !options.remote;
+   const showRemote = options.remote || !options.local && !options.remote;
+   if (showLocal) listLocalDatasources();
+   if (showLocal && showRemote) console.log();
+   if (showRemote) await listRemoteDatasources();
+ }
+
+ //#endregion
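datasourceListCommand defaults to showing both tables; --local or --remote narrows the output. A quick sketch of the flag resolution:

    await datasourceListCommand({});               // local + remote sections
    await datasourceListCommand({ local: true });  // only .bon/datasources.yaml
    await datasourceListCommand({ remote: true }); // only the Bonnard server list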
+ //#region src/lib/connection/snowflake.ts
+ /**
+  * Snowflake connection testing and querying
+  */
+ const require$4 = createRequire(import.meta.url);
+ function loadSnowflake() {
+   try {
+     const snowflake = require$4("snowflake-sdk");
+     snowflake.configure({ logLevel: "ERROR" });
+     return snowflake;
+   } catch {
+     return null;
+   }
+ }
+ async function testSnowflakeConnection(config, credentials) {
+   const snowflake = loadSnowflake();
+   if (!snowflake) return {
+     success: false,
+     message: "Snowflake driver not installed",
+     error: "Run: pnpm add snowflake-sdk"
+   };
+   const startTime = Date.now();
+   return new Promise((resolve) => {
+     const connection = snowflake.createConnection({
+       account: config.account,
+       username: credentials.username,
+       password: credentials.password,
+       database: config.database,
+       warehouse: config.warehouse,
+       schema: config.schema,
+       role: config.role
+     });
+     connection.connect((err) => {
+       if (err) {
+         resolve({
+           success: false,
+           message: "Connection failed",
+           error: err.message
+         });
+         return;
+       }
+       connection.execute({
+         sqlText: "SELECT 1",
+         complete: (queryErr) => {
+           const latencyMs = Date.now() - startTime;
+           connection.destroy(() => {});
+           if (queryErr) resolve({
+             success: false,
+             message: "Query failed",
+             error: queryErr.message,
+             latencyMs
+           });
+           else resolve({
+             success: true,
+             message: "Connection successful",
+             latencyMs
+           });
+         }
+       });
+     });
+   });
+ }
+ async function querySnowflake(config, credentials, sql, options = {}) {
+   const snowflake = loadSnowflake();
+   if (!snowflake) return {
+     columns: [],
+     rows: [],
+     rowCount: 0,
+     truncated: false,
+     error: "Snowflake driver not installed. Run: pnpm add snowflake-sdk"
+   };
+   const limit = options.limit ?? 1e3;
+   return new Promise((resolve) => {
+     const connection = snowflake.createConnection({
+       account: config.account,
+       username: credentials.username,
+       password: credentials.password,
+       database: options.database || config.database,
+       warehouse: config.warehouse,
+       schema: options.schema || config.schema,
+       role: config.role
+     });
+     connection.connect((err) => {
+       if (err) {
+         resolve({
+           columns: [],
+           rows: [],
+           rowCount: 0,
+           truncated: false,
+           error: err.message
+         });
+         return;
+       }
+       connection.execute({
+         sqlText: sql,
+         complete: (queryErr, _stmt, rows) => {
+           connection.destroy(() => {});
+           if (queryErr) {
+             resolve({
+               columns: [],
+               rows: [],
+               rowCount: 0,
+               truncated: false,
+               error: queryErr.message
+             });
+             return;
+           }
+           const allRows = rows || [];
+           const truncated = allRows.length > limit;
+           const resultRows = truncated ? allRows.slice(0, limit) : allRows;
+           resolve({
+             columns: resultRows.length > 0 ? Object.keys(resultRows[0]) : [],
+             rows: resultRows,
+             rowCount: resultRows.length,
+             truncated
+           });
+         }
+       });
+     });
+   });
+ }
+
+ //#endregion
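Each warehouse driver is loaded lazily through createRequire, so the SDKs can stay optional dependencies and a missing driver degrades to a structured error instead of throwing at import. A sketch of handling the result union (connection values hypothetical):

    const result = await testSnowflakeConnection(
      { account: "xy12345", database: "ANALYTICS", warehouse: "WH", schema: "PUBLIC" },
      { username: "LOADER", password: "secret" }
    );
    if (!result.success) console.error(result.message, result.error); // e.g. "Snowflake driver not installed"
    else console.log(`ok in ${result.latencyMs}ms`);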
+ //#region src/lib/connection/postgres.ts
+ /**
+  * Postgres connection testing and querying
+  */
+ const require$3 = createRequire(import.meta.url);
+ function loadPg() {
+   try {
+     return require$3("pg");
+   } catch {
+     return null;
+   }
+ }
+ function createClient(config, credentials, pg) {
+   return new pg.Client({
+     host: config.host,
+     port: config.port ? parseInt(config.port, 10) : 5432,
+     database: config.database,
+     user: credentials.username,
+     password: credentials.password,
+     ssl: config.sslmode === "require" ? { rejectUnauthorized: false } : void 0
+   });
+ }
+ async function testPostgresConnection(config, credentials) {
+   const pg = loadPg();
+   if (!pg) return {
+     success: false,
+     message: "Postgres driver not installed",
+     error: "Run: pnpm add pg"
+   };
+   const startTime = Date.now();
+   const client = createClient(config, credentials, pg);
+   try {
+     await client.connect();
+     await client.query("SELECT 1");
+     const latencyMs = Date.now() - startTime;
+     await client.end();
+     return {
+       success: true,
+       message: "Connection successful",
+       latencyMs
+     };
+   } catch (err) {
+     try {
+       await client.end();
+     } catch {}
+     return {
+       success: false,
+       message: "Connection failed",
+       error: err.message
+     };
+   }
+ }
+ async function queryPostgres(config, credentials, sql, options = {}) {
+   const pg = loadPg();
+   if (!pg) return {
+     columns: [],
+     rows: [],
+     rowCount: 0,
+     truncated: false,
+     error: "Postgres driver not installed. Run: pnpm add pg"
+   };
+   const limit = options.limit ?? 1e3;
+   const client = createClient(config, credentials, pg);
+   try {
+     await client.connect();
+     const schema = options.schema || config.schema;
+     if (schema) await client.query(`SET search_path TO ${schema}`);
+     const result = await client.query(sql);
+     await client.end();
+     const columns = result.fields?.map((f) => f.name) || [];
+     const allRows = result.rows || [];
+     const truncated = allRows.length > limit;
+     const rows = truncated ? allRows.slice(0, limit) : allRows;
+     return {
+       columns,
+       rows,
+       rowCount: rows.length,
+       truncated
+     };
+   } catch (err) {
+     try {
+       await client.end();
+     } catch {}
+     return {
+       columns: [],
+       rows: [],
+       rowCount: 0,
+       truncated: false,
+       error: err.message
+     };
+   }
+ }
+
+ //#endregion
+ //#region src/lib/connection/bigquery.ts
+ /**
+  * BigQuery connection testing
+  */
+ const require$2 = createRequire(import.meta.url);
+ async function testBigQueryConnection(config, credentials) {
+   let BigQuery;
+   try {
+     BigQuery = require$2("@google-cloud/bigquery").BigQuery;
+   } catch {
+     return {
+       success: false,
+       message: "BigQuery driver not installed",
+       error: "Run: pnpm add @google-cloud/bigquery"
+     };
+   }
+   const startTime = Date.now();
+   try {
+     const options = { projectId: config.project_id };
+     if (config.location) options.location = config.location;
+     if (credentials.service_account_json) options.credentials = JSON.parse(credentials.service_account_json);
+     else if (credentials.keyfile_path) options.keyFilename = credentials.keyfile_path;
+     await new BigQuery(options).query("SELECT 1");
+     return {
+       success: true,
+       message: "Connection successful",
+       latencyMs: Date.now() - startTime
+     };
+   } catch (err) {
+     return {
+       success: false,
+       message: "Connection failed",
+       error: err.message
+     };
+   }
+ }
+
+ //#endregion
+ //#region src/lib/connection/databricks.ts
+ /**
+  * Databricks connection testing
+  */
+ const require$1 = createRequire(import.meta.url);
+ async function testDatabricksConnection(config, credentials) {
+   let DBSQLClient;
+   try {
+     const module = require$1("@databricks/sql");
+     DBSQLClient = module.default || module;
+   } catch {
+     return {
+       success: false,
+       message: "Databricks driver not installed",
+       error: "Run: pnpm add @databricks/sql"
+     };
+   }
+   const startTime = Date.now();
+   const client = new DBSQLClient();
+   try {
+     const connection = await client.connect({
+       host: config.hostname,
+       path: config.http_path,
+       token: credentials.token
+     });
+     const session = await connection.openSession({
+       initialCatalog: config.catalog,
+       initialSchema: config.schema
+     });
+     const operation = await session.executeStatement("SELECT 1");
+     await operation.fetchAll();
+     await operation.close();
+     const latencyMs = Date.now() - startTime;
+     await session.close();
+     await connection.close();
+     return {
+       success: true,
+       message: "Connection successful",
+       latencyMs
+     };
+   } catch (err) {
+     try {
+       await client.close();
+     } catch {}
+     return {
+       success: false,
+       message: "Connection failed",
+       error: err.message
+     };
+   }
+ }
+
+ //#endregion
+ //#region src/lib/connection/index.ts
+ var connection_exports = /* @__PURE__ */ __exportAll({
+   executeQuery: () => executeQuery,
+   testConnection: () => testConnection
+ });
+ /**
+  * Test connection to a datasource
+  */
+ async function testConnection(datasource) {
+   const { type, config, credentials } = datasource;
+   switch (type) {
+     case "snowflake": return testSnowflakeConnection({
+       account: config.account,
+       database: config.database,
+       warehouse: config.warehouse,
+       schema: config.schema,
+       role: config.role
+     }, {
+       username: credentials.username,
+       password: credentials.password
+     });
+     case "postgres": return testPostgresConnection({
+       host: config.host,
+       port: config.port,
+       database: config.database,
+       schema: config.schema,
+       sslmode: config.sslmode
+     }, {
+       username: credentials.username,
+       password: credentials.password
+     });
+     case "bigquery": return testBigQueryConnection({
+       project_id: config.project_id,
+       dataset: config.dataset,
+       location: config.location
+     }, {
+       service_account_json: credentials.service_account_json,
+       keyfile_path: credentials.keyfile_path
+     });
+     case "databricks": return testDatabricksConnection({
+       hostname: config.hostname,
+       http_path: config.http_path,
+       catalog: config.catalog,
+       schema: config.schema
+     }, { token: credentials.token });
+     default: return {
+       success: false,
+       message: `Unsupported warehouse type: ${type}`
+     };
+   }
+ }
+ /**
+  * Execute a query against a datasource
+  */
+ async function executeQuery(datasource, sql, options = {}) {
+   const { type, config, credentials } = datasource;
+   switch (type) {
+     case "snowflake": return querySnowflake({
+       account: config.account,
+       database: config.database,
+       warehouse: config.warehouse,
+       schema: config.schema,
+       role: config.role
+     }, {
+       username: credentials.username,
+       password: credentials.password
+     }, sql, options);
+     case "postgres": return queryPostgres({
+       host: config.host,
+       port: config.port,
+       database: config.database,
+       schema: config.schema,
+       sslmode: config.sslmode
+     }, {
+       username: credentials.username,
+       password: credentials.password
+     }, sql, options);
+     case "bigquery": return {
+       columns: [],
+       rows: [],
+       rowCount: 0,
+       truncated: false,
+       error: "BigQuery local querying not yet implemented"
+     };
+     case "databricks": return {
+       columns: [],
+       rows: [],
+       rowCount: 0,
+       truncated: false,
+       error: "Databricks local querying not yet implemented"
+     };
+     default: return {
+       columns: [],
+       rows: [],
+       rowCount: 0,
+       truncated: false,
+       error: `Unsupported warehouse type: ${type}`
+     };
+   }
+ }

  //#endregion
  //#region src/commands/datasource/test.ts
- async function datasourceTestCommand(name) {
+ async function datasourceTestCommand(name, options = {}) {
+   const localDs = getLocalDatasource(name);
+   if (options.remote || !localDs) {
+     await testRemote(name, !localDs);
+     return;
+   }
+   await testLocal(name, localDs);
+ }
+ /**
+  * Test datasource locally using direct connection
+  */
+ async function testLocal(name, ds) {
+   console.log(pc.dim(`Testing ${name} locally...`));
+   console.log();
+   const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+   if (missing.length > 0) {
+     console.log(pc.red(`Missing environment variables: ${missing.join(", ")}`));
+     console.log(pc.dim("Set these env vars or update .bon/datasources.yaml with actual values."));
+     process.exit(1);
+   }
+   const result = await testConnection({
+     type: ds.type,
+     config: ds.config,
+     credentials: resolved
+   });
+   if (result.success) {
+     console.log(pc.green(`✓ ${result.message}`));
+     if (result.latencyMs) console.log(pc.dim(` Latency: ${result.latencyMs}ms`));
+   } else {
+     console.log(pc.red(`✗ ${result.message}`));
+     if (result.error) console.log(pc.dim(` ${result.error}`));
+     process.exit(1);
+   }
+ }
+ /**
+  * Test datasource via remote API (requires login)
+  */
+ async function testRemote(name, notFoundLocally) {
+   if (!loadCredentials()) {
+     if (notFoundLocally) {
+       console.log(pc.red(`Datasource "${name}" not found locally.`));
+       console.log(pc.dim("Run `bon datasource add` to create it, or `bon login` to test remote datasources."));
+     } else console.log(pc.red("Not logged in. Run `bon login` to test remote datasources."));
+     process.exit(1);
+   }
+   console.log(pc.dim(`Testing ${name} via remote API...`));
+   console.log();
    try {
      const result = await post("/api/datasources/test", { name });
      if (result.success) {
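testConnection and executeQuery are thin dispatchers over the per-warehouse modules; local querying is wired up for Snowflake and Postgres only, while BigQuery and Databricks return a not-yet-implemented error. The local test path composes them with env var resolution, as sketched here (datasource name and values hypothetical):

    const ds = getLocalDatasource("analytics-prod");
    const { resolved } = resolveEnvVarsInCredentials(ds.credentials);
    const res = await executeQuery(
      { type: ds.type, config: ds.config, credentials: resolved },
      "SELECT 1",
      { limit: 10 }
    );
    if (res.error) console.error(res.error);
    else console.table(res.rows);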
@@ -447,7 +1662,10 @@ async function datasourceTestCommand(name) {
        if (result.details.account) console.log(pc.dim(` Account: ${result.details.account}`));
        if (result.details.latencyMs != null) console.log(pc.dim(` Latency: ${result.details.latencyMs}ms`));
      }
-   } else console.log(pc.red(result.message));
+   } else {
+     console.log(pc.red(result.message));
+     process.exit(1);
+   }
  } catch (err) {
    console.error(pc.red(`Failed to test data source: ${err.message}`));
    process.exit(1);
@@ -456,60 +1674,176 @@ async function datasourceTestCommand(name) {
456
1674
 
457
1675
  //#endregion
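Note: both test paths lean on `resolveEnvVarsInCredentials`, which is defined elsewhere in this bundle. Judging from its call sites (it returns `{ resolved, missing }`) and the `{{ env_var('NAME') }}` convention documented under `--password-env` in the command registrations below, a plausible sketch of the contract, an assumption rather than the shipped implementation, looks like:

    // Hedged sketch: substitute {{ env_var('NAME') }} placeholders from process.env.
    function resolveEnvVarsInCredentialsSketch(credentials) {
      const resolved = {};
      const missing = [];
      for (const [key, value] of Object.entries(credentials ?? {})) {
        const m = typeof value === "string" ? value.match(/\{\{\s*env_var\('([^']+)'\)\s*\}\}/) : null;
        if (!m) { resolved[key] = value; continue; }
        if (process.env[m[1]] === undefined) missing.push(m[1]); // callers report these and abort
        else resolved[key] = process.env[m[1]];
      }
      return { resolved, missing };
    }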
  //#region src/commands/datasource/remove.ts
- async function datasourceRemoveCommand(name) {
+ async function datasourceRemoveCommand(name, options = {}) {
+ const existsLocally = datasourceExists(name);
+ if (options.remote) {
+ await removeRemote(name);
+ return;
+ }
+ if (existsLocally) {
+ if (removeLocalDatasource(name)) console.log(pc.green(`✓ Removed "${name}" from local storage`));
+ } else {
+ console.error(pc.red(`Datasource "${name}" not found.`));
+ console.log(pc.dim("Use --remote to remove from remote server."));
+ process.exit(1);
+ }
+ }
+ /**
+ * Remove datasource from remote server
+ */
+ async function removeRemote(name) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` to remove remote datasources."));
+ process.exit(1);
+ }
  try {
+ const { del } = await Promise.resolve().then(() => api_exports);
  await del(`/api/datasources/${encodeURIComponent(name)}`);
- console.log(pc.green(`Data source "${name}" removed.`));
+ console.log(pc.green(`✓ Removed "${name}" from remote server`));
  } catch (err) {
- console.error(pc.red(`Failed to remove data source: ${err.message}`));
+ console.error(pc.red(`Failed to remove remote datasource: ${err.message}`));
  process.exit(1);
  }
  }

  //#endregion
- //#region src/commands/query.ts
- async function queryCommand(datasourceName, sql, options) {
- const limit = options.limit ? parseInt(options.limit, 10) : 1e3;
- const format = options.format ?? "toon";
+ //#region src/commands/datasource/push.ts
+ var push_exports = /* @__PURE__ */ __exportAll({
+ datasourcePushCommand: () => datasourcePushCommand,
+ pushDatasource: () => pushDatasource
+ });
+ /**
+ * Push a local datasource to Bonnard server
+ */
+ async function datasourcePushCommand(name, options = {}) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` first."));
+ process.exit(1);
+ }
+ const datasource = getLocalDatasource(name);
+ if (!datasource) {
+ console.error(pc.red(`Datasource "${name}" not found in .bon/datasources.yaml`));
+ console.log(pc.dim("Run `bon datasource list --local` to see available datasources."));
+ process.exit(1);
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+ if (missing.length > 0) {
+ console.error(pc.red(`Missing environment variables: ${missing.join(", ")}`));
+ console.log(pc.dim("Set them in your environment or use plain values in .bon/datasources.yaml"));
+ process.exit(1);
+ }
  try {
- const result = await post("/api/datasources/query", {
- name: datasourceName,
- sql,
- options: {
- schema: options.schema,
- database: options.database,
- limit
+ if ((await getRemoteDatasources()).some((ds) => ds.name === name) && !options.force) {
+ if (!await confirm({
+ message: `Datasource "${name}" already exists on remote. Overwrite?`,
+ default: false
+ })) {
+ console.log(pc.dim("Aborted."));
+ process.exit(0);
  }
+ }
+ } catch (err) {
+ console.log(pc.dim(`Note: Could not check remote datasources: ${err.message}`));
+ }
+ console.log(pc.dim(`Pushing "${name}"...`));
+ try {
+ await post("/api/datasources", {
+ name: datasource.name,
+ warehouse_type: datasource.type,
+ config: datasource.config,
+ credentials: resolved
  });
- if (result.error) {
- console.error(pc.red(result.error));
+ console.log(pc.green(`✓ Datasource "${name}" pushed to Bonnard`));
+ } catch (err) {
+ const message = err.message;
+ if (message.includes("already exists")) {
+ console.error(pc.red(`Datasource "${name}" already exists on remote.`));
+ console.log(pc.dim("Use --force to overwrite."));
  process.exit(1);
  }
- if (result.rowCount === 0) {
- console.log("No rows returned.");
- return;
- }
- if (format === "json") console.log(JSON.stringify(result, null, 2));
- else {
- const toon = encode({ results: result.rows });
- console.log(toon);
- }
- if (result.truncated) console.log(pc.dim(`(truncated to ${result.rowCount} rows)`));
- } catch (err) {
- console.error(pc.red(`Query failed: ${err.message}`));
+ console.error(pc.red(`Failed to push datasource: ${message}`));
  process.exit(1);
  }
  }
+ /**
+ * Push a datasource programmatically (for use by deploy command)
+ * Returns true on success, false on failure
+ */
+ async function pushDatasource(name, options = {}) {
+ const datasource = getLocalDatasource(name);
+ if (!datasource) {
+ if (!options.silent) console.error(pc.red(`Datasource "${name}" not found locally`));
+ return false;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+ if (missing.length > 0) {
+ if (!options.silent) console.error(pc.red(`Missing env vars for "${name}": ${missing.join(", ")}`));
+ return false;
+ }
+ try {
+ await post("/api/datasources", {
+ name: datasource.name,
+ warehouse_type: datasource.type,
+ config: datasource.config,
+ credentials: resolved
+ });
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ //#endregion
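Note: the push payload pulls four fields from the local entry returned by `getLocalDatasource`. Expressed as a JavaScript object for illustration (concrete values are hypothetical; only the field names come from the code above), a `.bon/datasources.yaml` entry carries:

    // Hypothetical entry, shaped from the fields read by datasourcePushCommand.
    const exampleDatasource = {
      name: "analytics",        // hypothetical name
      type: "postgres",         // sent as warehouse_type in the POST /api/datasources body
      config: { host: "db.internal", port: 5432, database: "prod", schema: "public" },
      credentials: { username: "bon", password: "{{ env_var('PGPASSWORD') }}" }
    };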
+ //#region src/commands/preview.ts
+ async function previewCommand(datasourceName, sql, options) {
+ const limit = options.limit ? parseInt(options.limit, 10) : 1e3;
+ const format = options.format ?? "toon";
+ const ds = getLocalDatasource(datasourceName);
+ if (!ds) {
+ console.error(pc.red(`Datasource "${datasourceName}" not found in .bon/datasources.yaml`));
+ console.log(pc.dim("Run `bon datasource add` to create it."));
+ process.exit(1);
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.error(pc.red(`Missing environment variables: ${missing.join(", ")}`));
+ console.log(pc.dim("Set these env vars or update .bon/datasources.yaml with actual values."));
+ process.exit(1);
+ }
+ const result = await executeQuery({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ }, sql, {
+ limit,
+ schema: options.schema,
+ database: options.database
+ });
+ if (result.error) {
+ console.error(pc.red(result.error));
+ process.exit(1);
+ }
+ if (result.rowCount === 0) {
+ console.log("No rows returned.");
+ return;
+ }
+ if (format === "json") console.log(JSON.stringify(result, null, 2));
+ else {
+ const toon = encode({ results: result.rows });
+ console.log(toon);
+ }
+ if (result.truncated) console.log(pc.dim(`(truncated to ${result.rowCount} rows)`));
+ }

  //#endregion
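Note: `previewCommand` shares its output convention with `cubeQueryCommand` further down: TOON by default, pretty-printed JSON with `--format json`. A sketch of the shared branch, with made-up rows (`encode` appears to be the TOON encoder imported by this bundle; that attribution is an inference from the "toon" format name):

    // Sketch only: rows are hypothetical.
    const rows = [{ status: "ok", n: 1 }, { status: "ok", n: 2 }];
    if (format === "json") console.log(JSON.stringify(rows, null, 2));
    else console.log(encode({ results: rows }));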
  //#region src/commands/validate.ts
- async function validateCommand() {
+ async function validateCommand(options = {}) {
  const cwd = process.cwd();
  if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
- const { validate } = await import("./validate-Bd1D39Bj.mjs");
+ const { validate } = await import("./validate-C4EHvJzJ.mjs");
  const result = await validate(cwd);
  if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
  console.log(pc.yellow("No model or view files found in models/ or views/."));
@@ -524,21 +1858,482 @@ async function validateCommand() {
  console.log();
  if (result.cubes.length > 0) console.log(` ${pc.dim("Cubes")} (${result.cubes.length}): ${result.cubes.join(", ")}`);
  if (result.views.length > 0) console.log(` ${pc.dim("Views")} (${result.views.length}): ${result.views.join(", ")}`);
+ if (result.missingDescriptions.length > 0) {
+ console.log();
+ console.log(pc.yellow(`⚠ ${result.missingDescriptions.length} items missing descriptions`));
+ console.log(pc.dim(" Descriptions help AI agents and analysts discover the right metrics."));
+ const byParent = /* @__PURE__ */ new Map();
+ for (const m of result.missingDescriptions) {
+ const list = byParent.get(m.parent) || [];
+ const label = m.type === "cube" || m.type === "view" ? `(${m.type})` : m.name;
+ list.push(label);
+ byParent.set(m.parent, list);
+ }
+ for (const [parent, items] of byParent) console.log(pc.dim(` ${parent}: ${items.join(", ")}`));
+ }
+ if (options.testConnection) {
+ console.log();
+ await testReferencedConnections(cwd);
+ }
+ }
+ /**
+ * Test connections for datasources referenced by models
+ * Lenient: warns but doesn't fail validation
+ */
+ async function testReferencedConnections(cwd) {
+ const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
+ const { testConnection } = await Promise.resolve().then(() => connection_exports);
+ const references = extractDatasourcesFromModels(cwd);
+ if (references.length === 0) {
+ console.log(pc.dim("No datasource references found in models."));
+ return;
+ }
+ console.log(pc.bold("Testing connections..."));
+ console.log();
+ const localDatasources = loadLocalDatasources(cwd);
+ let warnings = 0;
+ for (const ref of references) {
+ const ds = localDatasources.find((d) => d.name === ref.name);
+ if (!ds) {
+ console.log(pc.yellow(`⚠ ${ref.name}: not found in .bon/datasources.yaml`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ continue;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.log(pc.yellow(`⚠ ${ref.name}: missing env vars: ${missing.join(", ")}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ continue;
+ }
+ const result = await testConnection({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ });
+ if (result.success) {
+ const latency = result.latencyMs ? pc.dim(` (${result.latencyMs}ms)`) : "";
+ console.log(pc.green(`✓ ${ref.name}${latency}`));
+ } else {
+ console.log(pc.yellow(`⚠ ${ref.name}: ${result.error || result.message}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ }
+ }
+ if (warnings > 0) {
+ console.log();
+ console.log(pc.yellow(`${warnings} connection warning(s)`));
+ console.log(pc.dim("Connection issues won't block file validation, but will fail at deploy."));
+ }
+ }
+
+ //#endregion
+ //#region src/commands/deploy.ts
+ function collectFiles(dir, rootDir) {
+ const files = {};
+ if (!fs.existsSync(dir)) return files;
+ function walk(current) {
+ for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
+ const fullPath = path.join(current, entry.name);
+ if (entry.isDirectory()) walk(fullPath);
+ else if (entry.name.endsWith(".yaml") || entry.name.endsWith(".yml")) {
+ const relativePath = path.relative(rootDir, fullPath);
+ files[relativePath] = fs.readFileSync(fullPath, "utf-8");
+ }
+ }
+ }
+ walk(dir);
+ return files;
+ }
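Note: `collectFiles` flattens a directory tree into a map from root-relative path to file contents; that map becomes the `files` payload for `POST /api/deploy` below. A worked example under a hypothetical project layout:

    // Given models/orders.yaml, models/shared/customers.yml, and views/revenue.yaml:
    const files = {
      ...collectFiles(path.join(cwd, "models"), cwd),
      ...collectFiles(path.join(cwd, "views"), cwd)
    };
    // files => {
    //   "models/orders.yaml": "<yaml source>",
    //   "models/shared/customers.yml": "<yaml source>",  // walk() recurses into subdirectories
    //   "views/revenue.yaml": "<yaml source>"
    // }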
+ async function deployCommand(options = {}) {
+ const cwd = process.cwd();
+ if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
+ console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
+ process.exit(1);
+ }
+ console.log(pc.dim("Validating models..."));
+ const { validate } = await import("./validate-C4EHvJzJ.mjs");
+ const result = await validate(cwd);
+ if (!result.valid) {
+ console.log(pc.red("Validation failed:\n"));
+ for (const err of result.errors) console.log(pc.red(` • ${err}`));
+ process.exit(1);
+ }
+ if (result.cubes.length === 0 && result.views.length === 0) {
+ console.log(pc.yellow("No model or view files found in models/ or views/. Nothing to deploy."));
+ process.exit(1);
+ }
+ console.log(pc.dim(` Found ${result.cubes.length} cube(s) and ${result.views.length} view(s)`));
+ if (await testAndSyncDatasources(cwd, options)) process.exit(1);
+ const files = {
+ ...collectFiles(path.join(cwd, "models"), cwd),
+ ...collectFiles(path.join(cwd, "views"), cwd)
+ };
+ const fileCount = Object.keys(files).length;
+ console.log(pc.dim(`Deploying ${fileCount} file(s)...`));
+ console.log();
+ try {
+ const response = await post("/api/deploy", { files });
+ console.log(pc.green("Deploy successful!"));
+ console.log(`Deployment ID: ${pc.cyan(response.deployment.id)}`);
+ console.log(`Cube API: ${pc.cyan(`${response.deployment.cubeApiUrl}/cubejs-api/v1`)}`);
+ } catch (err) {
+ console.log(pc.red(`Deploy failed: ${err instanceof Error ? err.message : err}`));
+ process.exit(1);
+ }
+ }
+ /**
+ * Test connections and sync datasources to remote
+ * Returns true if any connection failed (strict mode)
+ */
+ async function testAndSyncDatasources(cwd, options = {}) {
+ const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
+ const { testConnection } = await Promise.resolve().then(() => connection_exports);
+ const { pushDatasource } = await Promise.resolve().then(() => push_exports);
+ const references = extractDatasourcesFromModels(cwd);
+ if (references.length === 0) return false;
+ console.log();
+ console.log(pc.dim("Testing datasource connections..."));
+ const localDatasources = loadLocalDatasources(cwd);
+ let failed = false;
+ const validatedDatasources = [];
+ for (const ref of references) {
+ const ds = localDatasources.find((d) => d.name === ref.name);
+ if (!ds) {
+ console.log(pc.red(`✗ ${ref.name}: not found in .bon/datasources.yaml`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ console.log(pc.dim(` Run: bon datasource add --from-dbt`));
+ failed = true;
+ continue;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.log(pc.red(`✗ ${ref.name}: missing env vars: ${missing.join(", ")}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ failed = true;
+ continue;
+ }
+ const result = await testConnection({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ });
+ if (result.success) {
+ const latency = result.latencyMs ? pc.dim(` (${result.latencyMs}ms)`) : "";
+ console.log(pc.green(`✓ ${ref.name}${latency}`));
+ validatedDatasources.push(ref.name);
+ } else {
+ console.log(pc.red(`✗ ${ref.name}: ${result.error || result.message}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ failed = true;
+ }
+ }
+ console.log();
+ if (failed) {
+ console.log(pc.red("Connection tests failed. Fix datasource issues before deploying."));
+ return true;
+ }
+ console.log(pc.dim("Checking remote datasources..."));
+ let remoteDatasources;
+ try {
+ remoteDatasources = await getRemoteDatasources();
+ } catch (err) {
+ console.log(pc.red(`Failed to fetch remote datasources: ${err.message}`));
+ return true;
+ }
+ const remoteNames = new Set(remoteDatasources.map((ds) => ds.name));
+ const missingRemote = validatedDatasources.filter((name) => !remoteNames.has(name));
+ if (missingRemote.length === 0) {
+ console.log(pc.green("✓ All datasources exist on remote"));
+ console.log();
+ return false;
+ }
+ console.log();
+ console.log(pc.yellow(`⚠ Missing remote datasource${missingRemote.length > 1 ? "s" : ""}: ${missingRemote.join(", ")}`));
+ console.log();
+ if (options.ci) {
+ console.log(pc.red("Deploy aborted (--ci mode)."));
+ console.log(pc.dim(`Run: bon datasource push <name>`));
+ return true;
+ }
+ if (options.pushDatasources) {
+ for (const name of missingRemote) {
+ console.log(pc.dim(`Pushing "${name}"...`));
+ if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
+ else {
+ console.log(pc.red(`✗ Failed to push "${name}"`));
+ return true;
+ }
+ }
+ console.log();
+ return false;
+ }
+ if (!await confirm({
+ message: `Push ${missingRemote.length > 1 ? "these datasources" : `"${missingRemote[0]}"`} to Bonnard? (credentials will be encrypted)`,
+ default: true
+ })) {
+ console.log(pc.dim("Deploy aborted."));
+ return true;
+ }
+ console.log();
+ for (const name of missingRemote) {
+ console.log(pc.dim(`Pushing "${name}"...`));
+ if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
+ else {
+ console.log(pc.red(`✗ Failed to push "${name}"`));
+ return true;
+ }
+ }
+ console.log();
+ return false;
+ }
+
+ //#endregion
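Note the inverted boolean contract here: `testAndSyncDatasources` returns true when the deploy should abort, which is why `deployCommand` exits on a truthy result. The two non-interactive paths, sketched from the option handling above:

    // Sketch of the option semantics (return value means "abort deploy"):
    await testAndSyncDatasources(cwd, { ci: true });              // never prompts; aborts if a datasource is missing remotely
    await testAndSyncDatasources(cwd, { pushDatasources: true }); // pushes missing datasources without confirmation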
+ //#region src/commands/docs.ts
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ const DOCS_DIR = path.join(__dirname, "..", "docs");
+ /**
+ * Get list of all available topics by scanning the topics directory
+ */
+ function getAvailableTopics() {
+ const topicsDir = path.join(DOCS_DIR, "topics");
+ if (!fs.existsSync(topicsDir)) return [];
+ return fs.readdirSync(topicsDir).filter((f) => f.endsWith(".md")).map((f) => f.replace(".md", "")).sort();
+ }
+ /**
+ * Load the index file
+ */
+ function loadIndex() {
+ const indexPath = path.join(DOCS_DIR, "_index.md");
+ if (!fs.existsSync(indexPath)) return null;
+ return fs.readFileSync(indexPath, "utf-8");
+ }
+ /**
+ * Load a specific topic
+ */
+ function loadTopic(topicId) {
+ const topicPath = path.join(DOCS_DIR, "topics", `${topicId}.md`);
+ if (!fs.existsSync(topicPath)) return null;
+ return fs.readFileSync(topicPath, "utf-8");
+ }
+ /**
+ * Load a JSON schema
+ */
+ function loadSchema(schemaName) {
+ const schemaPath = path.join(DOCS_DIR, "schemas", `${schemaName}.schema.json`);
+ if (!fs.existsSync(schemaPath)) return null;
+ return JSON.parse(fs.readFileSync(schemaPath, "utf-8"));
+ }
+ /**
+ * Get child topics for a given topic
+ */
+ function getChildTopics(topicId) {
+ const allTopics = getAvailableTopics();
+ const prefix = topicId + ".";
+ return allTopics.filter((t) => t.startsWith(prefix) && !t.slice(prefix.length).includes("."));
+ }
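Note: the prefix filter in `getChildTopics` returns only direct children, one level below the given topic id. A worked example using topic ids that ship in this package's docs/topics/ directory:

    // getAvailableTopics() -> ["cubes", "cubes.measures", "cubes.measures.rolling", ...]
    // getChildTopics("cubes.measures") -> ["cubes.measures.rolling", ...]
    // getChildTopics("cubes") excludes "cubes.measures.rolling" because the
    // remainder "measures.rolling" still contains a dot.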
+ /**
+ * Search topics for a query string
+ */
+ function searchTopics(query) {
+ const results = [];
+ const queryLower = query.toLowerCase();
+ for (const topic of getAvailableTopics()) {
+ const content = loadTopic(topic);
+ if (!content) continue;
+ const lines = content.split("\n");
+ const matches = [];
+ for (const line of lines) if (line.toLowerCase().includes(queryLower)) matches.push(line.trim());
+ if (matches.length > 0) results.push({
+ topic,
+ matches: matches.slice(0, 3)
+ });
+ }
+ return results;
+ }
+ /**
+ * Format topic as JSON
+ */
+ function formatAsJson(topicId, content) {
+ const lines = content.split("\n");
+ const title = lines.find((l) => l.startsWith("# "))?.replace("# ", "") || topicId;
+ const description = lines.find((l) => l.startsWith("> "))?.replace("> ", "") || "";
+ const children = getChildTopics(topicId);
+ const seeAlsoIndex = lines.findIndex((l) => l.includes("## See Also"));
+ const seeAlso = [];
+ if (seeAlsoIndex !== -1) for (let i = seeAlsoIndex + 1; i < lines.length; i++) {
+ const match = lines[i].match(/^- (.+)$/);
+ if (match) seeAlso.push(match[1]);
+ else if (lines[i].startsWith("##")) break;
+ }
+ const moreInfoIndex = lines.findIndex((l) => l.includes("## More Information"));
+ let reference;
+ if (moreInfoIndex !== -1 && lines[moreInfoIndex + 2]) {
+ const url = lines[moreInfoIndex + 2].trim();
+ if (url.startsWith("http")) reference = url;
+ }
+ return JSON.stringify({
+ topic: topicId,
+ title,
+ description,
+ children,
+ seeAlso,
+ reference
+ }, null, 2);
+ }
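Note: `formatAsJson` reduces a topic's markdown to a summary object: the first `# ` line becomes the title, the first `> ` line the description, and the `## See Also` / `## More Information` sections feed `seeAlso` and `reference`. For a hypothetical cubes.measures topic, the output is shaped like:

    // Hypothetical output: field values invented, structure from the code above.
    // {
    //   "topic": "cubes.measures",
    //   "title": "Measures",
    //   "description": "Define aggregations over a cube",
    //   "children": ["cubes.measures.calculated", "cubes.measures.rolling"],
    //   "seeAlso": ["cubes.dimensions"],
    //   "reference": "https://..."   // only set when "## More Information" is followed by a URL
    // }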
+ /**
+ * Main docs command
+ */
+ async function docsCommand(topic, options = {}) {
+ if (options.search) {
+ const results = searchTopics(options.search);
+ if (results.length === 0) {
+ console.log(pc.yellow(`No topics found matching "${options.search}"`));
+ return;
+ }
+ console.log(pc.bold(`Found ${results.length} topic(s) matching "${options.search}":\n`));
+ for (const result of results) {
+ console.log(pc.cyan(` ${result.topic}`));
+ for (const match of result.matches) console.log(pc.dim(` ${match.slice(0, 80)}${match.length > 80 ? "..." : ""}`));
+ console.log();
+ }
+ return;
+ }
+ if (!topic) {
+ const index = loadIndex();
+ if (!index) {
+ console.log(pc.red("Documentation index not found."));
+ console.log(pc.dim("Expected at: " + path.join(DOCS_DIR, "_index.md")));
+ process.exit(1);
+ }
+ console.log(index);
+ return;
+ }
+ const content = loadTopic(topic);
+ if (!content) {
+ const available = getAvailableTopics();
+ console.log(pc.red(`Topic "${topic}" not found.`));
+ console.log();
+ const similar = available.filter((t) => t.includes(topic) || topic.includes(t) || t.split(".").some((part) => topic.includes(part)));
+ if (similar.length > 0) {
+ console.log(pc.dim("Similar topics:"));
+ for (const s of similar.slice(0, 5)) console.log(pc.dim(` - ${s}`));
+ } else console.log(pc.dim("Run `bon docs` to see available topics."));
+ process.exit(1);
+ }
+ if (options.format === "json") console.log(formatAsJson(topic, content));
+ else console.log(content);
+ if (options.recursive) {
+ const children = getChildTopics(topic);
+ for (const child of children) {
+ const childContent = loadTopic(child);
+ if (childContent) {
+ console.log("\n" + "─".repeat(60) + "\n");
+ if (options.format === "json") console.log(formatAsJson(child, childContent));
+ else console.log(childContent);
+ }
+ }
+ }
+ }
+ /**
+ * Schema subcommand
+ */
+ async function docsSchemaCommand(schemaName) {
+ const schema = loadSchema(schemaName);
+ if (!schema) {
+ const schemasDir = path.join(DOCS_DIR, "schemas");
+ const available = fs.existsSync(schemasDir) ? fs.readdirSync(schemasDir).filter((f) => f.endsWith(".schema.json")).map((f) => f.replace(".schema.json", "")) : [];
+ console.log(pc.red(`Schema "${schemaName}" not found.`));
+ if (available.length > 0) {
+ console.log(pc.dim("\nAvailable schemas:"));
+ for (const s of available) console.log(pc.dim(` - ${s}`));
+ }
+ process.exit(1);
+ }
+ console.log(JSON.stringify(schema, null, 2));
+ }
+
+ //#endregion
+ //#region src/commands/cube/query.ts
+ /**
+ * Query the deployed Cube semantic layer
+ *
+ * Supports two formats:
+ * - JSON (default): bon cube query '{"measures": ["orders.count"]}'
+ * - SQL: bon cube query --sql "SELECT MEASURE(count) FROM orders"
+ */
+ async function cubeQueryCommand(queryInput, options = {}) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` first."));
+ process.exit(1);
+ }
+ const format = options.format ?? "toon";
+ const limit = options.limit ? parseInt(options.limit, 10) : void 0;
+ try {
+ let payload;
+ if (options.sql) payload = { sql: queryInput };
+ else {
+ let query;
+ try {
+ query = JSON.parse(queryInput);
+ } catch {
+ console.error(pc.red("Invalid JSON query. Use --sql for SQL queries."));
+ console.log(pc.dim("Example: bon cube query '{\"measures\": [\"orders.count\"]}'"));
+ process.exit(1);
+ }
+ if (limit && !query.limit) query.limit = limit;
+ payload = { query };
+ }
+ const result = await post("/api/cube/query", payload);
+ if (result.error) {
+ console.error(pc.red(`Query error: ${result.error}`));
+ process.exit(1);
+ }
+ if (result.sql) {
+ console.log(pc.dim("Generated SQL:"));
+ console.log(result.sql.sql.join("\n"));
+ return;
+ }
+ const data = result.results?.[0]?.data ?? result.data ?? [];
+ if (data.length === 0) {
+ console.log("No rows returned.");
+ return;
+ }
+ if (format === "json") console.log(JSON.stringify(data, null, 2));
+ else {
+ const toon = encode({ results: data });
+ console.log(toon);
+ }
+ if (limit && data.length >= limit) console.log(pc.dim(`(limited to ${limit} rows)`));
+ } catch (err) {
+ console.error(pc.red(`Query failed: ${err.message}`));
+ process.exit(1);
+ }
  }

  //#endregion
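Note: one defensive detail in `cubeQueryCommand` is that the response from `/api/cube/query` is accepted in two shapes, and SQL-compilation responses short-circuit before any rows are read. Sketched with hypothetical payloads:

    // Either { results: [{ data: [...] }] } or a flat { data: [...] } is accepted:
    const data = result.results?.[0]?.data ?? result.data ?? [];
    // A compile-only response { sql: { sql: ["SELECT ..."] } } prints the generated SQL and returns early.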
  //#region src/bin/bon.ts
- program.name("bon").description("Bonnard semantic layer CLI").version("0.1.0");
- program.command("init").description("Create a new Bonnard project in the current directory").action(initCommand);
+ const { version } = createRequire(import.meta.url)("../../package.json");
+ program.name("bon").description("Bonnard semantic layer CLI").version(version);
+ program.command("init").description("Create bon.yaml, models/, views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
  program.command("login").description("Authenticate with Bonnard via your browser").action(loginCommand);
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
+ program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
- datasource.command("add").description("Add a new data source connection").action(datasourceAddCommand);
- datasource.command("list").description("List configured data sources").action(datasourceListCommand);
- datasource.command("test").description("Test data source connectivity").argument("<name>", "Data source name").action(datasourceTestCommand);
- datasource.command("remove").description("Remove a data source").argument("<name>", "Data source name").action(datasourceRemoveCommand);
- program.command("query").description("Run a SQL query against a warehouse").argument("<datasource-name>", "Data source name").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(queryCommand);
- program.command("validate").description("Validate Cube model and view YAML files").action(validateCommand);
+ datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
+ datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
+ datasource.command("test").description("Test data source connectivity by connecting directly to the warehouse").argument("<name>", "Data source name from .bon/datasources.yaml").option("--remote", "Test via Bonnard API instead of direct connection (requires login)").action(datasourceTestCommand);
+ datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
+ datasource.command("push").description("Push a local data source to Bonnard server (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").option("--force", "Overwrite if already exists on remote").action(datasourcePushCommand);
+ program.command("preview").description("Preview data from a local warehouse using raw SQL (for development/exploration)").argument("<datasource>", "Data source name from .bon/datasources.yaml").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(previewCommand);
+ program.command("validate").description("Validate YAML syntax in models/ and views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
+ program.command("deploy").description("Deploy models to Bonnard. Requires login, validates models, tests connections (fails on error)").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").action(deployCommand);
+ program.command("cube").description("Query the deployed Cube semantic layer").command("query").description("Execute a query against the deployed semantic layer").argument("<query>", "JSON query or SQL (with --sql flag)").option("--sql", "Use Cube SQL API instead of JSON format").option("--limit <limit>", "Max rows to return").option("--format <format>", "Output format: toon or json", "toon").action(cubeQueryCommand);
+ program.command("docs").description("Browse Cube documentation for building models and views").argument("[topic]", "Topic to display (e.g., cubes, cubes.measures)").option("-r, --recursive", "Show topic and all child topics").option("-s, --search <query>", "Search topics for a keyword").option("-f, --format <format>", "Output format: markdown or json", "markdown").action(docsCommand).command("schema").description("Show JSON schema for a type (cube, view, measure, etc.)").argument("<type>", "Schema type to display").action(docsSchemaCommand);
  program.parse();

  //#endregion