@bonnard/cli 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/bin/bon.mjs +1919 -100
  2. package/dist/bin/models-IsV2sX74.mjs +76 -0
  3. package/dist/bin/{validate-Bd1D39Bj.mjs → validate-C4EHvJzJ.mjs} +47 -4
  4. package/dist/docs/README.md +78 -0
  5. package/dist/docs/_index.md +70 -0
  6. package/dist/docs/topics/cubes.data-source.md +92 -0
  7. package/dist/docs/topics/cubes.dimensions.format.md +195 -0
  8. package/dist/docs/topics/cubes.dimensions.md +184 -0
  9. package/dist/docs/topics/cubes.dimensions.primary-key.md +106 -0
  10. package/dist/docs/topics/cubes.dimensions.sub-query.md +174 -0
  11. package/dist/docs/topics/cubes.dimensions.time.md +111 -0
  12. package/dist/docs/topics/cubes.dimensions.types.md +107 -0
  13. package/dist/docs/topics/cubes.extends.md +149 -0
  14. package/dist/docs/topics/cubes.hierarchies.md +174 -0
  15. package/dist/docs/topics/cubes.joins.md +115 -0
  16. package/dist/docs/topics/cubes.md +117 -0
  17. package/dist/docs/topics/cubes.measures.calculated.md +99 -0
  18. package/dist/docs/topics/cubes.measures.drill-members.md +158 -0
  19. package/dist/docs/topics/cubes.measures.filters.md +86 -0
  20. package/dist/docs/topics/cubes.measures.format.md +153 -0
  21. package/dist/docs/topics/cubes.measures.md +162 -0
  22. package/dist/docs/topics/cubes.measures.rolling.md +119 -0
  23. package/dist/docs/topics/cubes.measures.types.md +122 -0
  24. package/dist/docs/topics/cubes.public.md +172 -0
  25. package/dist/docs/topics/cubes.refresh-key.md +153 -0
  26. package/dist/docs/topics/cubes.segments.md +121 -0
  27. package/dist/docs/topics/cubes.sql.md +61 -0
  28. package/dist/docs/topics/pre-aggregations.md +126 -0
  29. package/dist/docs/topics/pre-aggregations.rollup.md +162 -0
  30. package/dist/docs/topics/syntax.context-variables.md +153 -0
  31. package/dist/docs/topics/syntax.md +133 -0
  32. package/dist/docs/topics/syntax.references.md +174 -0
  33. package/dist/docs/topics/views.cubes.md +162 -0
  34. package/dist/docs/topics/views.folders.md +154 -0
  35. package/dist/docs/topics/views.includes.md +139 -0
  36. package/dist/docs/topics/views.md +138 -0
  37. package/dist/docs/topics/workflow.deploy.md +128 -0
  38. package/dist/docs/topics/workflow.mcp.md +100 -0
  39. package/dist/docs/topics/workflow.md +147 -0
  40. package/dist/docs/topics/workflow.query.md +198 -0
  41. package/dist/docs/topics/workflow.validate.md +152 -0
  42. package/dist/templates/claude/rules/bonnard.md +15 -0
  43. package/dist/templates/claude/settings.json +7 -0
  44. package/dist/templates/claude/skills/bonnard-cli/SKILL.md +59 -0
  45. package/dist/templates/claude/skills/bonnard-queries/SKILL.md +68 -0
  46. package/dist/templates/cursor/rules/bonnard-cli.mdc +47 -0
  47. package/dist/templates/cursor/rules/bonnard-queries.mdc +49 -0
  48. package/dist/templates/cursor/rules/bonnard.mdc +20 -0
  49. package/dist/templates/shared/bonnard.md +81 -0
  50. package/package.json +13 -8
package/dist/bin/bon.mjs CHANGED
@@ -3,18 +3,129 @@ import { createRequire } from "node:module";
  import { program } from "commander";
  import fs from "node:fs";
  import path from "node:path";
+ import { fileURLToPath } from "node:url";
  import pc from "picocolors";
  import http from "node:http";
  import crypto from "node:crypto";
  import os from "node:os";
+ import YAML from "yaml";
+ import { execFileSync } from "node:child_process";
+ import { confirm } from "@inquirer/prompts";
  import { encode } from "@toon-format/toon";

+ //#region rolldown:runtime
+ var __defProp = Object.defineProperty;
+ var __exportAll = (all, symbols) => {
+ let target = {};
+ for (var name in all) {
+ __defProp(target, name, {
+ get: all[name],
+ enumerable: true
+ });
+ }
+ if (symbols) {
+ __defProp(target, Symbol.toStringTag, { value: "Module" });
+ }
+ return target;
+ };
+ var __require = /* @__PURE__ */ createRequire(import.meta.url);
+
+ //#endregion
  //#region src/commands/init.ts
+ const __filename$1 = fileURLToPath(import.meta.url);
+ const __dirname$1 = path.dirname(__filename$1);
+ const TEMPLATES_DIR = path.join(__dirname$1, "..", "templates");
  const BON_YAML_TEMPLATE = (projectName) => `project:
  name: ${projectName}
  `;
  const GITIGNORE_TEMPLATE = `.bon/
  `;
+ /**
+ * Load a template file from the templates directory
+ */
+ function loadTemplate(relativePath) {
+ const templatePath = path.join(TEMPLATES_DIR, relativePath);
+ return fs.readFileSync(templatePath, "utf-8");
+ }
+ /**
+ * Load a JSON template file
+ */
+ function loadJsonTemplate(relativePath) {
+ const content = loadTemplate(relativePath);
+ return JSON.parse(content);
+ }
+ /**
+ * Write a template file, appending if target exists and doesn't already have Bonnard content
+ */
+ function writeTemplateFile(content, targetPath, createdFiles) {
+ if (fs.existsSync(targetPath)) {
+ if (!fs.readFileSync(targetPath, "utf-8").includes("# Bonnard")) {
+ fs.appendFileSync(targetPath, `\n\n${content}`);
+ createdFiles.push(`${path.relative(process.cwd(), targetPath)} (appended)`);
+ }
+ } else {
+ fs.writeFileSync(targetPath, content);
+ createdFiles.push(path.relative(process.cwd(), targetPath));
+ }
+ }
+ /**
+ * Merge settings.json, preserving existing settings
+ */
+ function mergeSettingsJson(templateSettings, targetPath, createdFiles) {
+ if (fs.existsSync(targetPath)) {
+ const existingContent = JSON.parse(fs.readFileSync(targetPath, "utf-8"));
+ const templatePerms = templateSettings.permissions;
+ if (templatePerms?.allow) {
+ existingContent.permissions = existingContent.permissions || {};
+ existingContent.permissions.allow = existingContent.permissions.allow || [];
+ for (const permission of templatePerms.allow) if (!existingContent.permissions.allow.includes(permission)) existingContent.permissions.allow.push(permission);
+ }
+ fs.writeFileSync(targetPath, JSON.stringify(existingContent, null, 2) + "\n");
+ createdFiles.push(`${path.relative(process.cwd(), targetPath)} (merged)`);
+ } else {
+ fs.writeFileSync(targetPath, JSON.stringify(templateSettings, null, 2) + "\n");
+ createdFiles.push(path.relative(process.cwd(), targetPath));
+ }
+ }
+ /**
+ * Add Cursor frontmatter to shared content
+ */
+ function withCursorFrontmatter(content, description, alwaysApply) {
+ return `---
+ description: "${description}"
+ alwaysApply: ${alwaysApply}
+ ---
+
+ ` + content;
+ }
+ /**
+ * Create agent templates (Claude Code, Cursor, and Codex)
+ */
+ function createAgentTemplates(cwd) {
+ const createdFiles = [];
+ const sharedBonnard = loadTemplate("shared/bonnard.md");
+ const claudeRulesDir = path.join(cwd, ".claude", "rules");
+ const claudeSkillsDir = path.join(cwd, ".claude", "skills");
+ fs.mkdirSync(claudeRulesDir, { recursive: true });
+ fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-cli"), { recursive: true });
+ fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-queries"), { recursive: true });
+ writeTemplateFile(sharedBonnard, path.join(claudeRulesDir, "bonnard.md"), createdFiles);
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(claudeSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(claudeSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
+ mergeSettingsJson(loadJsonTemplate("claude/settings.json"), path.join(cwd, ".claude", "settings.json"), createdFiles);
+ const cursorRulesDir = path.join(cwd, ".cursor", "rules");
+ fs.mkdirSync(cursorRulesDir, { recursive: true });
+ writeTemplateFile(withCursorFrontmatter(sharedBonnard, "Bonnard semantic layer project context", true), path.join(cursorRulesDir, "bonnard.mdc"), createdFiles);
+ writeTemplateFile(loadTemplate("cursor/rules/bonnard-cli.mdc"), path.join(cursorRulesDir, "bonnard-cli.mdc"), createdFiles);
+ writeTemplateFile(loadTemplate("cursor/rules/bonnard-queries.mdc"), path.join(cursorRulesDir, "bonnard-queries.mdc"), createdFiles);
+ const codexSkillsDir = path.join(cwd, ".agents", "skills");
+ fs.mkdirSync(path.join(codexSkillsDir, "bonnard-cli"), { recursive: true });
+ fs.mkdirSync(path.join(codexSkillsDir, "bonnard-queries"), { recursive: true });
+ writeTemplateFile(sharedBonnard, path.join(cwd, "AGENTS.md"), createdFiles);
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-cli/SKILL.md"), path.join(codexSkillsDir, "bonnard-cli", "SKILL.md"), createdFiles);
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-queries/SKILL.md"), path.join(codexSkillsDir, "bonnard-queries", "SKILL.md"), createdFiles);
+ return createdFiles;
+ }
  async function initCommand() {
  const cwd = process.cwd();
  const projectName = path.basename(cwd);
@@ -27,13 +138,20 @@ async function initCommand() {
  fs.mkdirSync(path.join(cwd, ".bon"), { recursive: true });
  fs.writeFileSync(path.join(cwd, "bon.yaml"), BON_YAML_TEMPLATE(projectName));
  fs.writeFileSync(path.join(cwd, ".gitignore"), GITIGNORE_TEMPLATE);
+ const agentFiles = createAgentTemplates(cwd);
  console.log(pc.green(`Initialised Bonnard project "${projectName}"`));
  console.log();
+ console.log(pc.bold("Core files:"));
  console.log(` ${pc.dim("bon.yaml")} project config`);
  console.log(` ${pc.dim("models/")} model definitions`);
  console.log(` ${pc.dim("views/")} view definitions`);
  console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
  console.log(` ${pc.dim(".gitignore")} git ignore rules`);
+ if (agentFiles.length > 0) {
+ console.log();
+ console.log(pc.bold("Agent support:"));
+ for (const file of agentFiles) console.log(` ${pc.dim(file)}`);
+ }
  }

  //#endregion
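
For orientation, a minimal sketch of how the template helpers above compose during init (the shared content string here is shortened; the real content ships under dist/templates):

    const rule = withCursorFrontmatter("# Bonnard\n...", "Bonnard semantic layer project context", true);
    // => "---\ndescription: \"Bonnard semantic layer project context\"\nalwaysApply: true\n---\n\n# Bonnard\n..."
    const files = createAgentTemplates(process.cwd());
    // => relative paths such as ".claude/rules/bonnard.md" or ".cursor/rules/bonnard.mdc";
    //    an entry gains the suffix "(appended)" or "(merged)" when the target already existed.
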
@@ -65,7 +183,7 @@ function clearCredentials() {

  //#endregion
  //#region src/commands/login.ts
- const APP_URL$1 = process.env.BON_APP_URL || "http://localhost:3000";
+ const APP_URL$1 = process.env.BON_APP_URL || "https://app.bonnard.dev";
  const TIMEOUT_MS = 120 * 1e3;
  async function loginCommand() {
  const state = crypto.randomUUID();
@@ -201,7 +319,14 @@ async function logoutCommand() {

  //#endregion
  //#region src/lib/api.ts
- const APP_URL = process.env.BON_APP_URL || "http://localhost:3000";
+ var api_exports = /* @__PURE__ */ __exportAll({
+ del: () => del,
+ get: () => get,
+ getRemoteDatasources: () => getRemoteDatasources,
+ post: () => post
+ });
+ const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
+ const VERCEL_BYPASS = process.env.VERCEL_AUTOMATION_BYPASS_SECRET;
  function getToken() {
  const creds = loadCredentials();
  if (!creds) {
@@ -213,12 +338,14 @@ function getToken() {
  async function request(method, path, body) {
  const token = getToken();
  const url = `${APP_URL}${path}`;
+ const headers = {
+ Authorization: `Bearer ${token}`,
+ "Content-Type": "application/json"
+ };
+ if (VERCEL_BYPASS) headers["x-vercel-protection-bypass"] = VERCEL_BYPASS;
  const res = await fetch(url, {
  method,
- headers: {
- Authorization: `Bearer ${token}`,
- "Content-Type": "application/json"
- },
+ headers,
  body: body ? JSON.stringify(body) : void 0
  });
  const data = await res.json();
@@ -237,13 +364,383 @@ function post(path, body) {
  function del(path) {
  return request("DELETE", path);
  }
+ /**
+ * Fetch remote datasources from Bonnard server
+ */
+ async function getRemoteDatasources() {
+ return (await get("/api/datasources")).dataSources || [];
+ }
+
+ //#endregion
+ //#region src/commands/whoami.ts
+ async function whoamiCommand(options = {}) {
+ const credentials = loadCredentials();
+ if (!credentials) {
+ console.log(pc.yellow("Not logged in."));
+ console.log(pc.dim("Run `bon login` to authenticate."));
+ process.exit(1);
+ }
+ if (options.verify) try {
+ const result = await get("/api/cli/whoami");
+ console.log(pc.green(`Logged in as ${result.email}`));
+ if (result.orgName) console.log(pc.dim(`Organization: ${result.orgName}`));
+ } catch (err) {
+ console.log(pc.red("Session expired or invalid."));
+ console.log(pc.dim("Run `bon login` to re-authenticate."));
+ process.exit(1);
+ }
+ else {
+ console.log(pc.green(`Logged in as ${credentials.email}`));
+ console.log(pc.dim("Use --verify to check if session is still valid."));
+ }
+ }
+
+ //#endregion
+ //#region src/lib/local/datasources.ts
+ /**
+ * Local datasource storage (.bon/datasources.yaml)
+ *
+ * Single file containing both config and credentials.
+ * Credentials may contain:
+ * - Plain values: "my_password"
+ * - dbt env var syntax: "{{ env_var('MY_PASSWORD') }}"
+ *
+ * Env vars are resolved at deploy time, not import time.
+ */
+ const BON_DIR$1 = ".bon";
+ const DATASOURCES_FILE$1 = "datasources.yaml";
+ function getBonDir(cwd = process.cwd()) {
+ return path.join(cwd, BON_DIR$1);
+ }
+ function getDatasourcesPath$1(cwd = process.cwd()) {
+ return path.join(getBonDir(cwd), DATASOURCES_FILE$1);
+ }
+ /**
+ * Ensure .bon directory exists
+ */
+ function ensureBonDir(cwd = process.cwd()) {
+ const bonDir = getBonDir(cwd);
+ if (!fs.existsSync(bonDir)) fs.mkdirSync(bonDir, { recursive: true });
+ }
+ /**
+ * Load all local datasources
+ */
+ function loadLocalDatasources(cwd = process.cwd()) {
+ const filePath = getDatasourcesPath$1(cwd);
+ if (!fs.existsSync(filePath)) return [];
+ try {
+ const content = fs.readFileSync(filePath, "utf-8");
+ return YAML.parse(content)?.datasources ?? [];
+ } catch {
+ return [];
+ }
+ }
+ /**
+ * Save all local datasources (with secure permissions since it contains credentials)
+ */
+ function saveLocalDatasources(datasources, cwd = process.cwd()) {
+ ensureBonDir(cwd);
+ const filePath = getDatasourcesPath$1(cwd);
+ const file = { datasources };
+ const content = `# Bonnard datasources configuration
+ # This file contains credentials - add to .gitignore
+ # Env vars like {{ env_var('PASSWORD') }} are resolved at deploy time
+
+ ` + YAML.stringify(file, { indent: 2 });
+ fs.writeFileSync(filePath, content, { mode: 384 });
+ }
+ /**
+ * Add a single datasource (updates existing or appends new)
+ */
+ function addLocalDatasource(datasource, cwd = process.cwd()) {
+ const existing = loadLocalDatasources(cwd);
+ const index = existing.findIndex((ds) => ds.name === datasource.name);
+ if (index >= 0) existing[index] = datasource;
+ else existing.push(datasource);
+ saveLocalDatasources(existing, cwd);
+ }
+ /**
+ * Remove a datasource by name
+ */
+ function removeLocalDatasource(name, cwd = process.cwd()) {
+ const existing = loadLocalDatasources(cwd);
+ const filtered = existing.filter((ds) => ds.name !== name);
+ if (filtered.length === existing.length) return false;
+ saveLocalDatasources(filtered, cwd);
+ return true;
+ }
+ /**
+ * Get a single datasource by name
+ */
+ function getLocalDatasource(name, cwd = process.cwd()) {
+ return loadLocalDatasources(cwd).find((ds) => ds.name === name) ?? null;
+ }
+ /**
+ * Check if a datasource name already exists locally
+ */
+ function datasourceExists(name, cwd = process.cwd()) {
+ return getLocalDatasource(name, cwd) !== null;
+ }
+ /**
+ * Resolve {{ env_var('VAR_NAME') }} patterns in credentials
+ * Used at deploy time to resolve env vars before uploading
+ */
+ function resolveEnvVarsInCredentials(credentials) {
+ const resolved = {};
+ const missing = [];
+ const envVarPattern = /\{\{\s*env_var\(['"]([\w_]+)['"]\)\s*\}\}/;
+ for (const [key, value] of Object.entries(credentials)) {
+ const match = value.match(envVarPattern);
+ if (match) {
+ const varName = match[1];
+ const envValue = process.env[varName];
+ if (envValue !== void 0) resolved[key] = envValue;
+ else {
+ missing.push(varName);
+ resolved[key] = value;
+ }
+ } else resolved[key] = value;
+ }
+ return {
+ resolved,
+ missing
+ };
+ }
+
+ //#endregion
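
A minimal usage sketch of the env-var resolution helper above (the variable names are hypothetical). Note that saveLocalDatasources writes the file with mode 384, i.e. 0o600, so only the owner can read the stored credentials:

    process.env.SNOWFLAKE_PASSWORD = "hunter2";
    const { resolved, missing } = resolveEnvVarsInCredentials({
      username: "analyst",
      password: "{{ env_var('SNOWFLAKE_PASSWORD') }}",
      token: "{{ env_var('UNSET_VAR') }}"
    });
    // resolved.password => "hunter2"
    // resolved.token    => "{{ env_var('UNSET_VAR') }}" (unresolved patterns pass through unchanged)
    // missing           => ["UNSET_VAR"]
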
+ //#region src/lib/local/credentials.ts
+ /**
+ * Credential utilities (git tracking check)
+ */
+ const BON_DIR = ".bon";
+ const DATASOURCES_FILE = "datasources.yaml";
+ function getDatasourcesPath(cwd = process.cwd()) {
+ return path.join(cwd, BON_DIR, DATASOURCES_FILE);
+ }
+ /**
+ * Check if datasources file is tracked by git (it shouldn't be - contains credentials)
+ */
+ function isDatasourcesTrackedByGit(cwd = process.cwd()) {
+ const filePath = getDatasourcesPath(cwd);
+ if (!fs.existsSync(filePath)) return false;
+ try {
+ execFileSync("git", [
+ "ls-files",
+ "--error-unmatch",
+ filePath
+ ], {
+ cwd,
+ stdio: "pipe"
+ });
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ //#endregion
+ //#region src/lib/local/index.ts
+ var local_exports = /* @__PURE__ */ __exportAll({
+ addLocalDatasource: () => addLocalDatasource,
+ datasourceExists: () => datasourceExists,
+ ensureBonDir: () => ensureBonDir,
+ getLocalDatasource: () => getLocalDatasource,
+ isDatasourcesTrackedByGit: () => isDatasourcesTrackedByGit,
+ loadLocalDatasources: () => loadLocalDatasources,
+ removeLocalDatasource: () => removeLocalDatasource,
+ resolveEnvVarsInCredentials: () => resolveEnvVarsInCredentials,
+ saveLocalDatasources: () => saveLocalDatasources
+ });
+
+ //#endregion
+ //#region src/lib/dbt/profiles.ts
+ /**
+ * dbt profiles.yml parser
+ *
+ * Parses ~/.dbt/profiles.yml and extracts connection configs.
+ * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
+ */
+ const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
+ /**
+ * Check if dbt profiles.yml exists
+ */
+ function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
+ return fs.existsSync(profilesPath);
+ }
+ /**
+ * Get the default dbt profiles path
+ */
+ function getDefaultProfilesPath() {
+ return DBT_PROFILES_PATH;
+ }
+ /**
+ * Map dbt type to Bonnard warehouse type
+ */
+ function mapDbtType(dbtType) {
+ return {
+ snowflake: "snowflake",
+ postgres: "postgres",
+ postgresql: "postgres",
+ bigquery: "bigquery",
+ databricks: "databricks"
+ }[dbtType.toLowerCase()] ?? null;
+ }
+ /**
+ * Parse dbt profiles.yml and return all connections
+ * Config values are kept as-is (including {{ env_var(...) }} patterns)
+ */
+ function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
+ if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
+ const content = fs.readFileSync(profilesPath, "utf-8");
+ const profiles = YAML.parse(content);
+ if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
+ const connections = [];
+ for (const [profileName, profile] of Object.entries(profiles)) {
+ if (profileName === "config") continue;
+ if (!profile.outputs || typeof profile.outputs !== "object") continue;
+ const defaultTarget = profile.target || "dev";
+ for (const [targetName, target] of Object.entries(profile.outputs)) {
+ if (!target || typeof target !== "object" || !target.type) continue;
+ const warehouseType = mapDbtType(target.type);
+ if (!warehouseType) continue;
+ connections.push({
+ profileName,
+ targetName,
+ isDefaultTarget: targetName === defaultTarget,
+ type: warehouseType,
+ config: target
+ });
+ }
+ }
+ return connections;
+ }
+
+ //#endregion
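
A sketch of what the parser yields for a hypothetical ~/.dbt/profiles.yml with one profile and two outputs:

    // analytics:
    //   target: prod
    //   outputs:
    //     prod: { type: snowflake, account: xy12345, user: "{{ env_var('DBT_USER') }}" }
    //     dev:  { type: duckdb, path: local.db }
    parseDbtProfiles();
    // => [{
    //   profileName: "analytics",
    //   targetName: "prod",
    //   isDefaultTarget: true,
    //   type: "snowflake",
    //   config: { type: "snowflake", account: "xy12345", user: "{{ env_var('DBT_USER') }}" }
    // }]
    // The duckdb output is skipped (mapDbtType returns null for unsupported types),
    // and env_var patterns are carried through unresolved.
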
+ //#region src/lib/dbt/mapping.ts
+ /**
+ * Generate a datasource name from profile/target
+ */
+ function generateDatasourceName(profileName, targetName) {
+ return `${profileName}-${targetName}`.toLowerCase().replace(/[^a-z0-9-]/g, "-");
+ }
+ /**
+ * Extract string value from config, handling numbers
+ */
+ function getString(config, key) {
+ const value = config[key];
+ if (value === void 0 || value === null) return void 0;
+ return String(value);
+ }
+ /**
+ * Map Snowflake dbt config to Bonnard format
+ */
+ function mapSnowflake(config) {
+ return {
+ config: {
+ ...getString(config, "account") && { account: getString(config, "account") },
+ ...getString(config, "database") && { database: getString(config, "database") },
+ ...getString(config, "warehouse") && { warehouse: getString(config, "warehouse") },
+ ...getString(config, "schema") && { schema: getString(config, "schema") },
+ ...getString(config, "role") && { role: getString(config, "role") }
+ },
+ credentials: {
+ ...getString(config, "user") && { username: getString(config, "user") },
+ ...getString(config, "password") && { password: getString(config, "password") }
+ }
+ };
+ }
+ /**
+ * Map Postgres dbt config to Bonnard format
+ */
+ function mapPostgres(config) {
+ const database = getString(config, "dbname") || getString(config, "database");
+ return {
+ config: {
+ ...getString(config, "host") && { host: getString(config, "host") },
+ ...getString(config, "port") && { port: getString(config, "port") },
+ ...database && { database },
+ ...getString(config, "schema") && { schema: getString(config, "schema") },
+ ...getString(config, "sslmode") && { sslmode: getString(config, "sslmode") }
+ },
+ credentials: {
+ ...getString(config, "user") && { username: getString(config, "user") },
+ ...getString(config, "password") && { password: getString(config, "password") }
+ }
+ };
+ }
+ /**
+ * Map BigQuery dbt config to Bonnard format
+ */
+ function mapBigQuery(config) {
+ const credentials = {};
+ if (config.keyfile && typeof config.keyfile === "string") try {
+ credentials.service_account_json = __require("node:fs").readFileSync(config.keyfile, "utf-8");
+ } catch {
+ credentials.keyfile_path = config.keyfile;
+ }
+ else if (config.keyfile_json) credentials.service_account_json = JSON.stringify(config.keyfile_json);
+ return {
+ config: {
+ ...getString(config, "project") && { project_id: getString(config, "project") },
+ ...getString(config, "dataset") && { dataset: getString(config, "dataset") },
+ ...getString(config, "location") && { location: getString(config, "location") }
+ },
+ credentials
+ };
+ }
+ /**
+ * Map Databricks dbt config to Bonnard format
+ */
+ function mapDatabricks(config) {
+ return {
+ config: {
+ ...getString(config, "host") && { hostname: getString(config, "host") },
+ ...getString(config, "http_path") && { http_path: getString(config, "http_path") },
+ ...getString(config, "catalog") && { catalog: getString(config, "catalog") },
+ ...getString(config, "schema") && { schema: getString(config, "schema") }
+ },
+ credentials: { ...getString(config, "token") && { token: getString(config, "token") } }
+ };
+ }
+ /**
+ * Map a parsed dbt connection to Bonnard format
+ * Values are copied as-is, including {{ env_var(...) }} patterns
+ */
+ function mapDbtConnection(connection) {
+ const { profileName, targetName, type, config } = connection;
+ let mapped;
+ switch (type) {
+ case "snowflake":
+ mapped = mapSnowflake(config);
+ break;
+ case "postgres":
+ mapped = mapPostgres(config);
+ break;
+ case "bigquery":
+ mapped = mapBigQuery(config);
+ break;
+ case "databricks":
+ mapped = mapDatabricks(config);
+ break;
+ default: throw new Error(`Unsupported warehouse type: ${type}`);
+ }
+ return { datasource: {
+ name: generateDatasourceName(profileName, targetName),
+ type,
+ source: "dbt",
+ dbtProfile: profileName,
+ dbtTarget: targetName,
+ config: mapped.config,
+ credentials: mapped.credentials
+ } };
+ }

  //#endregion
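
Continuing that sketch, mapping the parsed connection into Bonnard's datasource shape:

    mapDbtConnection({
      profileName: "analytics",
      targetName: "prod",
      type: "snowflake",
      config: { type: "snowflake", account: "xy12345", user: "{{ env_var('DBT_USER') }}" }
    });
    // => { datasource: {
    //   name: "analytics-prod",  // lowercased; characters outside [a-z0-9-] become "-"
    //   type: "snowflake",
    //   source: "dbt",
    //   dbtProfile: "analytics",
    //   dbtTarget: "prod",
    //   config: { account: "xy12345" },
    //   credentials: { username: "{{ env_var('DBT_USER') }}" }  // still unresolved
    // } }
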
  //#region src/commands/datasource/add.ts
  async function prompts() {
  return import("@inquirer/prompts");
  }
- const WAREHOUSE_TYPES = [
+ const WAREHOUSE_CONFIGS = [
  {
  value: "snowflake",
  label: "Snowflake",
@@ -275,11 +772,14 @@ const WAREHOUSE_TYPES = [
  ],
  credentialFields: [{
  name: "username",
- message: "Username"
+ flag: "user",
+ message: "Username",
+ required: true
  }, {
  name: "password",
  message: "Password",
- secret: true
+ secret: true,
+ required: true
  }]
  },
  {
@@ -293,7 +793,8 @@ const WAREHOUSE_TYPES = [
  },
  {
  name: "port",
- message: "Port (default: 5432)"
+ message: "Port",
+ default: "5432"
  },
  {
  name: "database",
@@ -302,16 +803,20 @@ const WAREHOUSE_TYPES = [
  },
  {
  name: "schema",
- message: "Schema (default: public)"
+ message: "Schema",
+ default: "public"
  }
  ],
  credentialFields: [{
  name: "username",
- message: "Username"
+ flag: "user",
+ message: "Username",
+ required: true
  }, {
  name: "password",
  message: "Password",
- secret: true
+ secret: true,
+ required: true
  }]
  },
  {
@@ -320,6 +825,7 @@ const WAREHOUSE_TYPES = [
  configFields: [
  {
  name: "project_id",
+ flag: "projectId",
  message: "GCP Project ID",
  required: true
  },
@@ -335,7 +841,12 @@ const WAREHOUSE_TYPES = [
  ],
  credentialFields: [{
  name: "service_account_json",
- message: "Service account JSON (paste or path)"
+ flag: "serviceAccountJson",
+ message: "Service account JSON"
+ }, {
+ name: "keyfile_path",
+ flag: "keyfile",
+ message: "Path to service account key file"
  }]
  },
  {
@@ -349,96 +860,799 @@ const WAREHOUSE_TYPES = [
  },
  {
  name: "http_path",
+ flag: "httpPath",
  message: "HTTP path",
  required: true
  },
  {
  name: "catalog",
  message: "Catalog name"
+ },
+ {
+ name: "schema",
+ message: "Schema name"
  }
  ],
  credentialFields: [{
  name: "token",
  message: "Personal access token",
- secret: true
+ secret: true,
+ required: true
  }]
  }
  ];
- async function datasourceAddCommand() {
- const { input, select, password } = await prompts();
- let name;
- while (true) {
- name = await input({ message: "Name for this data source:" });
- const { dataSources } = await get("/api/datasources");
- if (dataSources.some((ds) => ds.name === name)) {
- console.log(pc.red(`A data source named "${name}" already exists. Choose a different name.`));
+ /**
+ * Convert env var name to dbt-style reference
+ */
+ function envVarRef(varName) {
+ return `{{ env_var('${varName}') }}`;
+ }
+ /**
+ * Format warehouse type for display
+ */
+ function formatType$1(type) {
+ return {
+ snowflake: "Snowflake",
+ postgres: "Postgres",
+ bigquery: "BigQuery",
+ databricks: "Databricks"
+ }[type] || type;
+ }
+ /**
+ * Get value from options, checking both direct and flag name
+ */
+ function getOptionValue(options, field) {
+ return options[(field.flag || field.name).replace(/_([a-z])/g, (_, c) => c.toUpperCase())];
+ }
+ /**
+ * Check if running in non-interactive mode (name and type provided via flags)
+ */
+ function isNonInteractive(options) {
+ return !!(options.name && options.type);
+ }
+ /**
+ * Import datasources from dbt profiles.yml
+ */
+ async function importFromDbt(options) {
+ const profilesPath = getDefaultProfilesPath();
+ if (!dbtProfilesExist(profilesPath)) {
+ console.error(pc.red(`dbt profiles not found at ${profilesPath}`));
+ console.log(pc.dim("Make sure dbt is configured with ~/.dbt/profiles.yml"));
+ process.exit(1);
+ }
+ let connections;
+ try {
+ connections = parseDbtProfiles(profilesPath);
+ } catch (err) {
+ console.error(pc.red(`Failed to parse dbt profiles: ${err.message}`));
+ process.exit(1);
+ }
+ if (connections.length === 0) {
+ console.log(pc.yellow("No supported connections found in dbt profiles."));
+ console.log(pc.dim("Supported types: snowflake, postgres, bigquery, databricks"));
+ process.exit(0);
+ }
+ if (typeof options.fromDbt === "string") {
+ const parts = options.fromDbt.split("/");
+ const profileName = parts[0];
+ const targetName = options.target || parts[1];
+ const filtered = connections.filter((c) => {
+ if (c.profileName !== profileName) return false;
+ if (targetName) return c.targetName === targetName;
+ return c.isDefaultTarget;
+ });
+ if (filtered.length === 0) {
+ console.error(pc.red(`Profile "${profileName}"${targetName ? `/${targetName}` : ""} not found`));
+ process.exit(1);
+ }
+ await importConnections(filtered);
+ return;
+ }
+ if (options.all) {
+ await importConnections(connections);
+ return;
+ }
+ if (options.defaultTargets) {
+ await importConnections(connections.filter((c) => c.isDefaultTarget));
+ return;
+ }
+ const { checkbox } = await prompts();
+ console.log();
+ console.log(pc.bold(`Found ${connections.length} connections in ~/.dbt/profiles.yml:`));
+ console.log();
+ const selected = await checkbox({
+ message: "Select connections to import:",
+ choices: connections.map((conn) => {
+ const name = `${conn.profileName}/${conn.targetName}`;
+ const typeLabel = formatType$1(conn.type);
+ const defaultLabel = conn.isDefaultTarget ? pc.cyan(" (default)") : "";
+ return {
+ name: `${name.padEnd(30)} ${typeLabel}${defaultLabel}`,
+ value: conn,
+ checked: conn.isDefaultTarget
+ };
+ }),
+ pageSize: 15
+ });
+ if (selected.length === 0) {
+ console.log(pc.yellow("No connections selected."));
+ return;
+ }
+ await importConnections(selected);
+ }
+ /**
+ * Import selected connections
+ */
+ async function importConnections(connections) {
+ console.log();
+ if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
+ let imported = 0;
+ let skipped = 0;
+ for (const conn of connections) {
+ const { profileName, targetName } = conn;
+ const name = generateDatasourceName(profileName, targetName);
+ if (datasourceExists(name)) {
+ console.log(pc.dim(`• ${profileName}/${targetName} → ${name} (already exists, skipped)`));
+ skipped++;
  continue;
  }
- break;
+ try {
+ addLocalDatasource(mapDbtConnection(conn).datasource);
+ console.log(pc.green(`✓ ${profileName}/${targetName} → ${name} (${conn.type})`));
+ imported++;
+ } catch (err) {
+ console.log(pc.red(`✗ ${profileName}/${targetName}: ${err.message}`));
+ skipped++;
+ }
+ }
+ console.log();
+ if (imported > 0) {
+ console.log(pc.green(`Imported ${imported} datasource${imported !== 1 ? "s" : ""}`));
+ console.log(pc.dim(" .bon/datasources.yaml"));
  }
- const warehouseType = await select({
+ if (skipped > 0) console.log(pc.dim(`Skipped ${skipped} connection${skipped !== 1 ? "s" : ""}`));
+ }
+ /**
+ * Add datasource manually (with flags and/or interactive prompts)
+ */
+ async function addManual(options) {
+ const { input, select, password, confirm } = await prompts();
+ const nonInteractive = isNonInteractive(options);
+ if (isDatasourcesTrackedByGit()) console.log(pc.yellow("Warning: .bon/datasources.yaml is tracked by git. Add it to .gitignore!"));
+ let name = options.name;
+ if (!name) name = await input({ message: "Datasource name:" });
+ if (datasourceExists(name)) {
+ if (options.force) console.log(pc.dim(`Overwriting existing datasource "${name}"`));
+ else if (nonInteractive) {
+ console.error(pc.red(`Datasource "${name}" already exists. Use --force to overwrite.`));
+ process.exit(1);
+ } else if (!await confirm({
+ message: `Datasource "${name}" already exists. Overwrite?`,
+ default: false
+ })) {
+ console.log(pc.yellow("Cancelled."));
+ return;
+ }
+ }
+ let warehouseType = options.type;
+ if (!warehouseType) warehouseType = await select({
  message: "Warehouse type:",
- choices: WAREHOUSE_TYPES.map((w) => ({
+ choices: WAREHOUSE_CONFIGS.map((w) => ({
  name: w.label,
  value: w.value
  }))
  });
- const wt = WAREHOUSE_TYPES.find((w) => w.value === warehouseType);
+ const warehouseConfig = WAREHOUSE_CONFIGS.find((w) => w.value === warehouseType);
+ if (!warehouseConfig) {
+ console.error(pc.red(`Invalid warehouse type: ${warehouseType}`));
+ console.log(pc.dim("Valid types: snowflake, postgres, bigquery, databricks"));
+ process.exit(1);
+ }
  const config = {};
- for (const field of wt.configFields) {
- const value = await input({
- message: field.message,
- required: field.required
- });
+ for (const field of warehouseConfig.configFields) {
+ let value = getOptionValue(options, field);
+ if (!value && !nonInteractive) {
+ const defaultHint = field.default ? ` (default: ${field.default})` : "";
+ value = await input({
+ message: field.message + defaultHint + ":",
+ default: field.default
+ });
+ }
  if (value) config[field.name] = value;
+ else if (field.required) {
+ console.error(pc.red(`Missing required field: ${field.name}`));
+ process.exit(1);
+ }
  }
  const credentials = {};
- for (const field of wt.credentialFields) {
- const value = field.secret ? await password({ message: field.message }) : await input({ message: field.message });
+ for (const field of warehouseConfig.credentialFields) {
+ let value;
+ if (field.name === "password" && options.passwordEnv) value = envVarRef(options.passwordEnv);
+ else if (field.name === "token" && options.tokenEnv) value = envVarRef(options.tokenEnv);
+ else value = getOptionValue(options, field);
+ if (!value && !nonInteractive) if (field.secret) value = await password({ message: field.message + ":" });
+ else value = await input({ message: field.message + ":" });
  if (value) credentials[field.name] = value;
+ else if (field.required) {
+ console.error(pc.red(`Missing required credential: ${field.name}`));
+ process.exit(1);
+ }
  }
- try {
- const result = await post("/api/datasources", {
- name,
- warehouse_type: warehouseType,
- config,
- credentials
- });
- console.log(pc.green(`Data source "${result.dataSource.name}" created (${result.dataSource.id})`));
- } catch (err) {
- console.error(pc.red(`Failed to create data source: ${err.message}`));
- process.exit(1);
- }
+ addLocalDatasource({
+ name,
+ type: warehouseType,
+ source: "manual",
+ config,
+ credentials
+ });
+ console.log();
+ console.log(pc.green(`✓ Datasource "${name}" saved to .bon/datasources.yaml`));
+ console.log();
+ console.log(pc.dim(`Test connection: bon datasource test ${name}`));
+ }
+ /**
+ * Main datasource add command
+ */
+ async function datasourceAddCommand(options = {}) {
+ if (options.fromDbt !== void 0) await importFromDbt(options);
+ else await addManual(options);
  }

  //#endregion
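
The rewritten add command takes an options object and can run fully non-interactively once name and type are supplied; a sketch (the option keys mirror the camelCased field/flag names read by getOptionValue, while the CLI flag spellings themselves are registered elsewhere in the bundle):

    // Manual, non-interactive: missing required fields or credentials exit with an error.
    await datasourceAddCommand({
      name: "warehouse-prod",
      type: "postgres",
      host: "db.example.com",
      database: "analytics",
      user: "analyst",
      passwordEnv: "PG_PASSWORD" // stored as "{{ env_var('PG_PASSWORD') }}"
    });
    // dbt import variants:
    await datasourceAddCommand({ fromDbt: "analytics/prod" });           // one profile/target
    await datasourceAddCommand({ fromDbt: true, defaultTargets: true }); // default targets only
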
  //#region src/commands/datasource/list.ts
- async function datasourceListCommand() {
+ /**
+ * Format warehouse type for display
+ */
+ function formatType(type) {
+ return {
+ snowflake: "Snowflake",
+ postgres: "Postgres",
+ bigquery: "BigQuery",
+ databricks: "Databricks"
+ }[type] || type;
+ }
+ /**
+ * Format source for display
+ */
+ function formatSource(source) {
+ return {
+ dbt: "dbt",
+ manual: "manual",
+ mcp: "mcp"
+ }[source] || source;
+ }
+ /**
+ * List local datasources
+ */
+ function listLocalDatasources() {
+ const datasources = loadLocalDatasources();
+ if (datasources.length === 0) {
+ console.log(pc.dim("No local data sources found."));
+ console.log(pc.dim("Run `bon datasource add` or `bon datasource add --from-dbt` to create one."));
+ return;
+ }
+ console.log(pc.bold("Local Data Sources") + pc.dim(" (.bon/datasources.yaml)"));
+ console.log();
+ const maxNameLen = Math.max(...datasources.map((ds) => ds.name.length), 4);
+ const maxTypeLen = Math.max(...datasources.map((ds) => formatType(ds.type).length), 4);
+ const header = ` ${"NAME".padEnd(maxNameLen)} ${"TYPE".padEnd(maxTypeLen)} SOURCE ORIGIN`;
+ console.log(pc.dim(header));
+ console.log(pc.dim(" " + "─".repeat(header.length - 2)));
+ for (const ds of datasources) {
+ const name = ds.name.padEnd(maxNameLen);
+ const type = formatType(ds.type).padEnd(maxTypeLen);
+ const source = formatSource(ds.source).padEnd(10);
+ let origin = "";
+ if (ds.source === "dbt" && ds.dbtProfile) origin = `${ds.dbtProfile}/${ds.dbtTarget}`;
+ console.log(` ${pc.bold(name)} ${type} ${source} ${pc.dim(origin)}`);
+ }
+ console.log();
+ console.log(pc.dim(`${datasources.length} datasource${datasources.length !== 1 ? "s" : ""}`));
+ }
+ /**
+ * List remote datasources (requires login)
+ */
+ async function listRemoteDatasources() {
+ if (!loadCredentials()) {
+ console.log(pc.dim("Not logged in. Run `bon login` to see remote data sources."));
+ return;
+ }
  try {
+ const { get } = await Promise.resolve().then(() => api_exports);
  const result = await get("/api/datasources");
  if (result.dataSources.length === 0) {
- console.log(pc.dim("No data sources found. Run `bon datasource add` to create one."));
+ console.log(pc.dim("No remote data sources found."));
  return;
  }
- console.log(pc.bold("Data Sources\n"));
+ console.log(pc.bold("Remote Data Sources") + pc.dim(" (Bonnard server)"));
+ console.log();
+ const maxNameLen = Math.max(...result.dataSources.map((ds) => ds.name.length), 4);
+ const maxTypeLen = Math.max(...result.dataSources.map((ds) => ds.warehouse_type.length), 4);
+ const header = ` ${"NAME".padEnd(maxNameLen)} ${"TYPE".padEnd(maxTypeLen)} STATUS`;
+ console.log(pc.dim(header));
+ console.log(pc.dim(" " + "─".repeat(header.length - 2)));
  for (const ds of result.dataSources) {
+ const name = ds.name.padEnd(maxNameLen);
+ const type = ds.warehouse_type.padEnd(maxTypeLen);
  const statusColor = ds.status === "active" ? pc.green : ds.status === "error" ? pc.red : pc.yellow;
- console.log(` ${pc.bold(ds.name)}`);
- console.log(` ID: ${pc.dim(ds.id)}`);
- console.log(` Type: ${ds.warehouse_type}`);
- console.log(` Status: ${statusColor(ds.status)}`);
- console.log(` Created: ${new Date(ds.created_at).toLocaleDateString()}`);
- console.log();
+ console.log(` ${pc.bold(name)} ${type} ${statusColor(ds.status)}`);
  }
+ console.log();
+ console.log(pc.dim(`${result.dataSources.length} datasource${result.dataSources.length !== 1 ? "s" : ""}`));
  } catch (err) {
- console.error(pc.red(`Failed to list data sources: ${err.message}`));
- process.exit(1);
+ console.log(pc.yellow(`Could not fetch remote sources: ${err.message}`));
+ }
+ }
+ /**
+ * Main list command
+ */
+ async function datasourceListCommand(options = {}) {
+ const showLocal = options.local || !options.local && !options.remote;
+ const showRemote = options.remote || !options.local && !options.remote;
+ if (showLocal) listLocalDatasources();
+ if (showLocal && showRemote) console.log();
+ if (showRemote) await listRemoteDatasources();
+ }
+
+ //#endregion
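
The list command's local/remote defaulting, shown as direct calls (a sketch; flag wiring lives elsewhere in the bundle):

    await datasourceListCommand();                 // no flags: local and remote sections
    await datasourceListCommand({ local: true });  // only .bon/datasources.yaml
    await datasourceListCommand({ remote: true }); // only the Bonnard server (requires login)
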
+ //#region src/lib/connection/snowflake.ts
+ /**
+ * Snowflake connection testing and querying
+ */
+ const require$4 = createRequire(import.meta.url);
+ function loadSnowflake() {
+ try {
+ const snowflake = require$4("snowflake-sdk");
+ snowflake.configure({ logLevel: "ERROR" });
+ return snowflake;
+ } catch {
+ return null;
+ }
+ }
+ async function testSnowflakeConnection(config, credentials) {
+ const snowflake = loadSnowflake();
+ if (!snowflake) return {
+ success: false,
+ message: "Snowflake driver not installed",
+ error: "Run: pnpm add snowflake-sdk"
+ };
+ const startTime = Date.now();
+ return new Promise((resolve) => {
+ const connection = snowflake.createConnection({
+ account: config.account,
+ username: credentials.username,
+ password: credentials.password,
+ database: config.database,
+ warehouse: config.warehouse,
+ schema: config.schema,
+ role: config.role
+ });
+ connection.connect((err) => {
+ if (err) {
+ resolve({
+ success: false,
+ message: "Connection failed",
+ error: err.message
+ });
+ return;
+ }
+ connection.execute({
+ sqlText: "SELECT 1",
+ complete: (queryErr) => {
+ const latencyMs = Date.now() - startTime;
+ connection.destroy(() => {});
+ if (queryErr) resolve({
+ success: false,
+ message: "Query failed",
+ error: queryErr.message,
+ latencyMs
+ });
+ else resolve({
+ success: true,
+ message: "Connection successful",
+ latencyMs
+ });
+ }
+ });
+ });
+ });
+ }
+ async function querySnowflake(config, credentials, sql, options = {}) {
+ const snowflake = loadSnowflake();
+ if (!snowflake) return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: "Snowflake driver not installed. Run: pnpm add snowflake-sdk"
+ };
+ const limit = options.limit ?? 1e3;
+ return new Promise((resolve) => {
+ const connection = snowflake.createConnection({
+ account: config.account,
+ username: credentials.username,
+ password: credentials.password,
+ database: options.database || config.database,
+ warehouse: config.warehouse,
+ schema: options.schema || config.schema,
+ role: config.role
+ });
+ connection.connect((err) => {
+ if (err) {
+ resolve({
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: err.message
+ });
+ return;
+ }
+ connection.execute({
+ sqlText: sql,
+ complete: (queryErr, _stmt, rows) => {
+ connection.destroy(() => {});
+ if (queryErr) {
+ resolve({
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: queryErr.message
+ });
+ return;
+ }
+ const allRows = rows || [];
+ const truncated = allRows.length > limit;
+ const resultRows = truncated ? allRows.slice(0, limit) : allRows;
+ resolve({
+ columns: resultRows.length > 0 ? Object.keys(resultRows[0]) : [],
+ rows: resultRows,
+ rowCount: resultRows.length,
+ truncated
+ });
+ }
+ });
+ });
+ });
+ }
+
+ //#endregion
+ //#region src/lib/connection/postgres.ts
+ /**
+ * Postgres connection testing and querying
+ */
+ const require$3 = createRequire(import.meta.url);
+ function loadPg() {
+ try {
+ return require$3("pg");
+ } catch {
+ return null;
+ }
+ }
+ function createClient(config, credentials, pg) {
+ return new pg.Client({
+ host: config.host,
+ port: config.port ? parseInt(config.port, 10) : 5432,
+ database: config.database,
+ user: credentials.username,
+ password: credentials.password,
+ ssl: config.sslmode === "require" ? { rejectUnauthorized: false } : void 0
+ });
+ }
+ async function testPostgresConnection(config, credentials) {
+ const pg = loadPg();
+ if (!pg) return {
+ success: false,
+ message: "Postgres driver not installed",
+ error: "Run: pnpm add pg"
+ };
+ const startTime = Date.now();
+ const client = createClient(config, credentials, pg);
+ try {
+ await client.connect();
+ await client.query("SELECT 1");
+ const latencyMs = Date.now() - startTime;
+ await client.end();
+ return {
+ success: true,
+ message: "Connection successful",
+ latencyMs
+ };
+ } catch (err) {
+ try {
+ await client.end();
+ } catch {}
+ return {
+ success: false,
+ message: "Connection failed",
+ error: err.message
+ };
+ }
+ }
+ async function queryPostgres(config, credentials, sql, options = {}) {
+ const pg = loadPg();
+ if (!pg) return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: "Postgres driver not installed. Run: pnpm add pg"
+ };
+ const limit = options.limit ?? 1e3;
+ const client = createClient(config, credentials, pg);
+ try {
+ await client.connect();
+ const schema = options.schema || config.schema;
+ if (schema) await client.query(`SET search_path TO ${schema}`);
+ const result = await client.query(sql);
+ await client.end();
+ const columns = result.fields?.map((f) => f.name) || [];
+ const allRows = result.rows || [];
+ const truncated = allRows.length > limit;
+ const rows = truncated ? allRows.slice(0, limit) : allRows;
+ return {
+ columns,
+ rows,
+ rowCount: rows.length,
+ truncated
+ };
+ } catch (err) {
+ try {
+ await client.end();
+ } catch {}
+ return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: err.message
+ };
+ }
+ }
+
+ //#endregion
+ //#region src/lib/connection/bigquery.ts
+ /**
+ * BigQuery connection testing
+ */
+ const require$2 = createRequire(import.meta.url);
+ async function testBigQueryConnection(config, credentials) {
+ let BigQuery;
+ try {
+ BigQuery = require$2("@google-cloud/bigquery").BigQuery;
+ } catch {
+ return {
+ success: false,
+ message: "BigQuery driver not installed",
+ error: "Run: pnpm add @google-cloud/bigquery"
+ };
+ }
+ const startTime = Date.now();
+ try {
+ const options = { projectId: config.project_id };
+ if (config.location) options.location = config.location;
+ if (credentials.service_account_json) options.credentials = JSON.parse(credentials.service_account_json);
+ else if (credentials.keyfile_path) options.keyFilename = credentials.keyfile_path;
+ await new BigQuery(options).query("SELECT 1");
+ return {
+ success: true,
+ message: "Connection successful",
+ latencyMs: Date.now() - startTime
+ };
+ } catch (err) {
+ return {
+ success: false,
+ message: "Connection failed",
+ error: err.message
+ };
+ }
+ }
+
+ //#endregion
+ //#region src/lib/connection/databricks.ts
+ /**
+ * Databricks connection testing
+ */
+ const require$1 = createRequire(import.meta.url);
+ async function testDatabricksConnection(config, credentials) {
+ let DBSQLClient;
+ try {
+ const module = require$1("@databricks/sql");
+ DBSQLClient = module.default || module;
+ } catch {
+ return {
+ success: false,
+ message: "Databricks driver not installed",
+ error: "Run: pnpm add @databricks/sql"
+ };
+ }
+ const startTime = Date.now();
+ const client = new DBSQLClient();
+ try {
+ const connection = await client.connect({
+ host: config.hostname,
+ path: config.http_path,
+ token: credentials.token
+ });
+ const session = await connection.openSession({
+ initialCatalog: config.catalog,
+ initialSchema: config.schema
+ });
+ const operation = await session.executeStatement("SELECT 1");
+ await operation.fetchAll();
+ await operation.close();
+ const latencyMs = Date.now() - startTime;
+ await session.close();
+ await connection.close();
+ return {
+ success: true,
+ message: "Connection successful",
+ latencyMs
+ };
+ } catch (err) {
+ try {
+ await client.close();
+ } catch {}
+ return {
+ success: false,
+ message: "Connection failed",
+ error: err.message
+ };
+ }
+ }
+
+ //#endregion
+ //#region src/lib/connection/index.ts
+ var connection_exports = /* @__PURE__ */ __exportAll({
+ executeQuery: () => executeQuery,
+ testConnection: () => testConnection
+ });
+ /**
+ * Test connection to a datasource
+ */
+ async function testConnection(datasource) {
+ const { type, config, credentials } = datasource;
+ switch (type) {
+ case "snowflake": return testSnowflakeConnection({
+ account: config.account,
+ database: config.database,
+ warehouse: config.warehouse,
+ schema: config.schema,
+ role: config.role
+ }, {
+ username: credentials.username,
+ password: credentials.password
+ });
+ case "postgres": return testPostgresConnection({
+ host: config.host,
+ port: config.port,
+ database: config.database,
+ schema: config.schema,
+ sslmode: config.sslmode
+ }, {
+ username: credentials.username,
+ password: credentials.password
+ });
+ case "bigquery": return testBigQueryConnection({
+ project_id: config.project_id,
+ dataset: config.dataset,
+ location: config.location
+ }, {
+ service_account_json: credentials.service_account_json,
+ keyfile_path: credentials.keyfile_path
+ });
+ case "databricks": return testDatabricksConnection({
+ hostname: config.hostname,
+ http_path: config.http_path,
+ catalog: config.catalog,
+ schema: config.schema
+ }, { token: credentials.token });
+ default: return {
+ success: false,
+ message: `Unsupported warehouse type: ${type}`
+ };
+ }
+ }
+ /**
+ * Execute a query against a datasource
+ */
+ async function executeQuery(datasource, sql, options = {}) {
+ const { type, config, credentials } = datasource;
+ switch (type) {
+ case "snowflake": return querySnowflake({
+ account: config.account,
+ database: config.database,
+ warehouse: config.warehouse,
+ schema: config.schema,
+ role: config.role
+ }, {
+ username: credentials.username,
+ password: credentials.password
+ }, sql, options);
+ case "postgres": return queryPostgres({
+ host: config.host,
+ port: config.port,
+ database: config.database,
+ schema: config.schema,
+ sslmode: config.sslmode
+ }, {
+ username: credentials.username,
+ password: credentials.password
+ }, sql, options);
+ case "bigquery": return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: "BigQuery local querying not yet implemented"
+ };
+ case "databricks": return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: "Databricks local querying not yet implemented"
+ };
+ default: return {
+ columns: [],
+ rows: [],
+ rowCount: 0,
+ truncated: false,
+ error: `Unsupported warehouse type: ${type}`
+ };
+ }
+ }

  //#endregion
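
A sketch of the dispatch above against a hypothetical local Postgres datasource:

    const ds = {
      type: "postgres",
      config: { host: "localhost", port: "5432", database: "analytics", schema: "public" },
      credentials: { username: "analyst", password: "hunter2" }
    };
    await testConnection(ds);
    // => { success: true, message: "Connection successful", latencyMs: 12 } on success,
    //    or { success: false, message: "Postgres driver not installed", error: "Run: pnpm add pg" }
    //    when the optional driver is absent.
    await executeQuery(ds, "SELECT 1", { limit: 100 });
    // => { columns, rows, rowCount, truncated }; rows are sliced to the limit (default 1000).
    //    BigQuery and Databricks currently return an error: local querying is not yet implemented.
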
440
1608
  //#region src/commands/datasource/test.ts
441
- async function datasourceTestCommand(name) {
1609
+ async function datasourceTestCommand(name, options = {}) {
1610
+ const localDs = getLocalDatasource(name);
1611
+ if (options.remote || !localDs) {
1612
+ await testRemote(name, !localDs);
1613
+ return;
1614
+ }
1615
+ await testLocal(name, localDs);
1616
+ }
1617
+ /**
1618
+ * Test datasource locally using direct connection
1619
+ */
1620
+ async function testLocal(name, ds) {
1621
+ console.log(pc.dim(`Testing ${name} locally...`));
1622
+ console.log();
1623
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
1624
+ if (missing.length > 0) {
1625
+ console.log(pc.red(`Missing environment variables: ${missing.join(", ")}`));
1626
+ console.log(pc.dim("Set these env vars or update .bon/datasources.yaml with actual values."));
1627
+ process.exit(1);
1628
+ }
1629
+ const result = await testConnection({
1630
+ type: ds.type,
1631
+ config: ds.config,
1632
+ credentials: resolved
1633
+ });
1634
+ if (result.success) {
1635
+ console.log(pc.green(`✓ ${result.message}`));
1636
+ if (result.latencyMs) console.log(pc.dim(` Latency: ${result.latencyMs}ms`));
1637
+ } else {
1638
+ console.log(pc.red(`✗ ${result.message}`));
1639
+ if (result.error) console.log(pc.dim(` ${result.error}`));
1640
+ process.exit(1);
1641
+ }
1642
+ }
1643
+ /**
1644
+ * Test datasource via remote API (requires login)
1645
+ */
1646
+ async function testRemote(name, notFoundLocally) {
1647
+ if (!loadCredentials()) {
1648
+ if (notFoundLocally) {
1649
+ console.log(pc.red(`Datasource "${name}" not found locally.`));
1650
+ console.log(pc.dim("Run `bon datasource add` to create it, or `bon login` to test remote datasources."));
1651
+ } else console.log(pc.red("Not logged in. Run `bon login` to test remote datasources."));
1652
+ process.exit(1);
1653
+ }
1654
+ console.log(pc.dim(`Testing ${name} via remote API...`));
1655
+ console.log();
442
1656
  try {
443
1657
  const result = await post("/api/datasources/test", { name });
444
1658
  if (result.success) {
@@ -448,7 +1662,10 @@ async function datasourceTestCommand(name) {
  if (result.details.account) console.log(pc.dim(` Account: ${result.details.account}`));
  if (result.details.latencyMs != null) console.log(pc.dim(` Latency: ${result.details.latencyMs}ms`));
  }
- } else console.log(pc.red(result.message));
+ } else {
+ console.log(pc.red(result.message));
+ process.exit(1);
+ }
  } catch (err) {
  console.error(pc.red(`Failed to test data source: ${err.message}`));
  process.exit(1);
@@ -457,60 +1674,176 @@ async function datasourceTestCommand(name) {
 
  //#endregion
  //#region src/commands/datasource/remove.ts
- async function datasourceRemoveCommand(name) {
+ async function datasourceRemoveCommand(name, options = {}) {
+ const existsLocally = datasourceExists(name);
+ if (options.remote) {
+ await removeRemote(name);
+ return;
+ }
+ if (existsLocally) {
+ if (removeLocalDatasource(name)) console.log(pc.green(`✓ Removed "${name}" from local storage`));
+ } else {
+ console.error(pc.red(`Datasource "${name}" not found.`));
+ console.log(pc.dim("Use --remote to remove from remote server."));
+ process.exit(1);
+ }
+ }
+ /**
+ * Remove datasource from remote server
+ */
+ async function removeRemote(name) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` to remove remote datasources."));
+ process.exit(1);
+ }
  try {
+ const { del } = await Promise.resolve().then(() => api_exports);
  await del(`/api/datasources/${encodeURIComponent(name)}`);
- console.log(pc.green(`Data source "${name}" removed.`));
+ console.log(pc.green(`✓ Removed "${name}" from remote server`));
  } catch (err) {
- console.error(pc.red(`Failed to remove data source: ${err.message}`));
+ console.error(pc.red(`Failed to remove remote datasource: ${err.message}`));
  process.exit(1);
  }
  }
 
  //#endregion
- //#region src/commands/query.ts
- async function queryCommand(datasourceName, sql, options) {
- const limit = options.limit ? parseInt(options.limit, 10) : 1e3;
- const format = options.format ?? "toon";
+ //#region src/commands/datasource/push.ts
+ var push_exports = /* @__PURE__ */ __exportAll({
+ datasourcePushCommand: () => datasourcePushCommand,
+ pushDatasource: () => pushDatasource
+ });
+ /**
+ * Push a local datasource to Bonnard server
+ */
+ async function datasourcePushCommand(name, options = {}) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` first."));
+ process.exit(1);
+ }
+ const datasource = getLocalDatasource(name);
+ if (!datasource) {
+ console.error(pc.red(`Datasource "${name}" not found in .bon/datasources.yaml`));
+ console.log(pc.dim("Run `bon datasource list --local` to see available datasources."));
+ process.exit(1);
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+ if (missing.length > 0) {
+ console.error(pc.red(`Missing environment variables: ${missing.join(", ")}`));
+ console.log(pc.dim("Set them in your environment or use plain values in .bon/datasources.yaml"));
+ process.exit(1);
+ }
  try {
- const result = await post("/api/datasources/query", {
- name: datasourceName,
- sql,
- options: {
- schema: options.schema,
- database: options.database,
- limit
+ if ((await getRemoteDatasources()).some((ds) => ds.name === name) && !options.force) {
+ if (!await confirm({
+ message: `Datasource "${name}" already exists on remote. Overwrite?`,
+ default: false
+ })) {
+ console.log(pc.dim("Aborted."));
+ process.exit(0);
  }
+ }
+ } catch (err) {
+ console.log(pc.dim(`Note: Could not check remote datasources: ${err.message}`));
+ }
+ console.log(pc.dim(`Pushing "${name}"...`));
+ try {
+ await post("/api/datasources", {
+ name: datasource.name,
+ warehouse_type: datasource.type,
+ config: datasource.config,
+ credentials: resolved
  });
- if (result.error) {
- console.error(pc.red(result.error));
+ console.log(pc.green(`✓ Datasource "${name}" pushed to Bonnard`));
+ } catch (err) {
+ const message = err.message;
+ if (message.includes("already exists")) {
+ console.error(pc.red(`Datasource "${name}" already exists on remote.`));
+ console.log(pc.dim("Use --force to overwrite."));
  process.exit(1);
  }
- if (result.rowCount === 0) {
- console.log("No rows returned.");
- return;
- }
- if (format === "json") console.log(JSON.stringify(result, null, 2));
- else {
- const toon = encode({ results: result.rows });
- console.log(toon);
- }
- if (result.truncated) console.log(pc.dim(`(truncated to ${result.rowCount} rows)`));
- } catch (err) {
- console.error(pc.red(`Query failed: ${err.message}`));
+ console.error(pc.red(`Failed to push datasource: ${message}`));
+ process.exit(1);
+ }
+ }
+ /**
+ * Push a datasource programmatically (for use by deploy command)
+ * Returns true on success, false on failure
+ */
+ async function pushDatasource(name, options = {}) {
+ const datasource = getLocalDatasource(name);
+ if (!datasource) {
+ if (!options.silent) console.error(pc.red(`Datasource "${name}" not found locally`));
+ return false;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(datasource.credentials);
+ if (missing.length > 0) {
+ if (!options.silent) console.error(pc.red(`Missing env vars for "${name}": ${missing.join(", ")}`));
+ return false;
+ }
+ try {
+ await post("/api/datasources", {
+ name: datasource.name,
+ warehouse_type: datasource.type,
+ config: datasource.config,
+ credentials: resolved
+ });
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ //#endregion
+ //#region src/commands/preview.ts
+ async function previewCommand(datasourceName, sql, options) {
+ const limit = options.limit ? parseInt(options.limit, 10) : 1e3;
+ const format = options.format ?? "toon";
+ const ds = getLocalDatasource(datasourceName);
+ if (!ds) {
+ console.error(pc.red(`Datasource "${datasourceName}" not found in .bon/datasources.yaml`));
+ console.log(pc.dim("Run `bon datasource add` to create it."));
+ process.exit(1);
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.error(pc.red(`Missing environment variables: ${missing.join(", ")}`));
+ console.log(pc.dim("Set these env vars or update .bon/datasources.yaml with actual values."));
  process.exit(1);
  }
+ const result = await executeQuery({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ }, sql, {
+ limit,
+ schema: options.schema,
+ database: options.database
+ });
+ if (result.error) {
+ console.error(pc.red(result.error));
+ process.exit(1);
+ }
+ if (result.rowCount === 0) {
+ console.log("No rows returned.");
+ return;
+ }
+ if (format === "json") console.log(JSON.stringify(result, null, 2));
+ else {
+ const toon = encode({ results: result.rows });
+ console.log(toon);
+ }
+ if (result.truncated) console.log(pc.dim(`(truncated to ${result.rowCount} rows)`));
  }
 
  //#endregion
  //#region src/commands/validate.ts
- async function validateCommand() {
+ async function validateCommand(options = {}) {
  const cwd = process.cwd();
  if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
- const { validate } = await import("./validate-Bd1D39Bj.mjs");
+ const { validate } = await import("./validate-C4EHvJzJ.mjs");
  const result = await validate(cwd);
  if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
  console.log(pc.yellow("No model or view files found in models/ or views/."));
@@ -525,6 +1858,75 @@ async function validateCommand() {
  console.log();
  if (result.cubes.length > 0) console.log(` ${pc.dim("Cubes")} (${result.cubes.length}): ${result.cubes.join(", ")}`);
  if (result.views.length > 0) console.log(` ${pc.dim("Views")} (${result.views.length}): ${result.views.join(", ")}`);
+ if (result.missingDescriptions.length > 0) {
+ console.log();
+ console.log(pc.yellow(`⚠ ${result.missingDescriptions.length} items missing descriptions`));
+ console.log(pc.dim(" Descriptions help AI agents and analysts discover the right metrics."));
+ const byParent = /* @__PURE__ */ new Map();
+ for (const m of result.missingDescriptions) {
+ const list = byParent.get(m.parent) || [];
+ const label = m.type === "cube" || m.type === "view" ? `(${m.type})` : m.name;
+ list.push(label);
+ byParent.set(m.parent, list);
+ }
+ for (const [parent, items] of byParent) console.log(pc.dim(` ${parent}: ${items.join(", ")}`));
+ }
+ if (options.testConnection) {
+ console.log();
+ await testReferencedConnections(cwd);
+ }
+ }
+ /**
+ * Test connections for datasources referenced by models
+ * Lenient: warns but doesn't fail validation
+ */
+ async function testReferencedConnections(cwd) {
+ const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
+ const { testConnection } = await Promise.resolve().then(() => connection_exports);
+ const references = extractDatasourcesFromModels(cwd);
+ if (references.length === 0) {
+ console.log(pc.dim("No datasource references found in models."));
+ return;
+ }
+ console.log(pc.bold("Testing connections..."));
+ console.log();
+ const localDatasources = loadLocalDatasources(cwd);
+ let warnings = 0;
+ for (const ref of references) {
+ const ds = localDatasources.find((d) => d.name === ref.name);
+ if (!ds) {
+ console.log(pc.yellow(`⚠ ${ref.name}: not found in .bon/datasources.yaml`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ continue;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.log(pc.yellow(`⚠ ${ref.name}: missing env vars: ${missing.join(", ")}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ continue;
+ }
+ const result = await testConnection({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ });
+ if (result.success) {
+ const latency = result.latencyMs ? pc.dim(` (${result.latencyMs}ms)`) : "";
+ console.log(pc.green(`✓ ${ref.name}${latency}`));
+ } else {
+ console.log(pc.yellow(`⚠ ${ref.name}: ${result.error || result.message}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ warnings++;
+ }
+ }
+ if (warnings > 0) {
+ console.log();
+ console.log(pc.yellow(`${warnings} connection warning(s)`));
+ console.log(pc.dim("Connection issues won't block file validation, but will fail at deploy."));
+ }
  }
 
  //#endregion
@@ -545,14 +1947,14 @@ function collectFiles(dir, rootDir) {
  walk(dir);
  return files;
  }
- async function deployCommand() {
+ async function deployCommand(options = {}) {
  const cwd = process.cwd();
  if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
  console.log(pc.dim("Validating models..."));
- const { validate } = await import("./validate-Bd1D39Bj.mjs");
+ const { validate } = await import("./validate-C4EHvJzJ.mjs");
  const result = await validate(cwd);
  if (!result.valid) {
  console.log(pc.red("Validation failed:\n"));
@@ -564,38 +1966,455 @@ async function deployCommand() {
  process.exit(1);
  }
  console.log(pc.dim(` Found ${result.cubes.length} cube(s) and ${result.views.length} view(s)`));
+ if (await testAndSyncDatasources(cwd, options)) process.exit(1);
  const files = {
  ...collectFiles(path.join(cwd, "models"), cwd),
  ...collectFiles(path.join(cwd, "views"), cwd)
  };
  const fileCount = Object.keys(files).length;
- console.log(pc.dim(` Deploying ${fileCount} file(s)...\n`));
+ console.log(pc.dim(`Deploying ${fileCount} file(s)...`));
+ console.log();
  try {
  const response = await post("/api/deploy", { files });
  console.log(pc.green("Deploy successful!"));
  console.log(`Deployment ID: ${pc.cyan(response.deployment.id)}`);
  console.log(`Cube API: ${pc.cyan(`${response.deployment.cubeApiUrl}/cubejs-api/v1`)}`);
+ console.log();
+ console.log(pc.bold("Connect AI agents via MCP:"));
+ console.log(` MCP URL: ${pc.cyan("https://mcp.bonnard.dev/mcp")}`);
+ console.log(pc.dim(` Run \`bon mcp\` for setup instructions`));
  } catch (err) {
  console.log(pc.red(`Deploy failed: ${err instanceof Error ? err.message : err}`));
  process.exit(1);
  }
  }
+ /**
+ * Test connections and sync datasources to remote
+ * Returns true if any connection failed (strict mode)
+ */
+ async function testAndSyncDatasources(cwd, options = {}) {
+ const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
+ const { testConnection } = await Promise.resolve().then(() => connection_exports);
+ const { pushDatasource } = await Promise.resolve().then(() => push_exports);
+ const references = extractDatasourcesFromModels(cwd);
+ if (references.length === 0) return false;
+ console.log();
+ console.log(pc.dim("Testing datasource connections..."));
+ const localDatasources = loadLocalDatasources(cwd);
+ let failed = false;
+ const validatedDatasources = [];
+ for (const ref of references) {
+ const ds = localDatasources.find((d) => d.name === ref.name);
+ if (!ds) {
+ console.log(pc.red(`✗ ${ref.name}: not found in .bon/datasources.yaml`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ console.log(pc.dim(` Run: bon datasource add --from-dbt`));
+ failed = true;
+ continue;
+ }
+ const { resolved, missing } = resolveEnvVarsInCredentials(ds.credentials);
+ if (missing.length > 0) {
+ console.log(pc.red(`✗ ${ref.name}: missing env vars: ${missing.join(", ")}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ failed = true;
+ continue;
+ }
+ const result = await testConnection({
+ type: ds.type,
+ config: ds.config,
+ credentials: resolved
+ });
+ if (result.success) {
+ const latency = result.latencyMs ? pc.dim(` (${result.latencyMs}ms)`) : "";
+ console.log(pc.green(`✓ ${ref.name}${latency}`));
+ validatedDatasources.push(ref.name);
+ } else {
+ console.log(pc.red(`✗ ${ref.name}: ${result.error || result.message}`));
+ console.log(pc.dim(` Used by: ${ref.cubes.join(", ")}`));
+ failed = true;
+ }
+ }
+ console.log();
+ if (failed) {
+ console.log(pc.red("Connection tests failed. Fix datasource issues before deploying."));
+ return true;
+ }
+ console.log(pc.dim("Checking remote datasources..."));
+ let remoteDatasources;
+ try {
+ remoteDatasources = await getRemoteDatasources();
+ } catch (err) {
+ console.log(pc.red(`Failed to fetch remote datasources: ${err.message}`));
+ return true;
+ }
+ const remoteNames = new Set(remoteDatasources.map((ds) => ds.name));
+ const missingRemote = validatedDatasources.filter((name) => !remoteNames.has(name));
+ if (missingRemote.length === 0) {
+ console.log(pc.green("✓ All datasources exist on remote"));
+ console.log();
+ return false;
+ }
+ console.log();
+ console.log(pc.yellow(`⚠ Missing remote datasource${missingRemote.length > 1 ? "s" : ""}: ${missingRemote.join(", ")}`));
+ console.log();
+ if (options.ci) {
+ console.log(pc.red("Deploy aborted (--ci mode)."));
+ console.log(pc.dim(`Run: bon datasource push <name>`));
+ return true;
+ }
+ if (options.pushDatasources) {
+ for (const name of missingRemote) {
+ console.log(pc.dim(`Pushing "${name}"...`));
+ if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
+ else {
+ console.log(pc.red(`✗ Failed to push "${name}"`));
+ return true;
+ }
+ }
+ console.log();
+ return false;
+ }
+ if (!await confirm({
+ message: `Push ${missingRemote.length > 1 ? "these datasources" : `"${missingRemote[0]}"`} to Bonnard? (credentials will be encrypted)`,
+ default: true
+ })) {
+ console.log(pc.dim("Deploy aborted."));
+ return true;
+ }
+ console.log();
+ for (const name of missingRemote) {
+ console.log(pc.dim(`Pushing "${name}"...`));
+ if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
+ else {
+ console.log(pc.red(`✗ Failed to push "${name}"`));
+ return true;
+ }
+ }
+ console.log();
+ return false;
+ }
+
+ //#endregion
+ //#region src/commands/mcp.ts
+ const MCP_URL = "https://mcp.bonnard.dev/mcp";
+ function mcpCommand() {
+ console.log(pc.bold("MCP Connection Info"));
+ console.log();
+ console.log(`MCP URL: ${pc.cyan(MCP_URL)}`);
+ console.log();
+ console.log(pc.bold("Setup Instructions"));
+ console.log();
+ console.log(pc.underline("Claude Desktop"));
+ console.log(`Add to ${pc.dim("~/Library/Application Support/Claude/claude_desktop_config.json")}:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.underline("Cursor"));
+ console.log(`Add to ${pc.dim(".cursor/mcp.json")} in your project:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.underline("Claude Code"));
+ console.log(`Add to ${pc.dim(".mcp.json")} in your project:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "type": "url",`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.dim("OAuth authentication happens automatically when you first connect."));
+ console.log(pc.dim("Run `bon mcp test` to verify the MCP server is reachable."));
+ }
+
+ //#endregion
+ //#region src/commands/mcp-test.ts
+ const MCP_SERVER_BASE = "https://mcp.bonnard.dev";
+ async function mcpTestCommand() {
+ console.log(pc.dim("Testing MCP server connection..."));
+ console.log();
+ const url = `${MCP_SERVER_BASE}/.well-known/oauth-authorization-server`;
+ try {
+ const res = await fetch(url);
+ if (!res.ok) {
+ console.log(pc.red(`✗ MCP server returned ${res.status}`));
+ process.exit(1);
+ }
+ const metadata = await res.json();
+ console.log(pc.green("✓ MCP server is reachable"));
+ console.log();
+ console.log(` Issuer: ${pc.dim(metadata.issuer || "unknown")}`);
+ console.log(` Authorization: ${pc.dim(metadata.authorization_endpoint || "unknown")}`);
+ console.log(` Token: ${pc.dim(metadata.token_endpoint || "unknown")}`);
+ console.log(` Registration: ${pc.dim(metadata.registration_endpoint || "unknown")}`);
+ console.log();
+ console.log(pc.dim("OAuth endpoints are healthy. Agents can connect."));
+ } catch (err) {
+ console.log(pc.red(`✗ Failed to reach MCP server: ${err instanceof Error ? err.message : err}`));
+ process.exit(1);
+ }
+ }
+
+ //#endregion
+ //#region src/commands/docs.ts
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = path.dirname(__filename);
+ const DOCS_DIR = path.join(__dirname, "..", "docs");
+ /**
+ * Get list of all available topics by scanning the topics directory
+ */
+ function getAvailableTopics() {
+ const topicsDir = path.join(DOCS_DIR, "topics");
+ if (!fs.existsSync(topicsDir)) return [];
+ return fs.readdirSync(topicsDir).filter((f) => f.endsWith(".md")).map((f) => f.replace(".md", "")).sort();
+ }
+ /**
+ * Load the index file
+ */
+ function loadIndex() {
+ const indexPath = path.join(DOCS_DIR, "_index.md");
+ if (!fs.existsSync(indexPath)) return null;
+ return fs.readFileSync(indexPath, "utf-8");
+ }
+ /**
+ * Load a specific topic
+ */
+ function loadTopic(topicId) {
+ const topicPath = path.join(DOCS_DIR, "topics", `${topicId}.md`);
+ if (!fs.existsSync(topicPath)) return null;
+ return fs.readFileSync(topicPath, "utf-8");
+ }
+ /**
+ * Load a JSON schema
+ */
+ function loadSchema(schemaName) {
+ const schemaPath = path.join(DOCS_DIR, "schemas", `${schemaName}.schema.json`);
+ if (!fs.existsSync(schemaPath)) return null;
+ return JSON.parse(fs.readFileSync(schemaPath, "utf-8"));
+ }
+ /**
+ * Get child topics for a given topic
+ */
+ function getChildTopics(topicId) {
+ const allTopics = getAvailableTopics();
+ const prefix = topicId + ".";
+ return allTopics.filter((t) => t.startsWith(prefix) && !t.slice(prefix.length).includes("."));
+ }
+ /**
+ * Search topics for a query string
+ */
+ function searchTopics(query) {
+ const results = [];
+ const queryLower = query.toLowerCase();
+ for (const topic of getAvailableTopics()) {
+ const content = loadTopic(topic);
+ if (!content) continue;
+ const lines = content.split("\n");
+ const matches = [];
+ for (const line of lines) if (line.toLowerCase().includes(queryLower)) matches.push(line.trim());
+ if (matches.length > 0) results.push({
+ topic,
+ matches: matches.slice(0, 3)
+ });
+ }
+ return results;
+ }
+ /**
+ * Format topic as JSON
+ */
+ function formatAsJson(topicId, content) {
+ const lines = content.split("\n");
+ const title = lines.find((l) => l.startsWith("# "))?.replace("# ", "") || topicId;
+ const description = lines.find((l) => l.startsWith("> "))?.replace("> ", "") || "";
+ const children = getChildTopics(topicId);
+ const seeAlsoIndex = lines.findIndex((l) => l.includes("## See Also"));
+ const seeAlso = [];
+ if (seeAlsoIndex !== -1) for (let i = seeAlsoIndex + 1; i < lines.length; i++) {
+ const match = lines[i].match(/^- (.+)$/);
+ if (match) seeAlso.push(match[1]);
+ else if (lines[i].startsWith("##")) break;
+ }
+ const moreInfoIndex = lines.findIndex((l) => l.includes("## More Information"));
+ let reference;
+ if (moreInfoIndex !== -1 && lines[moreInfoIndex + 2]) {
+ const url = lines[moreInfoIndex + 2].trim();
+ if (url.startsWith("http")) reference = url;
+ }
+ return JSON.stringify({
+ topic: topicId,
+ title,
+ description,
+ children,
+ seeAlso,
+ reference
+ }, null, 2);
+ }
+ /**
+ * Main docs command
+ */
+ async function docsCommand(topic, options = {}) {
+ if (options.search) {
+ const results = searchTopics(options.search);
+ if (results.length === 0) {
+ console.log(pc.yellow(`No topics found matching "${options.search}"`));
+ return;
+ }
+ console.log(pc.bold(`Found ${results.length} topic(s) matching "${options.search}":\n`));
+ for (const result of results) {
+ console.log(pc.cyan(` ${result.topic}`));
+ for (const match of result.matches) console.log(pc.dim(` ${match.slice(0, 80)}${match.length > 80 ? "..." : ""}`));
+ console.log();
+ }
+ return;
+ }
+ if (!topic) {
+ const index = loadIndex();
+ if (!index) {
+ console.log(pc.red("Documentation index not found."));
+ console.log(pc.dim("Expected at: " + path.join(DOCS_DIR, "_index.md")));
+ process.exit(1);
+ }
+ console.log(index);
+ return;
+ }
+ const content = loadTopic(topic);
+ if (!content) {
+ const available = getAvailableTopics();
+ console.log(pc.red(`Topic "${topic}" not found.`));
+ console.log();
+ const similar = available.filter((t) => t.includes(topic) || topic.includes(t) || t.split(".").some((part) => topic.includes(part)));
+ if (similar.length > 0) {
+ console.log(pc.dim("Similar topics:"));
+ for (const s of similar.slice(0, 5)) console.log(pc.dim(` - ${s}`));
+ } else console.log(pc.dim("Run `bon docs` to see available topics."));
+ process.exit(1);
+ }
+ if (options.format === "json") console.log(formatAsJson(topic, content));
+ else console.log(content);
+ if (options.recursive) {
+ const children = getChildTopics(topic);
+ for (const child of children) {
+ const childContent = loadTopic(child);
+ if (childContent) {
+ console.log("\n" + "─".repeat(60) + "\n");
+ if (options.format === "json") console.log(formatAsJson(child, childContent));
+ else console.log(childContent);
+ }
+ }
+ }
+ }
+ /**
+ * Schema subcommand
+ */
+ async function docsSchemaCommand(schemaName) {
+ const schema = loadSchema(schemaName);
+ if (!schema) {
+ const schemasDir = path.join(DOCS_DIR, "schemas");
+ const available = fs.existsSync(schemasDir) ? fs.readdirSync(schemasDir).filter((f) => f.endsWith(".schema.json")).map((f) => f.replace(".schema.json", "")) : [];
+ console.log(pc.red(`Schema "${schemaName}" not found.`));
+ if (available.length > 0) {
+ console.log(pc.dim("\nAvailable schemas:"));
+ for (const s of available) console.log(pc.dim(` - ${s}`));
+ }
+ process.exit(1);
+ }
+ console.log(JSON.stringify(schema, null, 2));
+ }
+
+ //#endregion
+ //#region src/commands/cube/query.ts
+ /**
+ * Query the deployed Cube semantic layer
+ *
+ * Supports two formats:
+ * - JSON (default): bon cube query '{"measures": ["orders.count"]}'
+ * - SQL: bon cube query --sql "SELECT MEASURE(count) FROM orders"
+ */
+ async function cubeQueryCommand(queryInput, options = {}) {
+ if (!loadCredentials()) {
+ console.error(pc.red("Not logged in. Run `bon login` first."));
+ process.exit(1);
+ }
+ const format = options.format ?? "toon";
+ const limit = options.limit ? parseInt(options.limit, 10) : void 0;
+ try {
+ let payload;
+ if (options.sql) payload = { sql: queryInput };
+ else {
+ let query;
+ try {
+ query = JSON.parse(queryInput);
+ } catch {
+ console.error(pc.red("Invalid JSON query. Use --sql for SQL queries."));
+ console.log(pc.dim("Example: bon cube query '{\"measures\": [\"orders.count\"]}'"));
+ process.exit(1);
+ }
+ if (limit && !query.limit) query.limit = limit;
+ payload = { query };
+ }
+ const result = await post("/api/cube/query", payload);
+ if (result.error) {
+ console.error(pc.red(`Query error: ${result.error}`));
+ process.exit(1);
+ }
+ if (result.sql) {
+ console.log(pc.dim("Generated SQL:"));
+ console.log(result.sql.sql.join("\n"));
+ return;
+ }
+ const data = result.results?.[0]?.data ?? result.data ?? [];
+ if (data.length === 0) {
+ console.log("No rows returned.");
+ return;
+ }
+ if (format === "json") console.log(JSON.stringify(data, null, 2));
+ else {
+ const toon = encode({ results: data });
+ console.log(toon);
+ }
+ if (limit && data.length >= limit) console.log(pc.dim(`(limited to ${limit} rows)`));
+ } catch (err) {
+ console.error(pc.red(`Query failed: ${err.message}`));
+ process.exit(1);
+ }
+ }
 
  //#endregion
  //#region src/bin/bon.ts
  const { version } = createRequire(import.meta.url)("../../package.json");
  program.name("bon").description("Bonnard semantic layer CLI").version(version);
- program.command("init").description("Create a new Bonnard project in the current directory").action(initCommand);
+ program.command("init").description("Create bon.yaml, models/, views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
  program.command("login").description("Authenticate with Bonnard via your browser").action(loginCommand);
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
+ program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
  const datasource = program.command("datasource").description("Manage warehouse data source connections");
- datasource.command("add").description("Add a new data source connection").action(datasourceAddCommand);
- datasource.command("list").description("List configured data sources").action(datasourceListCommand);
- datasource.command("test").description("Test data source connectivity").argument("<name>", "Data source name").action(datasourceTestCommand);
- datasource.command("remove").description("Remove a data source").argument("<name>", "Data source name").action(datasourceRemoveCommand);
- program.command("query").description("Run a SQL query against a warehouse").argument("<datasource-name>", "Data source name").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(queryCommand);
- program.command("validate").description("Validate Cube model and view YAML files").action(validateCommand);
- program.command("deploy").description("Deploy models to your Bonnard Cube instance").action(deployCommand);
+ datasource.command("add").description("Add a data source to .bon/datasources.yaml. Use --name and --type together for non-interactive mode").option("--from-dbt [profile]", "Import from dbt profiles.yml (optionally specify profile/target)").option("--target <target>", "Target name when using --from-dbt").option("--all", "Import all connections from dbt profiles").option("--default-targets", "Import only default targets from dbt profiles (non-interactive)").option("--name <name>", "Datasource name (required for non-interactive mode)").option("--type <type>", "Warehouse type: snowflake, postgres, bigquery, databricks (required for non-interactive mode)").option("--account <account>", "Snowflake account identifier").option("--database <database>", "Database name").option("--schema <schema>", "Schema name").option("--warehouse <warehouse>", "Warehouse name (Snowflake)").option("--role <role>", "Role (Snowflake)").option("--host <host>", "Host (Postgres)").option("--port <port>", "Port (Postgres, default: 5432)").option("--project-id <projectId>", "GCP Project ID (BigQuery)").option("--dataset <dataset>", "Dataset name (BigQuery)").option("--location <location>", "Location (BigQuery)").option("--hostname <hostname>", "Server hostname (Databricks)").option("--http-path <httpPath>", "HTTP path (Databricks)").option("--catalog <catalog>", "Catalog name (Databricks)").option("--user <user>", "Username").option("--password <password>", "Password (use --password-env for env var reference)").option("--token <token>", "Access token (use --token-env for env var reference)").option("--service-account-json <json>", "Service account JSON (BigQuery)").option("--keyfile <path>", "Path to service account key file (BigQuery)").option("--password-env <varName>", "Env var name for password, stores as {{ env_var('NAME') }}").option("--token-env <varName>", "Env var name for token, stores as {{ env_var('NAME') }}").option("--force", "Overwrite existing datasource without prompting").action(datasourceAddCommand);
+ datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
+ datasource.command("test").description("Test data source connectivity by connecting directly to the warehouse").argument("<name>", "Data source name from .bon/datasources.yaml").option("--remote", "Test via Bonnard API instead of direct connection (requires login)").action(datasourceTestCommand);
+ datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
+ datasource.command("push").description("Push a local data source to Bonnard server (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").option("--force", "Overwrite if already exists on remote").action(datasourcePushCommand);
+ program.command("preview").description("Preview data from a local warehouse using raw SQL (for development/exploration)").argument("<datasource>", "Data source name from .bon/datasources.yaml").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(previewCommand);
+ program.command("validate").description("Validate YAML syntax in models/ and views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
+ program.command("deploy").description("Deploy models to Bonnard. Requires login, validates models, tests connections (fails on error)").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").action(deployCommand);
+ program.command("mcp").description("MCP connection info and setup instructions").action(mcpCommand).command("test").description("Test MCP server connectivity").action(mcpTestCommand);
+ program.command("cube").description("Query the deployed Cube semantic layer").command("query").description("Execute a query against the deployed semantic layer").argument("<query>", "JSON query or SQL (with --sql flag)").option("--sql", "Use Cube SQL API instead of JSON format").option("--limit <limit>", "Max rows to return").option("--format <format>", "Output format: toon or json", "toon").action(cubeQueryCommand);
+ program.command("docs").description("Browse Cube documentation for building models and views").argument("[topic]", "Topic to display (e.g., cubes, cubes.measures)").option("-r, --recursive", "Show topic and all child topics").option("-s, --search <query>", "Search topics for a keyword").option("-f, --format <format>", "Output format: markdown or json", "markdown").action(docsCommand).command("schema").description("Show JSON schema for a type (cube, view, measure, etc.)").argument("<type>", "Schema type to display").action(docsSchemaCommand);
  program.parse();
 
  //#endregion