@bonnard/cli 0.1.5 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/bon.mjs CHANGED
@@ -5,10 +5,10 @@ import fs from "node:fs";
 import path from "node:path";
 import { fileURLToPath } from "node:url";
 import pc from "picocolors";
+import YAML from "yaml";
+import os from "node:os";
 import http from "node:http";
 import crypto from "node:crypto";
-import os from "node:os";
-import YAML from "yaml";
 import { execFileSync } from "node:child_process";
 import { confirm } from "@inquirer/prompts";
 import { encode } from "@toon-format/toon";
@@ -30,6 +30,462 @@ var __exportAll = (all, symbols) => {
 };
 var __require = /* @__PURE__ */ createRequire(import.meta.url);
 
+//#endregion
+//#region src/lib/project.ts
+/**
+ * The subdirectory name used for Bonnard model/view files.
+ * Keeps Bonnard files namespaced to avoid conflicts with existing
+ * project directories (e.g. dbt's models/).
+ */
+const BONNARD_DIR = "bonnard";
+/**
+ * Resolve Bonnard project paths relative to the working directory.
+ * All model/view operations should use these paths.
+ */
+function getProjectPaths(cwd) {
+  const bonnardRoot = path.join(cwd, BONNARD_DIR);
+  return {
+    root: bonnardRoot,
+    models: path.join(bonnardRoot, "models"),
+    views: path.join(bonnardRoot, "views"),
+    config: path.join(cwd, "bon.yaml"),
+    localState: path.join(cwd, ".bon")
+  };
+}
+
+//#endregion
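A quick sketch of what the new helper resolves for a project at `/home/me/my-project` (illustrative path; the helper body is copied from the hunk above):

```js
import path from "node:path";

const BONNARD_DIR = "bonnard";
function getProjectPaths(cwd) {
  const bonnardRoot = path.join(cwd, BONNARD_DIR);
  return {
    root: bonnardRoot,
    models: path.join(bonnardRoot, "models"),
    views: path.join(bonnardRoot, "views"),
    config: path.join(cwd, "bon.yaml"),
    localState: path.join(cwd, ".bon")
  };
}

console.log(getProjectPaths("/home/me/my-project"));
// => {
//   root: "/home/me/my-project/bonnard",
//   models: "/home/me/my-project/bonnard/models",
//   views: "/home/me/my-project/bonnard/views",
//   config: "/home/me/my-project/bon.yaml",   // stays at the project root
//   localState: "/home/me/my-project/.bon"    // stays at the project root
// }
```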
+//#region src/lib/dbt/profiles.ts
+/**
+ * dbt profiles.yml parser
+ *
+ * Parses ~/.dbt/profiles.yml and extracts connection configs.
+ * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
+ */
+const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
+/**
+ * Check if dbt profiles.yml exists
+ */
+function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
+  return fs.existsSync(profilesPath);
+}
+/**
+ * Get the default dbt profiles path
+ */
+function getDefaultProfilesPath() {
+  return DBT_PROFILES_PATH;
+}
+/**
+ * Map dbt type to Bonnard warehouse type
+ */
+function mapDbtType(dbtType) {
+  return {
+    snowflake: "snowflake",
+    postgres: "postgres",
+    postgresql: "postgres",
+    bigquery: "bigquery",
+    databricks: "databricks"
+  }[dbtType.toLowerCase()] ?? null;
+}
+/**
+ * Parse dbt profiles.yml and return all connections
+ * Config values are kept as-is (including {{ env_var(...) }} patterns)
+ */
+function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
+  if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
+  const content = fs.readFileSync(profilesPath, "utf-8");
+  const profiles = YAML.parse(content);
+  if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
+  const connections = [];
+  for (const [profileName, profile] of Object.entries(profiles)) {
+    if (profileName === "config") continue;
+    if (!profile.outputs || typeof profile.outputs !== "object") continue;
+    const defaultTarget = profile.target || "dev";
+    for (const [targetName, target] of Object.entries(profile.outputs)) {
+      if (!target || typeof target !== "object" || !target.type) continue;
+      const warehouseType = mapDbtType(target.type);
+      if (!warehouseType) continue;
+      connections.push({
+        profileName,
+        targetName,
+        isDefaultTarget: targetName === defaultTarget,
+        type: warehouseType,
+        config: target
+      });
+    }
+  }
+  return connections;
+}
+
+//#endregion
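To make the parsing behaviour concrete, a minimal sketch of the walk `parseDbtProfiles` performs over a hypothetical profiles.yml (requires the `yaml` package; profile and target names are invented):

```js
import YAML from "yaml";

const sample = `
my_project:
  target: dev
  outputs:
    dev:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
    prod:
      type: postgres
      host: db.internal
`;

// Mirrors the loop above: skip the top-level "config" key, default the
// target to "dev", and flag whichever output matches profile.target.
const profiles = YAML.parse(sample);
for (const [profileName, profile] of Object.entries(profiles)) {
  if (profileName === "config" || !profile.outputs) continue;
  const defaultTarget = profile.target || "dev";
  for (const [targetName, target] of Object.entries(profile.outputs)) {
    console.log(profileName, targetName, target.type, targetName === defaultTarget);
  }
}
// my_project dev snowflake true
// my_project prod postgres false
```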
+//#region src/lib/detect/scanner.ts
+/**
+ * Project environment scanner
+ *
+ * Detects data tools (dbt, dagster, etc.), warehouse connections,
+ * and existing model files in the user's project directory.
+ */
+const CONFIG_FILES = {
+  "dbt_project.yml": "dbt",
+  "dagster.yaml": "dagster",
+  "prefect.yaml": "prefect",
+  "evidence.config.yaml": "evidence",
+  "cube.js": "cube",
+  "cube.py": "cube",
+  "manifest.lkml": "looker",
+  "airflow.cfg": "airflow"
+};
+const CONFIG_DIRS = {
+  dags: "airflow",
+  great_expectations: "great-expectations",
+  gx: "great-expectations"
+};
+const SCAN_DIRS = [
+  "dbt",
+  "transform",
+  "analytics",
+  "data",
+  "warehouse",
+  "data-warehouse",
+  "orchestration",
+  "dagster",
+  "airflow"
+];
+const PYTHON_PACKAGES = {
+  "dbt-core": "dbt",
+  "dbt-snowflake": "dbt",
+  "dbt-postgres": "dbt",
+  "dbt-bigquery": "dbt",
+  "dbt-databricks": "dbt",
+  dagster: "dagster",
+  sqlmesh: "sqlmesh",
+  "apache-airflow": "airflow",
+  prefect: "prefect",
+  "soda-core": "soda"
+};
+const NPM_PACKAGES = {
+  "@cubejs-backend/": "cube",
+  "@evidence-dev/": "evidence"
+};
+const SAFE_DBT_FIELDS = [
+  "account",
+  "host",
+  "database",
+  "dbname",
+  "schema",
+  "warehouse",
+  "role",
+  "port",
+  "project",
+  "dataset",
+  "location",
+  "hostname",
+  "http_path",
+  "catalog"
+];
+/**
+ * Scan root and subdirs for known config files/directories
+ */
+function scanForConfigFiles(cwd) {
+  const tools = [];
+  const seen = /* @__PURE__ */ new Set();
+  function checkDir(dir, prefix) {
+    for (const [filename, toolName] of Object.entries(CONFIG_FILES)) {
+      if (seen.has(toolName)) continue;
+      const filePath = path.join(dir, filename);
+      if (fs.existsSync(filePath)) {
+        const relativePath = prefix ? `${prefix}/${filename}` : filename;
+        tools.push({
+          name: toolName,
+          configPath: relativePath
+        });
+        seen.add(toolName);
+      }
+    }
+    for (const [dirname, toolName] of Object.entries(CONFIG_DIRS)) {
+      if (seen.has(toolName)) continue;
+      const dirPath = path.join(dir, dirname);
+      try {
+        if (fs.statSync(dirPath).isDirectory()) {
+          const relativePath = prefix ? `${prefix}/${dirname}/` : `${dirname}/`;
+          tools.push({
+            name: toolName,
+            configPath: relativePath
+          });
+          seen.add(toolName);
+        }
+      } catch {}
+    }
+  }
+  checkDir(cwd, "");
+  for (const subdir of SCAN_DIRS) {
+    const subdirPath = path.join(cwd, subdir);
+    try {
+      if (fs.statSync(subdirPath).isDirectory()) checkDir(subdirPath, subdir);
+    } catch {}
+  }
+  return tools;
+}
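For a repo that keeps dbt in a `dbt/` subdirectory and has an Airflow `dags/` folder at the root, the scan above would produce records shaped like this (hypothetical paths; root is checked before `SCAN_DIRS`):

```js
const tools = [
  { name: "airflow", configPath: "dags/" },          // matched via CONFIG_DIRS at the root
  { name: "dbt", configPath: "dbt/dbt_project.yml" } // matched via CONFIG_FILES in a SCAN_DIRS subdir
];
```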
+/**
+ * Scan dependency files for known data tool packages
+ */
+function scanDependencyFiles(cwd) {
+  const tools = [];
+  const pyprojectPath = path.join(cwd, "pyproject.toml");
+  if (fs.existsSync(pyprojectPath)) try {
+    const content = fs.readFileSync(pyprojectPath, "utf-8");
+    for (const [pkg, toolName] of Object.entries(PYTHON_PACKAGES)) if (content.includes(pkg)) tools.push({
+      name: toolName,
+      configPath: "pyproject.toml"
+    });
+  } catch {}
+  const requirementsPath = path.join(cwd, "requirements.txt");
+  if (fs.existsSync(requirementsPath)) try {
+    const content = fs.readFileSync(requirementsPath, "utf-8");
+    for (const [pkg, toolName] of Object.entries(PYTHON_PACKAGES)) if (content.includes(pkg)) tools.push({
+      name: toolName,
+      configPath: "requirements.txt"
+    });
+  } catch {}
+  const packageJsonPath = path.join(cwd, "package.json");
+  if (fs.existsSync(packageJsonPath)) try {
+    const content = fs.readFileSync(packageJsonPath, "utf-8");
+    const pkg = JSON.parse(content);
+    const allDeps = {
+      ...pkg.dependencies,
+      ...pkg.devDependencies
+    };
+    for (const depName of Object.keys(allDeps)) for (const [prefix, toolName] of Object.entries(NPM_PACKAGES)) if (depName.startsWith(prefix)) tools.push({
+      name: toolName,
+      configPath: "package.json"
+    });
+  } catch {}
+  return tools;
+}
+/**
+ * Deduplicate tools by name, keeping the first occurrence (config file > dependency)
+ */
+function deduplicateTools(tools) {
+  const seen = /* @__PURE__ */ new Set();
+  return tools.filter((tool) => {
+    if (seen.has(tool.name)) return false;
+    seen.add(tool.name);
+    return true;
+  });
+}
+/**
+ * Extract safe (non-secret) fields from a config object.
+ * If a value looks like a dbt env_var reference, record it as "via env: VAR_NAME".
+ */
+function extractSafeFields(config) {
+  const safe = {};
+  for (const field of SAFE_DBT_FIELDS) {
+    const value = config[field];
+    if (value === void 0 || value === null) continue;
+    const strValue = String(value);
+    const envMatch = strValue.match(/\{\{\s*env_var\s*\(\s*['"]([^'"]+)['"]/);
+    if (envMatch) safe[field] = `via env: ${envMatch[1]}`;
+    else safe[field] = strValue;
+  }
+  return safe;
+}
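The env_var regex is worth a close look, since it is what keeps secrets out of the generated context. A small sketch of how it classifies values:

```js
const ENV_VAR_RE = /\{\{\s*env_var\s*\(\s*['"]([^'"]+)['"]/;

// A dbt-style env reference: only the variable name is recorded.
console.log("{{ env_var('SNOWFLAKE_ACCOUNT') }}".match(ENV_VAR_RE)?.[1]);
// => "SNOWFLAKE_ACCOUNT"   (stored as "via env: SNOWFLAKE_ACCOUNT")

// A literal value of an allowlisted SAFE_DBT_FIELDS key is copied verbatim.
console.log("analytics".match(ENV_VAR_RE));
// => null                  (stored as "analytics")
```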
+/**
+ * Try to extract warehouse info from dbt profiles
+ */
+function extractWarehouseFromDbt(cwd, tools) {
+  const dbtTool = tools.find((t) => t.name === "dbt");
+  if (!dbtTool) return null;
+  const dbtProjectPath = path.join(cwd, dbtTool.configPath);
+  let profileName;
+  try {
+    const content = fs.readFileSync(dbtProjectPath, "utf-8");
+    const parsed = YAML.parse(content);
+    profileName = parsed?.profile;
+    const projectName = parsed?.name;
+    if (projectName) dbtTool.metadata = {
+      ...dbtTool.metadata,
+      project: projectName
+    };
+    if (profileName) dbtTool.metadata = {
+      ...dbtTool.metadata,
+      profile: profileName
+    };
+  } catch {}
+  if (!dbtProfilesExist()) return null;
+  try {
+    const connections = parseDbtProfiles();
+    let connection = profileName ? connections.find((c) => c.profileName === profileName && c.isDefaultTarget) : null;
+    if (!connection && connections.length > 0) connection = connections[0];
+    if (!connection) return null;
+    return {
+      type: connection.type,
+      source: "dbt-profiles",
+      config: extractSafeFields(connection.config)
+    };
+  } catch {
+    return null;
+  }
+}
+/**
+ * Try to extract warehouse info from .env file
+ */
+function extractWarehouseFromEnv(cwd) {
+  const envPath = path.join(cwd, ".env");
+  if (!fs.existsSync(envPath)) return null;
+  try {
+    const content = fs.readFileSync(envPath, "utf-8");
+    const cubeDbType = content.match(/^CUBEJS_DB_TYPE=(.+)$/m);
+    if (cubeDbType) {
+      const type = {
+        snowflake: "snowflake",
+        postgres: "postgres",
+        bigquery: "bigquery",
+        databricks: "databricks"
+      }[cubeDbType[1].trim().toLowerCase()];
+      if (type) return {
+        type,
+        source: "env",
+        config: { CUBEJS_DB_TYPE: cubeDbType[1].trim() }
+      };
+    }
+    if (content.match(/^SNOWFLAKE_ACCOUNT=/m)) {
+      const account = content.match(/^SNOWFLAKE_ACCOUNT=(.+)$/m);
+      return {
+        type: "snowflake",
+        source: "env",
+        config: account ? { account: account[1].trim() } : {}
+      };
+    }
+    if (content.match(/^PGHOST=/m) || content.match(/^DATABASE_URL=postgres/m)) return {
+      type: "postgres",
+      source: "env",
+      config: {}
+    };
+  } catch {}
+  return null;
+}
+/**
+ * Extract warehouse info from detected tools and environment
+ */
+function extractWarehouseInfo(cwd, tools) {
+  if (tools.some((t) => t.name === "dbt")) {
+    const fromDbt = extractWarehouseFromDbt(cwd, tools);
+    if (fromDbt) return fromDbt;
+  }
+  return extractWarehouseFromEnv(cwd);
+}
+/**
+ * Collect existing model files from detected tool directories.
+ * Non-recursive scan of likely model dirs, capped at 20 paths.
+ */
+function collectModelFiles(cwd, tools) {
+  const modelFiles = [];
+  const extensions = new Set([
+    ".sql",
+    ".yml",
+    ".yaml",
+    ".lkml"
+  ]);
+  const modelDirSet = /* @__PURE__ */ new Set();
+  for (const tool of tools) if (tool.name === "dbt") {
+    const dbtDir = path.dirname(path.join(cwd, tool.configPath));
+    modelDirSet.add(path.join(dbtDir, "models"));
+  } else if (tool.name === "looker") {
+    const lookDir = path.dirname(path.join(cwd, tool.configPath));
+    modelDirSet.add(lookDir);
+  }
+  for (const dir of [
+    "models",
+    "marts",
+    "staging",
+    "intermediate"
+  ]) modelDirSet.add(path.join(cwd, dir));
+  for (const dir of modelDirSet) {
+    if (modelFiles.length >= 20) break;
+    try {
+      if (!fs.statSync(dir).isDirectory()) continue;
+      const entries = fs.readdirSync(dir, { withFileTypes: true });
+      for (const entry of entries) {
+        if (modelFiles.length >= 20) break;
+        if (entry.isFile() && extensions.has(path.extname(entry.name))) modelFiles.push(path.relative(cwd, path.join(dir, entry.name)));
+        if (entry.isDirectory()) try {
+          const subEntries = fs.readdirSync(path.join(dir, entry.name), { withFileTypes: true });
+          for (const subEntry of subEntries) {
+            if (modelFiles.length >= 20) break;
+            if (subEntry.isFile() && extensions.has(path.extname(subEntry.name))) modelFiles.push(path.relative(cwd, path.join(dir, entry.name, subEntry.name)));
+          }
+        } catch {}
+      }
+    } catch {}
+  }
+  return modelFiles;
+}
+/**
+ * Detect the project environment by scanning for data tools,
+ * warehouse connections, and existing model files.
+ */
+function detectProjectEnvironment(cwd) {
+  const configTools = scanForConfigFiles(cwd);
+  const depTools = scanDependencyFiles(cwd);
+  const tools = deduplicateTools([...configTools, ...depTools]);
+  return {
+    tools,
+    warehouse: extractWarehouseInfo(cwd, tools),
+    existingModels: collectModelFiles(cwd, tools)
+  };
+}
+
+//#endregion
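Putting the scanner together, `detectProjectEnvironment` returns a single record that the init flow consumes; for a dbt repo with a Snowflake profile it might look like this (all values hypothetical):

```js
const env = {
  tools: [{
    name: "dbt",
    configPath: "dbt_project.yml",
    metadata: { project: "jaffle_shop", profile: "jaffle_shop" }
  }],
  warehouse: {
    type: "snowflake",
    source: "dbt-profiles",
    // Secrets replaced by env references via extractSafeFields:
    config: { account: "via env: SNOWFLAKE_ACCOUNT", database: "analytics", schema: "public" }
  },
  existingModels: ["models/orders.sql", "models/staging/stg_orders.sql"]
};
```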
+//#region src/lib/detect/context.ts
+/**
+ * Generate a Markdown context section from detected project environment.
+ * Returns a string with "## This Project" and "## Your Role" sections.
+ */
+function generateProjectContext(env) {
+  const sections = [];
+  const projectLines = ["## This Project"];
+  if (env.tools.length > 0) {
+    projectLines.push("");
+    projectLines.push("**Detected tools:**");
+    for (const tool of env.tools) {
+      const meta = tool.metadata ? ` (${Object.entries(tool.metadata).map(([k, v]) => `${k}: ${v}`).join(", ")})` : "";
+      projectLines.push(`- ${tool.name} — found at \`${tool.configPath}\`${meta}`);
+    }
+  }
+  if (env.warehouse) {
+    projectLines.push("");
+    projectLines.push(`**Warehouse:** ${env.warehouse.type} (detected via ${env.warehouse.source})`);
+    const configEntries = Object.entries(env.warehouse.config);
+    if (configEntries.length > 0) for (const [key, value] of configEntries) projectLines.push(`- ${key}: \`${value}\``);
+  }
+  if (env.existingModels.length > 0) {
+    projectLines.push("");
+    projectLines.push(`**Existing model files:** ${env.existingModels.length} file${env.existingModels.length === 1 ? "" : "s"} found`);
+    for (const modelPath of env.existingModels.slice(0, 10)) projectLines.push(`- \`${modelPath}\``);
+    if (env.existingModels.length > 10) projectLines.push(`- ... and ${env.existingModels.length - 10} more`);
+  }
+  sections.push(projectLines.join("\n"));
+  const roleLines = ["## Your Role"];
+  roleLines.push("");
+  const hasDbt = env.tools.some((t) => t.name === "dbt");
+  const hasWarehouse = env.warehouse !== null;
+  const dbtTool = env.tools.find((t) => t.name === "dbt");
+  if (hasDbt && hasWarehouse) roleLines.push("This user has an existing dbt project with a warehouse connection. Help them create Bonnard cubes that reference their mart/staging tables. They can import their connection with `bon datasource add --from-dbt`.");
+  else if (hasDbt && !hasWarehouse) roleLines.push("This user has a dbt project but warehouse profiles were not found locally. Help them set up a datasource manually with `bon datasource add`, or ensure `~/.dbt/profiles.yml` is available and re-run `bon init`.");
+  else if (!hasDbt && hasWarehouse) roleLines.push("This user has a warehouse connection configured. Help them create cubes directly from their database tables.");
+  else roleLines.push("New project — no existing data tools detected. Help them connect a warehouse (`bon datasource add`), then create their first cube.");
+  roleLines.push("");
+  roleLines.push("**Important:**");
+  roleLines.push("- Bonnard models go in `bonnard/models/` and views in `bonnard/views/` — do NOT modify files outside these directories");
+  roleLines.push("- Use `bon docs` to look up Cube YAML syntax before writing model definitions");
+  if (hasDbt && dbtTool) {
+    const dbtDir = dbtTool.configPath.includes("/") ? dbtTool.configPath.split("/").slice(0, -1).join("/") + "/" : "";
+    const modelsPath = dbtDir ? `${dbtDir}models/` : "models/";
+    roleLines.push(`- dbt models are in \`${modelsPath}\` — these are the user's transformation layer, not Bonnard's. Do not modify them.`);
+  }
+  sections.push(roleLines.join("\n"));
+  return sections.join("\n\n");
+}
+
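Fed the record sketched earlier, `generateProjectContext` emits Markdown along these lines (abridged; the exact role text follows the hasDbt/hasWarehouse branches above):

```js
const context = `## This Project

**Detected tools:**
- dbt — found at \`dbt_project.yml\` (project: jaffle_shop, profile: jaffle_shop)

**Warehouse:** snowflake (detected via dbt-profiles)
- account: \`via env: SNOWFLAKE_ACCOUNT\`

## Your Role

This user has an existing dbt project with a warehouse connection. ...`;
```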
 //#endregion
 //#region src/commands/init.ts
 const __filename$1 = fileURLToPath(import.meta.url);
@@ -101,9 +557,10 @@ alwaysApply: ${alwaysApply}
 /**
  * Create agent templates (Claude Code, Cursor, and Codex)
  */
-function createAgentTemplates(cwd) {
+function createAgentTemplates(cwd, env) {
   const createdFiles = [];
-  const sharedBonnard = loadTemplate("shared/bonnard.md");
+  let sharedBonnard = loadTemplate("shared/bonnard.md");
+  if (env) sharedBonnard += "\n\n" + generateProjectContext(env);
   const claudeRulesDir = path.join(cwd, ".claude", "rules");
   const claudeSkillsDir = path.join(cwd, ".claude", "skills");
   fs.mkdirSync(claudeRulesDir, { recursive: true });
@@ -129,29 +586,37 @@ function createAgentTemplates(cwd) {
 async function initCommand() {
   const cwd = process.cwd();
   const projectName = path.basename(cwd);
-  if (fs.existsSync(path.join(cwd, "bon.yaml"))) {
+  const paths = getProjectPaths(cwd);
+  if (fs.existsSync(paths.config)) {
     console.log(pc.red("A bon.yaml already exists in this directory."));
     process.exit(1);
   }
-  fs.mkdirSync(path.join(cwd, "models"), { recursive: true });
-  fs.mkdirSync(path.join(cwd, "views"), { recursive: true });
-  fs.mkdirSync(path.join(cwd, ".bon"), { recursive: true });
-  fs.writeFileSync(path.join(cwd, "bon.yaml"), BON_YAML_TEMPLATE(projectName));
+  fs.mkdirSync(paths.models, { recursive: true });
+  fs.mkdirSync(paths.views, { recursive: true });
+  fs.mkdirSync(paths.localState, { recursive: true });
+  fs.writeFileSync(paths.config, BON_YAML_TEMPLATE(projectName));
   fs.writeFileSync(path.join(cwd, ".gitignore"), GITIGNORE_TEMPLATE);
-  const agentFiles = createAgentTemplates(cwd);
+  const env = detectProjectEnvironment(cwd);
+  const agentFiles = createAgentTemplates(cwd, env.tools.length > 0 || env.warehouse ? env : void 0);
   console.log(pc.green(`Initialised Bonnard project "${projectName}"`));
   console.log();
   console.log(pc.bold("Core files:"));
-  console.log(` ${pc.dim("bon.yaml")} project config`);
-  console.log(` ${pc.dim("models/")} model definitions`);
-  console.log(` ${pc.dim("views/")} view definitions`);
-  console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
-  console.log(` ${pc.dim(".gitignore")} git ignore rules`);
+  console.log(` ${pc.dim("bon.yaml")} project config`);
+  console.log(` ${pc.dim(`${BONNARD_DIR}/models/`)} model definitions`);
+  console.log(` ${pc.dim(`${BONNARD_DIR}/views/`)} view definitions`);
+  console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
+  console.log(` ${pc.dim(".gitignore")} git ignore rules`);
   if (agentFiles.length > 0) {
     console.log();
     console.log(pc.bold("Agent support:"));
     for (const file of agentFiles) console.log(` ${pc.dim(file)}`);
   }
+  if (env.tools.length > 0 || env.warehouse) {
+    console.log();
+    console.log(pc.bold("Detected environment:"));
+    for (const tool of env.tools) console.log(` ${pc.cyan(tool.name)} ${pc.dim(`(${tool.configPath})`)}`);
+    if (env.warehouse) console.log(` ${pc.cyan(env.warehouse.type)} warehouse ${pc.dim(`(via ${env.warehouse.source})`)}`);
+  }
 }
 
 //#endregion
@@ -552,69 +1017,6 @@ var local_exports = /* @__PURE__ */ __exportAll({
   saveLocalDatasources: () => saveLocalDatasources
 });
 
-//#endregion
-//#region src/lib/dbt/profiles.ts
-/**
- * dbt profiles.yml parser
- *
- * Parses ~/.dbt/profiles.yml and extracts connection configs.
- * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
- */
-const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
-/**
- * Check if dbt profiles.yml exists
- */
-function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
-  return fs.existsSync(profilesPath);
-}
-/**
- * Get the default dbt profiles path
- */
-function getDefaultProfilesPath() {
-  return DBT_PROFILES_PATH;
-}
-/**
- * Map dbt type to Bonnard warehouse type
- */
-function mapDbtType(dbtType) {
-  return {
-    snowflake: "snowflake",
-    postgres: "postgres",
-    postgresql: "postgres",
-    bigquery: "bigquery",
-    databricks: "databricks"
-  }[dbtType.toLowerCase()] ?? null;
-}
-/**
- * Parse dbt profiles.yml and return all connections
- * Config values are kept as-is (including {{ env_var(...) }} patterns)
- */
-function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
-  if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
-  const content = fs.readFileSync(profilesPath, "utf-8");
-  const profiles = YAML.parse(content);
-  if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
-  const connections = [];
-  for (const [profileName, profile] of Object.entries(profiles)) {
-    if (profileName === "config") continue;
-    if (!profile.outputs || typeof profile.outputs !== "object") continue;
-    const defaultTarget = profile.target || "dev";
-    for (const [targetName, target] of Object.entries(profile.outputs)) {
-      if (!target || typeof target !== "object" || !target.type) continue;
-      const warehouseType = mapDbtType(target.type);
-      if (!warehouseType) continue;
-      connections.push({
-        profileName,
-        targetName,
-        isDefaultTarget: targetName === defaultTarget,
-        type: warehouseType,
-        config: target
-      });
-    }
-  }
-  return connections;
-}
-
 //#endregion
 //#region src/lib/dbt/mapping.ts
 /**
@@ -1839,14 +2241,15 @@ async function previewCommand(datasourceName, sql, options) {
 //#region src/commands/validate.ts
 async function validateCommand(options = {}) {
   const cwd = process.cwd();
-  if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
+  const paths = getProjectPaths(cwd);
+  if (!fs.existsSync(paths.config)) {
     console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
     process.exit(1);
   }
-  const { validate } = await import("./validate-C4EHvJzJ.mjs");
+  const { validate } = await import("./validate-BUHevw7F.mjs");
   const result = await validate(cwd);
   if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
-    console.log(pc.yellow("No model or view files found in models/ or views/."));
+    console.log(pc.yellow(`No model or view files found in ${BONNARD_DIR}/models/ or ${BONNARD_DIR}/views/.`));
     return;
   }
   if (!result.valid) {
@@ -1881,7 +2284,7 @@ async function validateCommand(options = {}) {
  * Lenient: warns but doesn't fail validation
  */
 async function testReferencedConnections(cwd) {
-  const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+  const { extractDatasourcesFromModels } = await import("./models-CzOWi3fU.mjs");
   const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
   const { testConnection } = await Promise.resolve().then(() => connection_exports);
   const references = extractDatasourcesFromModels(cwd);
@@ -1949,12 +2352,13 @@ function collectFiles(dir, rootDir) {
 }
 async function deployCommand(options = {}) {
   const cwd = process.cwd();
-  if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
+  const paths = getProjectPaths(cwd);
+  if (!fs.existsSync(paths.config)) {
     console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
     process.exit(1);
   }
   console.log(pc.dim("Validating models..."));
-  const { validate } = await import("./validate-C4EHvJzJ.mjs");
+  const { validate } = await import("./validate-BUHevw7F.mjs");
   const result = await validate(cwd);
   if (!result.valid) {
     console.log(pc.red("Validation failed:\n"));
@@ -1962,14 +2366,14 @@ async function deployCommand(options = {}) {
     process.exit(1);
   }
   if (result.cubes.length === 0 && result.views.length === 0) {
-    console.log(pc.yellow("No model or view files found in models/ or views/. Nothing to deploy."));
+    console.log(pc.yellow(`No model or view files found in ${BONNARD_DIR}/models/ or ${BONNARD_DIR}/views/. Nothing to deploy.`));
     process.exit(1);
   }
   console.log(pc.dim(` Found ${result.cubes.length} cube(s) and ${result.views.length} view(s)`));
   if (await testAndSyncDatasources(cwd, options)) process.exit(1);
   const files = {
-    ...collectFiles(path.join(cwd, "models"), cwd),
-    ...collectFiles(path.join(cwd, "views"), cwd)
+    ...collectFiles(paths.models, cwd),
+    ...collectFiles(paths.views, cwd)
   };
   const fileCount = Object.keys(files).length;
   console.log(pc.dim(`Deploying ${fileCount} file(s)...`));
@@ -1993,7 +2397,7 @@ async function deployCommand(options = {}) {
  * Returns true if any connection failed (strict mode)
  */
 async function testAndSyncDatasources(cwd, options = {}) {
-  const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+  const { extractDatasourcesFromModels } = await import("./models-CzOWi3fU.mjs");
   const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
   const { testConnection } = await Promise.resolve().then(() => connection_exports);
   const { pushDatasource } = await Promise.resolve().then(() => push_exports);
@@ -2399,7 +2803,7 @@ async function cubeQueryCommand(queryInput, options = {}) {
 //#region src/bin/bon.ts
 const { version } = createRequire(import.meta.url)("../../package.json");
 program.name("bon").description("Bonnard semantic layer CLI").version(version);
-program.command("init").description("Create bon.yaml, models/, views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
+program.command("init").description("Create bon.yaml, bonnard/models/, bonnard/views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
 program.command("login").description("Authenticate with Bonnard via your browser").action(loginCommand);
 program.command("logout").description("Remove stored credentials").action(logoutCommand);
 program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
@@ -2410,7 +2814,7 @@ datasource.command("test").description("Test data source connectivity by connect
 datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
 datasource.command("push").description("Push a local data source to Bonnard server (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").option("--force", "Overwrite if already exists on remote").action(datasourcePushCommand);
 program.command("preview").description("Preview data from a local warehouse using raw SQL (for development/exploration)").argument("<datasource>", "Data source name from .bon/datasources.yaml").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(previewCommand);
-program.command("validate").description("Validate YAML syntax in models/ and views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
+program.command("validate").description("Validate YAML syntax in bonnard/models/ and bonnard/views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
 program.command("deploy").description("Deploy models to Bonnard. Requires login, validates models, tests connections (fails on error)").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").action(deployCommand);
 program.command("mcp").description("MCP connection info and setup instructions").action(mcpCommand).command("test").description("Test MCP server connectivity").action(mcpTestCommand);
 program.command("cube").description("Query the deployed Cube semantic layer").command("query").description("Execute a query against the deployed semantic layer").argument("<query>", "JSON query or SQL (with --sql flag)").option("--sql", "Use Cube SQL API instead of JSON format").option("--limit <limit>", "Max rows to return").option("--format <format>", "Output format: toon or json", "toon").action(cubeQueryCommand);
@@ -2418,4 +2822,4 @@ program.command("docs").description("Browse Cube documentation for building mode
 program.parse();
 
 //#endregion
-export { };
+export { getProjectPaths as t };
@@ -1,3 +1,4 @@
+import { t as getProjectPaths } from "./bon.mjs";
 import fs from "node:fs";
 import path from "node:path";
 import YAML from "yaml";
@@ -48,12 +49,13 @@ function extractFromFile(filePath) {
   return datasourceToCubes;
 }
 /**
- * Extract all unique datasource references from models/ and views/ directories
+ * Extract all unique datasource references from bonnard/models/ and bonnard/views/ directories
  * Returns datasource names mapped to the cubes that use them
 */
 function extractDatasourcesFromModels(projectPath) {
-  const modelsDir = path.join(projectPath, "models");
-  const viewsDir = path.join(projectPath, "views");
+  const paths = getProjectPaths(projectPath);
+  const modelsDir = paths.models;
+  const viewsDir = paths.views;
   const allFiles = [...collectYamlFiles(modelsDir), ...collectYamlFiles(viewsDir)];
   const aggregated = /* @__PURE__ */ new Map();
   for (const file of allFiles) {
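The aggregation this function builds is just a datasource-name-to-cubes map, e.g. (cube and datasource names hypothetical):

```js
const aggregated = new Map([
  ["default", ["orders", "users"]],  // cubes that reference datasource "default"
  ["events_wh", ["page_views"]]
]);
```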
@@ -1,3 +1,4 @@
+import { t as getProjectPaths } from "./bon.mjs";
 import fs from "node:fs";
 import path from "node:path";
 import YAML from "yaml";
@@ -59,8 +60,9 @@ function checkMissingDescriptions(files) {
   return missing;
 }
 function createModelRepository(projectPath) {
-  const modelsDir = path.join(projectPath, "models");
-  const viewsDir = path.join(projectPath, "views");
+  const paths = getProjectPaths(projectPath);
+  const modelsDir = paths.models;
+  const viewsDir = paths.views;
   return {
     localPath: () => projectPath,
     dataSchemaFiles: () => {
@@ -90,10 +90,10 @@ cubes:
 
 ## File Organization
 
-One cube per file in the `models/` directory:
+One cube per file in the `bonnard/models/` directory:
 
 ```
-models/
+bonnard/models/
 ├── orders.yaml
 ├── users.yaml
 ├── products.yaml
@@ -113,10 +113,10 @@ views:
 
 ## File Organization
 
-Store views in the `views/` directory:
+Store views in the `bonnard/views/` directory:
 
 ```
-views/
+bonnard/views/
 ├── orders_overview.yaml
 ├── sales_dashboard.yaml
 └── customer_360.yaml
@@ -32,9 +32,9 @@ bon deploy
 bon deploy
 
 ✓ Validating models...
-  ✓ models/orders.yaml
-  ✓ models/users.yaml
-  ✓ views/orders_overview.yaml
+  bonnard/models/orders.yaml
+  bonnard/models/users.yaml
+  bonnard/views/orders_overview.yaml
 
 ✓ Testing connections...
   ✓ datasource "default" connected
@@ -58,8 +58,8 @@ bon deploy
 ├── 2. Test all datasource connections (must succeed)
 │
 ├── 3. Upload to Bonnard API
-│     - cubes from models/
-│     - views from views/
+│     - cubes from bonnard/models/
+│     - views from bonnard/views/
 │     - datasource configs
 │
 └── 4. Activate deployment
@@ -72,7 +72,7 @@ bon deploy
 ```
 ✗ Validating models...
 
-models/orders.yaml:15:5
+bonnard/models/orders.yaml:15:5
   error: Unknown measure type "counts"
 
 Deploy aborted. Fix validation errors first.
@@ -102,8 +102,8 @@ Run: bon login
 
 | Source | Deployed |
 |--------|----------|
-| `models/*.yaml` | All cube definitions |
-| `views/*.yaml` | All view definitions |
+| `bonnard/models/*.yaml` | All cube definitions |
+| `bonnard/views/*.yaml` | All view definitions |
 | `.bon/datasources.yaml` | Connection configs (credentials encrypted) |
 | `bon.yaml` | Project settings |
 
@@ -15,7 +15,7 @@ bon init
 # 2. Add a data source
 bon datasource add
 
-# 3. Create models in models/ and views in views/
+# 3. Create models in bonnard/models/ and views in bonnard/views/
 
 # 4. Validate your models
 bon validate
@@ -31,10 +31,11 @@ After `bon init`, your project has:
 ```
 my-project/
 ├── bon.yaml                 # Project configuration
-├── models/                  # Cube definitions
-│   └── orders.yaml
-├── views/                   # View definitions
-│   └── orders_overview.yaml
+├── bonnard/                 # Semantic layer definitions
+│   ├── models/              # Cube definitions
+│   │   └── orders.yaml
+│   └── views/               # View definitions
+│       └── orders_overview.yaml
 └── .bon/                    # Local config (gitignored)
     └── datasources.yaml     # Data source credentials
 ```
@@ -46,7 +47,7 @@ my-project/
 
 Create cubes that map to your database tables:
 ```yaml
-# models/orders.yaml
+# bonnard/models/orders.yaml
 cubes:
   - name: orders
     sql_table: public.orders
@@ -66,7 +67,7 @@ cubes:
 
 Create views that expose cubes to consumers:
 ```yaml
-# views/orders_overview.yaml
+# bonnard/views/orders_overview.yaml
 views:
   - name: orders_overview
     cubes:
@@ -98,7 +99,7 @@ bon deploy
 ### One Cube Per File
 
 ```
-models/
+bonnard/models/
 ├── orders.yaml
 ├── users.yaml
 ├── products.yaml
@@ -108,7 +109,7 @@ models/
 ### Related Cubes Together
 
 ```
-models/
+bonnard/models/
 ├── sales/
 │   ├── orders.yaml
 │   └── line_items.yaml
@@ -48,9 +48,9 @@ bon validate --test-connection
 
 ```
 ✓ Validating YAML syntax...
-  ✓ Checking models/orders.yaml
-  ✓ Checking models/users.yaml
-  ✓ Checking views/orders_overview.yaml
+  ✓ Checking bonnard/models/orders.yaml
+  ✓ Checking bonnard/models/users.yaml
+  ✓ Checking bonnard/views/orders_overview.yaml
 
 All models valid.
 ```
@@ -60,7 +60,7 @@ All models valid.
 ```
 ✗ Validating YAML syntax...
 
-models/orders.yaml:15:5
+bonnard/models/orders.yaml:15:5
   error: Unknown measure type "counts"
 
 Did you mean "count"?
@@ -49,8 +49,8 @@ views:
 
 ## Workflow
 
-1. Define models in `models/*.yaml`
-2. Define views in `views/*.yaml`
+1. Define models in `bonnard/models/*.yaml`
+2. Define views in `bonnard/views/*.yaml`
 3. Run `bon validate` to check syntax
 4. Run `bon deploy` to publish
 
@@ -43,7 +43,7 @@ views:
 
 ## Workflow
 
-1. Define models in `models/*.yaml`
-2. Define views in `views/*.yaml`
+1. Define models in `bonnard/models/*.yaml`
+2. Define views in `bonnard/views/*.yaml`
 3. Run `bon validate` to check syntax
 4. Run `bon deploy` to publish
@@ -29,11 +29,12 @@ Data Warehouse (Snowflake, Postgres, BigQuery, Databricks)
 ```
 my-project/
 ├── bon.yaml                 # Project config
-├── models/                  # Cube definitions
-│   ├── orders.yaml
-│   └── customers.yaml
-├── views/                   # View definitions
-│   └── sales_overview.yaml
+├── bonnard/                 # Semantic layer definitions
+│   ├── models/              # Cube definitions
+│   │   ├── orders.yaml
+│   │   └── customers.yaml
+│   └── views/               # View definitions
+│       └── sales_overview.yaml
 └── .bon/                    # Local state (gitignored)
     └── datasources.yaml     # Warehouse connections
 ```
@@ -72,8 +73,8 @@ Topics follow dot notation (e.g., `cubes.dimensions.time`). Use `--recursive` to
 ## Workflow
 
 1. **Setup datasource** — `bon datasource add --from-dbt` or manual
-2. **Create cubes** — Define measures/dimensions in `models/*.yaml`
-3. **Create views** — Compose cubes in `views/*.yaml`
+2. **Create cubes** — Define measures/dimensions in `bonnard/models/*.yaml`
+3. **Create views** — Compose cubes in `bonnard/views/*.yaml`
 4. **Validate** — `bon validate --test-connection`
 5. **Deploy** — `bon login` then `bon deploy`
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@bonnard/cli",
-  "version": "0.1.5",
+  "version": "0.1.7",
   "type": "module",
   "bin": {
     "bon": "./dist/bin/bon.mjs"
@@ -9,7 +9,7 @@
     "dist"
   ],
   "scripts": {
-    "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && cp -r src/docs dist/",
+    "build": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin && cp -r src/templates dist/ && mkdir -p dist/docs/topics dist/docs/schemas && cp ../content/index.md dist/docs/_index.md && cp ../content/modeling/*.md dist/docs/topics/",
     "dev": "tsdown src/bin/bon.ts --format esm --out-dir dist/bin --watch",
     "test": "vitest run"
   },
@@ -1,78 +0,0 @@
-# Bonnard CLI Documentation
-
-This directory contains the documentation served by `bon docs`.
-
-## Structure
-
-```
-docs/
-├── _index.md          # Index shown by `bon docs` (llms.txt style)
-├── topics/            # Individual topic files
-│   ├── cubes.md
-│   ├── cubes.measures.md
-│   ├── cubes.measures.types.md
-│   └── ...
-├── schemas/           # JSON schemas for validation
-│   ├── cube.schema.json
-│   └── view.schema.json
-└── README.md          # This file
-```
-
-## Topic Naming Convention
-
-Topic IDs use dot notation that maps directly to filenames:
-
-| Topic ID | File |
-|----------|------|
-| `cubes` | `topics/cubes.md` |
-| `cubes.measures` | `topics/cubes.measures.md` |
-| `cubes.measures.types` | `topics/cubes.measures.types.md` |
-
-## Topic File Format
-
-Each topic file should follow this structure:
-
-```markdown
-# topic.name
-
-> Brief one-line description.
-
-## Overview
-
-Short explanation (2-3 sentences).
-
-## Example
-
-```yaml
-# Minimal working example
-```
-
-## Reference
-
-| Property | Type | Description |
-|----------|------|-------------|
-| name | string | ... |
-
-## See Also
-
-- related.topic
-- another.topic
-
-```
-
-## Guidelines
-
-- Keep topics concise (~20-40 lines)
-- Lead with examples, not theory
-- Use tables for property references
-- Include "See Also" for discoverability
-
-## Commands
-
-```bash
-bon docs                      # Show index
-bon docs <topic>              # Show specific topic
-bon docs <topic> --recursive  # Show topic + children
-bon docs --search <query>     # Search topics
-bon docs schema <type>        # Show JSON schema
-```
@@ -1,15 +0,0 @@
-# Bonnard Analytics
-
-This project uses Bonnard for semantic layer analytics.
-
-## Quick Reference
-
-| Command | Purpose |
-|---------|---------|
-| `bon validate` | Validate models and views |
-| `bon deploy` | Deploy to Cube |
-| `bon query <ds> <sql>` | Run SQL query |
-| `bon datasource list` | List data sources |
-
-For detailed CLI help: `/bonnard-cli`
-For query patterns: `/bonnard-queries`
@@ -1,20 +0,0 @@
----
-description: "Bonnard analytics project context"
-alwaysApply: true
----
-
-# Bonnard Analytics
-
-This project uses Bonnard for semantic layer analytics.
-
-## Quick Reference
-
-| Command | Purpose |
-|---------|---------|
-| `bon validate` | Validate models and views |
-| `bon deploy` | Deploy to Cube |
-| `bon query <ds> <sql>` | Run SQL query |
-| `bon datasource list` | List data sources |
-
-For detailed CLI help, ask about "bonnard cli commands".
-For query patterns, ask about "bonnard queries".