@bonnard/cli 0.1.4 → 0.1.6

Files changed (47)
  1. package/dist/bin/bon.mjs +576 -91
  2. package/dist/bin/{models-IsV2sX74.mjs → models-CzOWi3fU.mjs} +5 -3
  3. package/dist/bin/{validate-C4EHvJzJ.mjs → validate-BUHevw7F.mjs} +4 -2
  4. package/dist/docs/_index.md +1 -0
  5. package/dist/docs/topics/cubes.data-source.md +0 -4
  6. package/dist/docs/topics/cubes.dimensions.format.md +0 -4
  7. package/dist/docs/topics/cubes.dimensions.md +0 -4
  8. package/dist/docs/topics/cubes.dimensions.primary-key.md +0 -4
  9. package/dist/docs/topics/cubes.dimensions.sub-query.md +0 -4
  10. package/dist/docs/topics/cubes.dimensions.time.md +0 -4
  11. package/dist/docs/topics/cubes.dimensions.types.md +0 -4
  12. package/dist/docs/topics/cubes.extends.md +0 -4
  13. package/dist/docs/topics/cubes.hierarchies.md +0 -4
  14. package/dist/docs/topics/cubes.joins.md +0 -4
  15. package/dist/docs/topics/cubes.md +2 -6
  16. package/dist/docs/topics/cubes.measures.calculated.md +0 -4
  17. package/dist/docs/topics/cubes.measures.drill-members.md +0 -4
  18. package/dist/docs/topics/cubes.measures.filters.md +0 -4
  19. package/dist/docs/topics/cubes.measures.format.md +0 -4
  20. package/dist/docs/topics/cubes.measures.md +0 -4
  21. package/dist/docs/topics/cubes.measures.rolling.md +0 -4
  22. package/dist/docs/topics/cubes.measures.types.md +0 -4
  23. package/dist/docs/topics/cubes.public.md +0 -4
  24. package/dist/docs/topics/cubes.refresh-key.md +0 -4
  25. package/dist/docs/topics/cubes.segments.md +0 -4
  26. package/dist/docs/topics/cubes.sql.md +0 -4
  27. package/dist/docs/topics/pre-aggregations.md +0 -4
  28. package/dist/docs/topics/pre-aggregations.rollup.md +0 -4
  29. package/dist/docs/topics/syntax.context-variables.md +0 -4
  30. package/dist/docs/topics/syntax.md +0 -4
  31. package/dist/docs/topics/syntax.references.md +0 -4
  32. package/dist/docs/topics/views.cubes.md +0 -4
  33. package/dist/docs/topics/views.folders.md +0 -4
  34. package/dist/docs/topics/views.includes.md +0 -4
  35. package/dist/docs/topics/views.md +2 -6
  36. package/dist/docs/topics/workflow.deploy.md +8 -12
  37. package/dist/docs/topics/workflow.mcp.md +100 -0
  38. package/dist/docs/topics/workflow.md +10 -13
  39. package/dist/docs/topics/workflow.query.md +0 -5
  40. package/dist/docs/topics/workflow.validate.md +4 -8
  41. package/dist/templates/claude/skills/bonnard-queries/SKILL.md +2 -2
  42. package/dist/templates/cursor/rules/bonnard-queries.mdc +2 -2
  43. package/dist/templates/shared/bonnard.md +8 -7
  44. package/package.json +3 -2
  45. package/dist/docs/README.md +0 -82
  46. package/dist/templates/claude/rules/bonnard.md +0 -15
  47. package/dist/templates/cursor/rules/bonnard.mdc +0 -20
package/dist/bin/bon.mjs CHANGED
@@ -5,10 +5,10 @@ import fs from "node:fs";
  import path from "node:path";
  import { fileURLToPath } from "node:url";
  import pc from "picocolors";
+ import YAML from "yaml";
+ import os from "node:os";
  import http from "node:http";
  import crypto from "node:crypto";
- import os from "node:os";
- import YAML from "yaml";
  import { execFileSync } from "node:child_process";
  import { confirm } from "@inquirer/prompts";
  import { encode } from "@toon-format/toon";
@@ -30,6 +30,462 @@ var __exportAll = (all, symbols) => {
  };
  var __require = /* @__PURE__ */ createRequire(import.meta.url);

+ //#endregion
+ //#region src/lib/project.ts
+ /**
+ * The subdirectory name used for Bonnard model/view files.
+ * Keeps Bonnard files namespaced to avoid conflicts with existing
+ * project directories (e.g. dbt's models/).
+ */
+ const BONNARD_DIR = "bonnard";
+ /**
+ * Resolve Bonnard project paths relative to the working directory.
+ * All model/view operations should use these paths.
+ */
+ function getProjectPaths(cwd) {
+ const bonnardRoot = path.join(cwd, BONNARD_DIR);
+ return {
+ root: bonnardRoot,
+ models: path.join(bonnardRoot, "models"),
+ views: path.join(bonnardRoot, "views"),
+ config: path.join(cwd, "bon.yaml"),
+ localState: path.join(cwd, ".bon")
+ };
+ }
+
+ //#endregion
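For illustration, with cwd = "/repo" (a hypothetical path), the new getProjectPaths resolves to:

    // A minimal sketch, assuming cwd = "/repo" (hypothetical):
    getProjectPaths("/repo");
    // => {
    //   root: "/repo/bonnard",
    //   models: "/repo/bonnard/models",
    //   views: "/repo/bonnard/views",
    //   config: "/repo/bon.yaml",   // bon.yaml stays at the repo root
    //   localState: "/repo/.bon"    // as does local state in .bon/
    // }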
+ //#region src/lib/dbt/profiles.ts
+ /**
+ * dbt profiles.yml parser
+ *
+ * Parses ~/.dbt/profiles.yml and extracts connection configs.
+ * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
+ */
+ const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
+ /**
+ * Check if dbt profiles.yml exists
+ */
+ function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
+ return fs.existsSync(profilesPath);
+ }
+ /**
+ * Get the default dbt profiles path
+ */
+ function getDefaultProfilesPath() {
+ return DBT_PROFILES_PATH;
+ }
+ /**
+ * Map dbt type to Bonnard warehouse type
+ */
+ function mapDbtType(dbtType) {
+ return {
+ snowflake: "snowflake",
+ postgres: "postgres",
+ postgresql: "postgres",
+ bigquery: "bigquery",
+ databricks: "databricks"
+ }[dbtType.toLowerCase()] ?? null;
+ }
+ /**
+ * Parse dbt profiles.yml and return all connections
+ * Config values are kept as-is (including {{ env_var(...) }} patterns)
+ */
+ function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
+ if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
+ const content = fs.readFileSync(profilesPath, "utf-8");
+ const profiles = YAML.parse(content);
+ if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
+ const connections = [];
+ for (const [profileName, profile] of Object.entries(profiles)) {
+ if (profileName === "config") continue;
+ if (!profile.outputs || typeof profile.outputs !== "object") continue;
+ const defaultTarget = profile.target || "dev";
+ for (const [targetName, target] of Object.entries(profile.outputs)) {
+ if (!target || typeof target !== "object" || !target.type) continue;
+ const warehouseType = mapDbtType(target.type);
+ if (!warehouseType) continue;
+ connections.push({
+ profileName,
+ targetName,
+ isDefaultTarget: targetName === defaultTarget,
+ type: warehouseType,
+ config: target
+ });
+ }
+ }
+ return connections;
+ }
+
+ //#endregion
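A minimal sketch of what this parser consumes and produces — the profile, target, and account values below are invented for illustration:

    // Hypothetical ~/.dbt/profiles.yml:
    //   analytics:
    //     target: dev
    //     outputs:
    //       dev:
    //         type: snowflake
    //         account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
    //         database: ANALYTICS
    //
    // parseDbtProfiles() would return (env_var pattern deliberately unresolved):
    //   [{
    //     profileName: "analytics",
    //     targetName: "dev",
    //     isDefaultTarget: true,
    //     type: "snowflake",
    //     config: { type: "snowflake", account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}", database: "ANALYTICS" }
    //   }]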
+ //#region src/lib/detect/scanner.ts
+ /**
+ * Project environment scanner
+ *
+ * Detects data tools (dbt, dagster, etc.), warehouse connections,
+ * and existing model files in the user's project directory.
+ */
+ const CONFIG_FILES = {
+ "dbt_project.yml": "dbt",
+ "dagster.yaml": "dagster",
+ "prefect.yaml": "prefect",
+ "evidence.config.yaml": "evidence",
+ "cube.js": "cube",
+ "cube.py": "cube",
+ "manifest.lkml": "looker",
+ "airflow.cfg": "airflow"
+ };
+ const CONFIG_DIRS = {
+ dags: "airflow",
+ great_expectations: "great-expectations",
+ gx: "great-expectations"
+ };
+ const SCAN_DIRS = [
+ "dbt",
+ "transform",
+ "analytics",
+ "data",
+ "warehouse",
+ "data-warehouse",
+ "orchestration",
+ "dagster",
+ "airflow"
+ ];
+ const PYTHON_PACKAGES = {
+ "dbt-core": "dbt",
+ "dbt-snowflake": "dbt",
+ "dbt-postgres": "dbt",
+ "dbt-bigquery": "dbt",
+ "dbt-databricks": "dbt",
+ dagster: "dagster",
+ sqlmesh: "sqlmesh",
+ "apache-airflow": "airflow",
+ prefect: "prefect",
+ "soda-core": "soda"
+ };
+ const NPM_PACKAGES = {
+ "@cubejs-backend/": "cube",
+ "@evidence-dev/": "evidence"
+ };
+ const SAFE_DBT_FIELDS = [
+ "account",
+ "host",
+ "database",
+ "dbname",
+ "schema",
+ "warehouse",
+ "role",
+ "port",
+ "project",
+ "dataset",
+ "location",
+ "hostname",
+ "http_path",
+ "catalog"
+ ];
+ /**
+ * Scan root and subdirs for known config files/directories
+ */
+ function scanForConfigFiles(cwd) {
+ const tools = [];
+ const seen = /* @__PURE__ */ new Set();
+ function checkDir(dir, prefix) {
+ for (const [filename, toolName] of Object.entries(CONFIG_FILES)) {
+ if (seen.has(toolName)) continue;
+ const filePath = path.join(dir, filename);
+ if (fs.existsSync(filePath)) {
+ const relativePath = prefix ? `${prefix}/${filename}` : filename;
+ tools.push({
+ name: toolName,
+ configPath: relativePath
+ });
+ seen.add(toolName);
+ }
+ }
+ for (const [dirname, toolName] of Object.entries(CONFIG_DIRS)) {
+ if (seen.has(toolName)) continue;
+ const dirPath = path.join(dir, dirname);
+ try {
+ if (fs.statSync(dirPath).isDirectory()) {
+ const relativePath = prefix ? `${prefix}/${dirname}/` : `${dirname}/`;
+ tools.push({
+ name: toolName,
+ configPath: relativePath
+ });
+ seen.add(toolName);
+ }
+ } catch {}
+ }
+ }
+ checkDir(cwd, "");
+ for (const subdir of SCAN_DIRS) {
+ const subdirPath = path.join(cwd, subdir);
+ try {
+ if (fs.statSync(subdirPath).isDirectory()) checkDir(subdirPath, subdir);
+ } catch {}
+ }
+ return tools;
+ }
+ /**
+ * Scan dependency files for known data tool packages
+ */
+ function scanDependencyFiles(cwd) {
+ const tools = [];
+ const pyprojectPath = path.join(cwd, "pyproject.toml");
+ if (fs.existsSync(pyprojectPath)) try {
+ const content = fs.readFileSync(pyprojectPath, "utf-8");
+ for (const [pkg, toolName] of Object.entries(PYTHON_PACKAGES)) if (content.includes(pkg)) tools.push({
+ name: toolName,
+ configPath: "pyproject.toml"
+ });
+ } catch {}
+ const requirementsPath = path.join(cwd, "requirements.txt");
+ if (fs.existsSync(requirementsPath)) try {
+ const content = fs.readFileSync(requirementsPath, "utf-8");
+ for (const [pkg, toolName] of Object.entries(PYTHON_PACKAGES)) if (content.includes(pkg)) tools.push({
+ name: toolName,
+ configPath: "requirements.txt"
+ });
+ } catch {}
+ const packageJsonPath = path.join(cwd, "package.json");
+ if (fs.existsSync(packageJsonPath)) try {
+ const content = fs.readFileSync(packageJsonPath, "utf-8");
+ const pkg = JSON.parse(content);
+ const allDeps = {
+ ...pkg.dependencies,
+ ...pkg.devDependencies
+ };
+ for (const depName of Object.keys(allDeps)) for (const [prefix, toolName] of Object.entries(NPM_PACKAGES)) if (depName.startsWith(prefix)) tools.push({
+ name: toolName,
+ configPath: "package.json"
+ });
+ } catch {}
+ return tools;
+ }
+ /**
+ * Deduplicate tools by name, keeping the first occurrence (config file > dependency)
+ */
+ function deduplicateTools(tools) {
+ const seen = /* @__PURE__ */ new Set();
+ return tools.filter((tool) => {
+ if (seen.has(tool.name)) return false;
+ seen.add(tool.name);
+ return true;
+ });
+ }
+ /**
+ * Extract safe (non-secret) fields from a config object.
+ * If a value looks like a dbt env_var reference, record it as "via env: VAR_NAME".
+ */
+ function extractSafeFields(config) {
+ const safe = {};
+ for (const field of SAFE_DBT_FIELDS) {
+ const value = config[field];
+ if (value === void 0 || value === null) continue;
+ const strValue = String(value);
+ const envMatch = strValue.match(/\{\{\s*env_var\s*\(\s*['"]([^'"]+)['"]/);
+ if (envMatch) safe[field] = `via env: ${envMatch[1]}`;
+ else safe[field] = strValue;
+ }
+ return safe;
+ }
+ /**
+ * Try to extract warehouse info from dbt profiles
+ */
+ function extractWarehouseFromDbt(cwd, tools) {
+ const dbtTool = tools.find((t) => t.name === "dbt");
+ if (!dbtTool) return null;
+ const dbtProjectPath = path.join(cwd, dbtTool.configPath);
+ let profileName;
+ try {
+ const content = fs.readFileSync(dbtProjectPath, "utf-8");
+ const parsed = YAML.parse(content);
+ profileName = parsed?.profile;
+ const projectName = parsed?.name;
+ if (projectName) dbtTool.metadata = {
+ ...dbtTool.metadata,
+ project: projectName
+ };
+ if (profileName) dbtTool.metadata = {
+ ...dbtTool.metadata,
+ profile: profileName
+ };
+ } catch {}
+ if (!dbtProfilesExist()) return null;
+ try {
+ const connections = parseDbtProfiles();
+ let connection = profileName ? connections.find((c) => c.profileName === profileName && c.isDefaultTarget) : null;
+ if (!connection && connections.length > 0) connection = connections[0];
+ if (!connection) return null;
+ return {
+ type: connection.type,
+ source: "dbt-profiles",
+ config: extractSafeFields(connection.config)
+ };
+ } catch {
+ return null;
+ }
+ }
+ /**
+ * Try to extract warehouse info from .env file
+ */
+ function extractWarehouseFromEnv(cwd) {
+ const envPath = path.join(cwd, ".env");
+ if (!fs.existsSync(envPath)) return null;
+ try {
+ const content = fs.readFileSync(envPath, "utf-8");
+ const cubeDbType = content.match(/^CUBEJS_DB_TYPE=(.+)$/m);
+ if (cubeDbType) {
+ const type = {
+ snowflake: "snowflake",
+ postgres: "postgres",
+ bigquery: "bigquery",
+ databricks: "databricks"
+ }[cubeDbType[1].trim().toLowerCase()];
+ if (type) return {
+ type,
+ source: "env",
+ config: { CUBEJS_DB_TYPE: cubeDbType[1].trim() }
+ };
+ }
+ if (content.match(/^SNOWFLAKE_ACCOUNT=/m)) {
+ const account = content.match(/^SNOWFLAKE_ACCOUNT=(.+)$/m);
+ return {
+ type: "snowflake",
+ source: "env",
+ config: account ? { account: account[1].trim() } : {}
+ };
+ }
+ if (content.match(/^PGHOST=/m) || content.match(/^DATABASE_URL=postgres/m)) return {
+ type: "postgres",
+ source: "env",
+ config: {}
+ };
+ } catch {}
+ return null;
+ }
+ /**
+ * Extract warehouse info from detected tools and environment
+ */
+ function extractWarehouseInfo(cwd, tools) {
+ if (tools.some((t) => t.name === "dbt")) {
+ const fromDbt = extractWarehouseFromDbt(cwd, tools);
+ if (fromDbt) return fromDbt;
+ }
+ return extractWarehouseFromEnv(cwd);
+ }
+ /**
+ * Collect existing model files from detected tool directories.
+ * Non-recursive scan of likely model dirs, capped at 20 paths.
+ */
+ function collectModelFiles(cwd, tools) {
+ const modelFiles = [];
+ const extensions = new Set([
+ ".sql",
+ ".yml",
+ ".yaml",
+ ".lkml"
+ ]);
+ const modelDirSet = /* @__PURE__ */ new Set();
+ for (const tool of tools) if (tool.name === "dbt") {
+ const dbtDir = path.dirname(path.join(cwd, tool.configPath));
+ modelDirSet.add(path.join(dbtDir, "models"));
+ } else if (tool.name === "looker") {
+ const lookDir = path.dirname(path.join(cwd, tool.configPath));
+ modelDirSet.add(lookDir);
+ }
+ for (const dir of [
+ "models",
+ "marts",
+ "staging",
+ "intermediate"
+ ]) modelDirSet.add(path.join(cwd, dir));
+ for (const dir of modelDirSet) {
+ if (modelFiles.length >= 20) break;
+ try {
+ if (!fs.statSync(dir).isDirectory()) continue;
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (modelFiles.length >= 20) break;
+ if (entry.isFile() && extensions.has(path.extname(entry.name))) modelFiles.push(path.relative(cwd, path.join(dir, entry.name)));
+ if (entry.isDirectory()) try {
+ const subEntries = fs.readdirSync(path.join(dir, entry.name), { withFileTypes: true });
+ for (const subEntry of subEntries) {
+ if (modelFiles.length >= 20) break;
+ if (subEntry.isFile() && extensions.has(path.extname(subEntry.name))) modelFiles.push(path.relative(cwd, path.join(dir, entry.name, subEntry.name)));
+ }
+ } catch {}
+ }
+ } catch {}
+ }
+ return modelFiles;
+ }
+ /**
+ * Detect the project environment by scanning for data tools,
+ * warehouse connections, and existing model files.
+ */
+ function detectProjectEnvironment(cwd) {
+ const configTools = scanForConfigFiles(cwd);
+ const depTools = scanDependencyFiles(cwd);
+ const tools = deduplicateTools([...configTools, ...depTools]);
+ return {
+ tools,
+ warehouse: extractWarehouseInfo(cwd, tools),
+ existingModels: collectModelFiles(cwd, tools)
+ };
+ }
+
+ //#endregion
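Run against the hypothetical dbt repo sketched earlier, the scanner's entry point would return something like:

    // Illustrative only — names and values are hypothetical:
    detectProjectEnvironment("/repo");
    // => {
    //   tools: [{ name: "dbt", configPath: "dbt_project.yml", metadata: { project: "acme", profile: "analytics" } }],
    //   warehouse: {
    //     type: "snowflake",
    //     source: "dbt-profiles",
    //     // secrets dropped by extractSafeFields; env refs recorded as "via env: ..."
    //     config: { account: "via env: SNOWFLAKE_ACCOUNT", database: "ANALYTICS" }
    //   },
    //   existingModels: ["models/orders.sql", "models/staging/stg_orders.sql"]
    // }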
+ //#region src/lib/detect/context.ts
+ /**
+ * Generate a Markdown context section from detected project environment.
+ * Returns a string with "## This Project" and "## Your Role" sections.
+ */
+ function generateProjectContext(env) {
+ const sections = [];
+ const projectLines = ["## This Project"];
+ if (env.tools.length > 0) {
+ projectLines.push("");
+ projectLines.push("**Detected tools:**");
+ for (const tool of env.tools) {
+ const meta = tool.metadata ? ` (${Object.entries(tool.metadata).map(([k, v]) => `${k}: ${v}`).join(", ")})` : "";
+ projectLines.push(`- ${tool.name} — found at \`${tool.configPath}\`${meta}`);
+ }
+ }
+ if (env.warehouse) {
+ projectLines.push("");
+ projectLines.push(`**Warehouse:** ${env.warehouse.type} (detected via ${env.warehouse.source})`);
+ const configEntries = Object.entries(env.warehouse.config);
+ if (configEntries.length > 0) for (const [key, value] of configEntries) projectLines.push(`- ${key}: \`${value}\``);
+ }
+ if (env.existingModels.length > 0) {
+ projectLines.push("");
+ projectLines.push(`**Existing model files:** ${env.existingModels.length} file${env.existingModels.length === 1 ? "" : "s"} found`);
+ for (const modelPath of env.existingModels.slice(0, 10)) projectLines.push(`- \`${modelPath}\``);
+ if (env.existingModels.length > 10) projectLines.push(`- ... and ${env.existingModels.length - 10} more`);
+ }
+ sections.push(projectLines.join("\n"));
+ const roleLines = ["## Your Role"];
+ roleLines.push("");
+ const hasDbt = env.tools.some((t) => t.name === "dbt");
+ const hasWarehouse = env.warehouse !== null;
+ const dbtTool = env.tools.find((t) => t.name === "dbt");
+ if (hasDbt && hasWarehouse) roleLines.push("This user has an existing dbt project with a warehouse connection. Help them create Bonnard cubes that reference their mart/staging tables. They can import their connection with `bon datasource add --from-dbt`.");
+ else if (hasDbt && !hasWarehouse) roleLines.push("This user has a dbt project but warehouse profiles were not found locally. Help them set up a datasource manually with `bon datasource add`, or ensure `~/.dbt/profiles.yml` is available and re-run `bon init`.");
+ else if (!hasDbt && hasWarehouse) roleLines.push("This user has a warehouse connection configured. Help them create cubes directly from their database tables.");
+ else roleLines.push("New project — no existing data tools detected. Help them connect a warehouse (`bon datasource add`), then create their first cube.");
+ roleLines.push("");
+ roleLines.push("**Important:**");
+ roleLines.push("- Bonnard models go in `bonnard/models/` and views in `bonnard/views/` — do NOT modify files outside these directories");
+ roleLines.push("- Use `bon docs` to look up Cube YAML syntax before writing model definitions");
+ if (hasDbt && dbtTool) {
+ const dbtDir = dbtTool.configPath.includes("/") ? dbtTool.configPath.split("/").slice(0, -1).join("/") + "/" : "";
+ const modelsPath = dbtDir ? `${dbtDir}models/` : "models/";
+ roleLines.push(`- dbt models are in \`${modelsPath}\` — these are the user's transformation layer, not Bonnard's. Do not modify them.`);
+ }
+ sections.push(roleLines.join("\n"));
+ return sections.join("\n\n");
+ }
+
  //#endregion
  //#region src/commands/init.ts
  const __filename$1 = fileURLToPath(import.meta.url);
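Fed that same hypothetical environment, generateProjectContext emits markdown along these lines (abridged):

    ## This Project

    **Detected tools:**
    - dbt — found at `dbt_project.yml` (project: acme, profile: analytics)

    **Warehouse:** snowflake (detected via dbt-profiles)
    - account: `via env: SNOWFLAKE_ACCOUNT`
    - database: `ANALYTICS`

    ## Your Role

    This user has an existing dbt project with a warehouse connection. ...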
@@ -101,9 +557,10 @@ alwaysApply: ${alwaysApply}
  /**
  * Create agent templates (Claude Code, Cursor, and Codex)
  */
- function createAgentTemplates(cwd) {
+ function createAgentTemplates(cwd, env) {
  const createdFiles = [];
- const sharedBonnard = loadTemplate("shared/bonnard.md");
+ let sharedBonnard = loadTemplate("shared/bonnard.md");
+ if (env) sharedBonnard += "\n\n" + generateProjectContext(env);
  const claudeRulesDir = path.join(cwd, ".claude", "rules");
  const claudeSkillsDir = path.join(cwd, ".claude", "skills");
  fs.mkdirSync(claudeRulesDir, { recursive: true });
@@ -129,29 +586,37 @@ function createAgentTemplates(cwd) {
  async function initCommand() {
  const cwd = process.cwd();
  const projectName = path.basename(cwd);
- if (fs.existsSync(path.join(cwd, "bon.yaml"))) {
+ const paths = getProjectPaths(cwd);
+ if (fs.existsSync(paths.config)) {
  console.log(pc.red("A bon.yaml already exists in this directory."));
  process.exit(1);
  }
- fs.mkdirSync(path.join(cwd, "models"), { recursive: true });
- fs.mkdirSync(path.join(cwd, "views"), { recursive: true });
- fs.mkdirSync(path.join(cwd, ".bon"), { recursive: true });
- fs.writeFileSync(path.join(cwd, "bon.yaml"), BON_YAML_TEMPLATE(projectName));
+ fs.mkdirSync(paths.models, { recursive: true });
+ fs.mkdirSync(paths.views, { recursive: true });
+ fs.mkdirSync(paths.localState, { recursive: true });
+ fs.writeFileSync(paths.config, BON_YAML_TEMPLATE(projectName));
  fs.writeFileSync(path.join(cwd, ".gitignore"), GITIGNORE_TEMPLATE);
- const agentFiles = createAgentTemplates(cwd);
+ const env = detectProjectEnvironment(cwd);
+ const agentFiles = createAgentTemplates(cwd, env.tools.length > 0 || env.warehouse ? env : void 0);
  console.log(pc.green(`Initialised Bonnard project "${projectName}"`));
  console.log();
  console.log(pc.bold("Core files:"));
- console.log(` ${pc.dim("bon.yaml")} project config`);
- console.log(` ${pc.dim("models/")} model definitions`);
- console.log(` ${pc.dim("views/")} view definitions`);
- console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
- console.log(` ${pc.dim(".gitignore")} git ignore rules`);
+ console.log(` ${pc.dim("bon.yaml")} project config`);
+ console.log(` ${pc.dim(`${BONNARD_DIR}/models/`)} model definitions`);
+ console.log(` ${pc.dim(`${BONNARD_DIR}/views/`)} view definitions`);
+ console.log(` ${pc.dim(".bon/")} local state (gitignored)`);
+ console.log(` ${pc.dim(".gitignore")} git ignore rules`);
  if (agentFiles.length > 0) {
  console.log();
  console.log(pc.bold("Agent support:"));
  for (const file of agentFiles) console.log(` ${pc.dim(file)}`);
  }
+ if (env.tools.length > 0 || env.warehouse) {
+ console.log();
+ console.log(pc.bold("Detected environment:"));
+ for (const tool of env.tools) console.log(` ${pc.cyan(tool.name)} ${pc.dim(`(${tool.configPath})`)}`);
+ if (env.warehouse) console.log(` ${pc.cyan(env.warehouse.type)} warehouse ${pc.dim(`(via ${env.warehouse.source})`)}`);
+ }
  }

  //#endregion
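With detections present, the tail of bon init's output gains a section shaped like this (illustrative values):

    Detected environment:
      dbt (dbt_project.yml)
      snowflake warehouse (via dbt-profiles)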
@@ -552,69 +1017,6 @@ var local_exports = /* @__PURE__ */ __exportAll({
  saveLocalDatasources: () => saveLocalDatasources
  });

- //#endregion
- //#region src/lib/dbt/profiles.ts
- /**
- * dbt profiles.yml parser
- *
- * Parses ~/.dbt/profiles.yml and extracts connection configs.
- * Does NOT resolve env vars - they are kept as-is for deploy time resolution.
- */
- const DBT_PROFILES_PATH = path.join(os.homedir(), ".dbt", "profiles.yml");
- /**
- * Check if dbt profiles.yml exists
- */
- function dbtProfilesExist(profilesPath = DBT_PROFILES_PATH) {
- return fs.existsSync(profilesPath);
- }
- /**
- * Get the default dbt profiles path
- */
- function getDefaultProfilesPath() {
- return DBT_PROFILES_PATH;
- }
- /**
- * Map dbt type to Bonnard warehouse type
- */
- function mapDbtType(dbtType) {
- return {
- snowflake: "snowflake",
- postgres: "postgres",
- postgresql: "postgres",
- bigquery: "bigquery",
- databricks: "databricks"
- }[dbtType.toLowerCase()] ?? null;
- }
- /**
- * Parse dbt profiles.yml and return all connections
- * Config values are kept as-is (including {{ env_var(...) }} patterns)
- */
- function parseDbtProfiles(profilesPath = DBT_PROFILES_PATH) {
- if (!fs.existsSync(profilesPath)) throw new Error(`dbt profiles not found at ${profilesPath}`);
- const content = fs.readFileSync(profilesPath, "utf-8");
- const profiles = YAML.parse(content);
- if (!profiles || typeof profiles !== "object") throw new Error("Invalid dbt profiles.yml format");
- const connections = [];
- for (const [profileName, profile] of Object.entries(profiles)) {
- if (profileName === "config") continue;
- if (!profile.outputs || typeof profile.outputs !== "object") continue;
- const defaultTarget = profile.target || "dev";
- for (const [targetName, target] of Object.entries(profile.outputs)) {
- if (!target || typeof target !== "object" || !target.type) continue;
- const warehouseType = mapDbtType(target.type);
- if (!warehouseType) continue;
- connections.push({
- profileName,
- targetName,
- isDefaultTarget: targetName === defaultTarget,
- type: warehouseType,
- config: target
- });
- }
- }
- return connections;
- }
-
  //#endregion
  //#region src/lib/dbt/mapping.ts
  /**
@@ -1839,14 +2241,15 @@ async function previewCommand(datasourceName, sql, options) {
  //#region src/commands/validate.ts
  async function validateCommand(options = {}) {
  const cwd = process.cwd();
- if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
+ const paths = getProjectPaths(cwd);
+ if (!fs.existsSync(paths.config)) {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
- const { validate } = await import("./validate-C4EHvJzJ.mjs");
+ const { validate } = await import("./validate-BUHevw7F.mjs");
  const result = await validate(cwd);
  if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
- console.log(pc.yellow("No model or view files found in models/ or views/."));
+ console.log(pc.yellow(`No model or view files found in ${BONNARD_DIR}/models/ or ${BONNARD_DIR}/views/.`));
  return;
  }
  if (!result.valid) {
@@ -1881,7 +2284,7 @@ async function validateCommand(options = {}) {
  * Lenient: warns but doesn't fail validation
  */
  async function testReferencedConnections(cwd) {
- const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { extractDatasourcesFromModels } = await import("./models-CzOWi3fU.mjs");
  const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
  const { testConnection } = await Promise.resolve().then(() => connection_exports);
  const references = extractDatasourcesFromModels(cwd);
@@ -1949,12 +2352,13 @@ function collectFiles(dir, rootDir) {
  }
  async function deployCommand(options = {}) {
  const cwd = process.cwd();
- if (!fs.existsSync(path.join(cwd, "bon.yaml"))) {
+ const paths = getProjectPaths(cwd);
+ if (!fs.existsSync(paths.config)) {
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
  process.exit(1);
  }
  console.log(pc.dim("Validating models..."));
- const { validate } = await import("./validate-C4EHvJzJ.mjs");
+ const { validate } = await import("./validate-BUHevw7F.mjs");
  const result = await validate(cwd);
  if (!result.valid) {
  console.log(pc.red("Validation failed:\n"));
@@ -1962,14 +2366,14 @@ async function deployCommand(options = {}) {
  process.exit(1);
  }
  if (result.cubes.length === 0 && result.views.length === 0) {
- console.log(pc.yellow("No model or view files found in models/ or views/. Nothing to deploy."));
+ console.log(pc.yellow(`No model or view files found in ${BONNARD_DIR}/models/ or ${BONNARD_DIR}/views/. Nothing to deploy.`));
  process.exit(1);
  }
  console.log(pc.dim(` Found ${result.cubes.length} cube(s) and ${result.views.length} view(s)`));
  if (await testAndSyncDatasources(cwd, options)) process.exit(1);
  const files = {
- ...collectFiles(path.join(cwd, "models"), cwd),
- ...collectFiles(path.join(cwd, "views"), cwd)
+ ...collectFiles(paths.models, cwd),
+ ...collectFiles(paths.views, cwd)
  };
  const fileCount = Object.keys(files).length;
  console.log(pc.dim(`Deploying ${fileCount} file(s)...`));
@@ -1979,6 +2383,10 @@ async function deployCommand(options = {}) {
  console.log(pc.green("Deploy successful!"));
  console.log(`Deployment ID: ${pc.cyan(response.deployment.id)}`);
  console.log(`Cube API: ${pc.cyan(`${response.deployment.cubeApiUrl}/cubejs-api/v1`)}`);
+ console.log();
+ console.log(pc.bold("Connect AI agents via MCP:"));
+ console.log(` MCP URL: ${pc.cyan("https://mcp.bonnard.dev/mcp")}`);
+ console.log(pc.dim(` Run \`bon mcp\` for setup instructions`));
  } catch (err) {
  console.log(pc.red(`Deploy failed: ${err instanceof Error ? err.message : err}`));
  process.exit(1);
@@ -1989,7 +2397,7 @@ async function deployCommand(options = {}) {
  * Returns true if any connection failed (strict mode)
  */
  async function testAndSyncDatasources(cwd, options = {}) {
- const { extractDatasourcesFromModels } = await import("./models-IsV2sX74.mjs");
+ const { extractDatasourcesFromModels } = await import("./models-CzOWi3fU.mjs");
  const { loadLocalDatasources, resolveEnvVarsInCredentials } = await Promise.resolve().then(() => local_exports);
  const { testConnection } = await Promise.resolve().then(() => connection_exports);
  const { pushDatasource } = await Promise.resolve().then(() => push_exports);
@@ -2091,6 +2499,82 @@ async function testAndSyncDatasources(cwd, options = {}) {
  return false;
  }

+ //#endregion
+ //#region src/commands/mcp.ts
+ const MCP_URL = "https://mcp.bonnard.dev/mcp";
+ function mcpCommand() {
+ console.log(pc.bold("MCP Connection Info"));
+ console.log();
+ console.log(`MCP URL: ${pc.cyan(MCP_URL)}`);
+ console.log();
+ console.log(pc.bold("Setup Instructions"));
+ console.log();
+ console.log(pc.underline("Claude Desktop"));
+ console.log(`Add to ${pc.dim("~/Library/Application Support/Claude/claude_desktop_config.json")}:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.underline("Cursor"));
+ console.log(`Add to ${pc.dim(".cursor/mcp.json")} in your project:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.underline("Claude Code"));
+ console.log(`Add to ${pc.dim(".mcp.json")} in your project:`);
+ console.log();
+ console.log(pc.dim(` {`));
+ console.log(pc.dim(` "mcpServers": {`));
+ console.log(pc.dim(` "bonnard": {`));
+ console.log(pc.dim(` "type": "url",`));
+ console.log(pc.dim(` "url": "${MCP_URL}"`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log(pc.dim(` }`));
+ console.log();
+ console.log(pc.dim("OAuth authentication happens automatically when you first connect."));
+ console.log(pc.dim("Run `bon mcp test` to verify the MCP server is reachable."));
+ }
+
+ //#endregion
+ //#region src/commands/mcp-test.ts
+ const MCP_SERVER_BASE = "https://mcp.bonnard.dev";
+ async function mcpTestCommand() {
+ console.log(pc.dim("Testing MCP server connection..."));
+ console.log();
+ const url = `${MCP_SERVER_BASE}/.well-known/oauth-authorization-server`;
+ try {
+ const res = await fetch(url);
+ if (!res.ok) {
+ console.log(pc.red(`✗ MCP server returned ${res.status}`));
+ process.exit(1);
+ }
+ const metadata = await res.json();
+ console.log(pc.green("✓ MCP server is reachable"));
+ console.log();
+ console.log(` Issuer: ${pc.dim(metadata.issuer || "unknown")}`);
+ console.log(` Authorization: ${pc.dim(metadata.authorization_endpoint || "unknown")}`);
+ console.log(` Token: ${pc.dim(metadata.token_endpoint || "unknown")}`);
+ console.log(` Registration: ${pc.dim(metadata.registration_endpoint || "unknown")}`);
+ console.log();
+ console.log(pc.dim("OAuth endpoints are healthy. Agents can connect."));
+ } catch (err) {
+ console.log(pc.red(`✗ Failed to reach MCP server: ${err instanceof Error ? err.message : err}`));
+ process.exit(1);
+ }
+ }
+
  //#endregion
  //#region src/commands/docs.ts
  const __filename = fileURLToPath(import.meta.url);
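Assembled, the Claude Code snippet that bon mcp prints line by line is this .mcp.json:

    {
      "mcpServers": {
        "bonnard": {
          "type": "url",
          "url": "https://mcp.bonnard.dev/mcp"
        }
      }
    }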
@@ -2319,7 +2803,7 @@ async function cubeQueryCommand(queryInput, options = {}) {
  //#region src/bin/bon.ts
  const { version } = createRequire(import.meta.url)("../../package.json");
  program.name("bon").description("Bonnard semantic layer CLI").version(version);
- program.command("init").description("Create bon.yaml, models/, views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
+ program.command("init").description("Create bon.yaml, bonnard/models/, bonnard/views/, .bon/, and agent templates (.claude/, .cursor/)").action(initCommand);
  program.command("login").description("Authenticate with Bonnard via your browser").action(loginCommand);
  program.command("logout").description("Remove stored credentials").action(logoutCommand);
  program.command("whoami").description("Show current login status").option("--verify", "Verify session is still valid with the server").action(whoamiCommand);
@@ -2330,11 +2814,12 @@ datasource.command("test").description("Test data source connectivity by connect
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
  datasource.command("push").description("Push a local data source to Bonnard server (requires login)").argument("<name>", "Data source name from .bon/datasources.yaml").option("--force", "Overwrite if already exists on remote").action(datasourcePushCommand);
  program.command("preview").description("Preview data from a local warehouse using raw SQL (for development/exploration)").argument("<datasource>", "Data source name from .bon/datasources.yaml").argument("<sql>", "SQL query to execute").option("--schema <schema>", "Override schema").option("--database <database>", "Override database").option("--limit <limit>", "Max rows to return", "1000").option("--format <format>", "Output format: toon or json", "toon").action(previewCommand);
- program.command("validate").description("Validate YAML syntax in models/ and views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
+ program.command("validate").description("Validate YAML syntax in bonnard/models/ and bonnard/views/").option("--test-connection", "Also test datasource connections (warns on failure, doesn't block)").action(validateCommand);
  program.command("deploy").description("Deploy models to Bonnard. Requires login, validates models, tests connections (fails on error)").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").action(deployCommand);
+ program.command("mcp").description("MCP connection info and setup instructions").action(mcpCommand).command("test").description("Test MCP server connectivity").action(mcpTestCommand);
  program.command("cube").description("Query the deployed Cube semantic layer").command("query").description("Execute a query against the deployed semantic layer").argument("<query>", "JSON query or SQL (with --sql flag)").option("--sql", "Use Cube SQL API instead of JSON format").option("--limit <limit>", "Max rows to return").option("--format <format>", "Output format: toon or json", "toon").action(cubeQueryCommand);
  program.command("docs").description("Browse Cube documentation for building models and views").argument("[topic]", "Topic to display (e.g., cubes, cubes.measures)").option("-r, --recursive", "Show topic and all child topics").option("-s, --search <query>", "Search topics for a keyword").option("-f, --format <format>", "Output format: markdown or json", "markdown").action(docsCommand).command("schema").description("Show JSON schema for a type (cube, view, measure, etc.)").argument("<type>", "Schema type to display").action(docsSchemaCommand);
  program.parse();

  //#endregion
- export { };
+ export { getProjectPaths as t };
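bon mcp test boils down to one unauthenticated fetch of the OAuth metadata document; a sketch of the equivalent manual check (same URL as MCP_SERVER_BASE above):

    // Minimal sketch of what `bon mcp test` does under the hood:
    const res = await fetch("https://mcp.bonnard.dev/.well-known/oauth-authorization-server");
    if (res.ok) {
      const metadata = await res.json();
      console.log(metadata.issuer, metadata.authorization_endpoint);
    }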