@bonnard/cli 0.2.3 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/bon.mjs CHANGED
@@ -10,7 +10,6 @@ import os from "node:os";
10
10
  import http from "node:http";
11
11
  import crypto from "node:crypto";
12
12
  import { execFileSync } from "node:child_process";
13
- import { confirm } from "@inquirer/prompts";
14
13
  import { encode } from "@toon-format/toon";
15
14
 
16
15
  //#region rolldown:runtime
@@ -82,6 +81,7 @@ function mapDbtType(dbtType) {
82
81
  snowflake: "snowflake",
83
82
  postgres: "postgres",
84
83
  postgresql: "postgres",
84
+ redshift: "redshift",
85
85
  bigquery: "bigquery",
86
86
  databricks: "databricks"
87
87
  }[dbtType.toLowerCase()] ?? null;
@@ -338,6 +338,7 @@ function extractWarehouseFromEnv(cwd) {
338
338
  const type = {
339
339
  snowflake: "snowflake",
340
340
  postgres: "postgres",
341
+ redshift: "redshift",
341
342
  bigquery: "bigquery",
342
343
  databricks: "databricks"
343
344
  }[cubeDbType[1].trim().toLowerCase()];
@@ -566,21 +567,26 @@ function createAgentTemplates(cwd, env) {
566
567
  fs.mkdirSync(claudeRulesDir, { recursive: true });
567
568
  fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-get-started"), { recursive: true });
568
569
  fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-metabase-migrate"), { recursive: true });
570
+ fs.mkdirSync(path.join(claudeSkillsDir, "bonnard-design-guide"), { recursive: true });
569
571
  writeTemplateFile(sharedBonnard, path.join(claudeRulesDir, "bonnard.md"), createdFiles);
570
572
  writeTemplateFile(loadTemplate("claude/skills/bonnard-get-started/SKILL.md"), path.join(claudeSkillsDir, "bonnard-get-started", "SKILL.md"), createdFiles);
571
573
  writeTemplateFile(loadTemplate("claude/skills/bonnard-metabase-migrate/SKILL.md"), path.join(claudeSkillsDir, "bonnard-metabase-migrate", "SKILL.md"), createdFiles);
574
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-design-guide/SKILL.md"), path.join(claudeSkillsDir, "bonnard-design-guide", "SKILL.md"), createdFiles);
572
575
  mergeSettingsJson(loadJsonTemplate("claude/settings.json"), path.join(cwd, ".claude", "settings.json"), createdFiles);
573
576
  const cursorRulesDir = path.join(cwd, ".cursor", "rules");
574
577
  fs.mkdirSync(cursorRulesDir, { recursive: true });
575
578
  writeTemplateFile(withCursorFrontmatter(sharedBonnard, "Bonnard semantic layer project context", true), path.join(cursorRulesDir, "bonnard.mdc"), createdFiles);
576
579
  writeTemplateFile(loadTemplate("cursor/rules/bonnard-get-started.mdc"), path.join(cursorRulesDir, "bonnard-get-started.mdc"), createdFiles);
577
580
  writeTemplateFile(loadTemplate("cursor/rules/bonnard-metabase-migrate.mdc"), path.join(cursorRulesDir, "bonnard-metabase-migrate.mdc"), createdFiles);
581
+ writeTemplateFile(loadTemplate("cursor/rules/bonnard-design-guide.mdc"), path.join(cursorRulesDir, "bonnard-design-guide.mdc"), createdFiles);
578
582
  const codexSkillsDir = path.join(cwd, ".agents", "skills");
579
583
  fs.mkdirSync(path.join(codexSkillsDir, "bonnard-get-started"), { recursive: true });
580
584
  fs.mkdirSync(path.join(codexSkillsDir, "bonnard-metabase-migrate"), { recursive: true });
585
+ fs.mkdirSync(path.join(codexSkillsDir, "bonnard-design-guide"), { recursive: true });
581
586
  writeTemplateFile(sharedBonnard, path.join(cwd, "AGENTS.md"), createdFiles);
582
587
  writeTemplateFile(loadTemplate("claude/skills/bonnard-get-started/SKILL.md"), path.join(codexSkillsDir, "bonnard-get-started", "SKILL.md"), createdFiles);
583
588
  writeTemplateFile(loadTemplate("claude/skills/bonnard-metabase-migrate/SKILL.md"), path.join(codexSkillsDir, "bonnard-metabase-migrate", "SKILL.md"), createdFiles);
589
+ writeTemplateFile(loadTemplate("claude/skills/bonnard-design-guide/SKILL.md"), path.join(codexSkillsDir, "bonnard-design-guide", "SKILL.md"), createdFiles);
584
590
  return createdFiles;
585
591
  }
586
592
  async function initCommand() {
@@ -787,7 +793,6 @@ async function logoutCommand() {
787
793
  var api_exports = /* @__PURE__ */ __exportAll({
788
794
  del: () => del,
789
795
  get: () => get,
790
- getRemoteDatasources: () => getRemoteDatasources,
791
796
  post: () => post
792
797
  });
793
798
  const APP_URL = process.env.BON_APP_URL || "https://app.bonnard.dev";
@@ -829,12 +834,6 @@ function post(path, body) {
829
834
  function del(path) {
830
835
  return request("DELETE", path);
831
836
  }
832
- /**
833
- * Fetch remote datasources from Bonnard server
834
- */
835
- async function getRemoteDatasources() {
836
- return (await get("/api/datasources")).dataSources || [];
837
- }
838
837
 
839
838
  //#endregion
840
839
  //#region src/commands/whoami.ts
@@ -1221,6 +1220,43 @@ const WAREHOUSE_CONFIGS = [
1221
1220
  required: true
1222
1221
  }]
1223
1222
  },
1223
+ {
1224
+ value: "redshift",
1225
+ label: "Redshift",
1226
+ configFields: [
1227
+ {
1228
+ name: "host",
1229
+ message: "Host (cluster endpoint)",
1230
+ required: true
1231
+ },
1232
+ {
1233
+ name: "port",
1234
+ message: "Port",
1235
+ default: "5439"
1236
+ },
1237
+ {
1238
+ name: "database",
1239
+ message: "Database name",
1240
+ required: true
1241
+ },
1242
+ {
1243
+ name: "schema",
1244
+ message: "Schema",
1245
+ default: "public"
1246
+ }
1247
+ ],
1248
+ credentialFields: [{
1249
+ name: "username",
1250
+ flag: "user",
1251
+ message: "Username",
1252
+ required: true
1253
+ }, {
1254
+ name: "password",
1255
+ message: "Password",
1256
+ secret: true,
1257
+ required: true
1258
+ }]
1259
+ },
1224
1260
  {
1225
1261
  value: "bigquery",
1226
1262
  label: "BigQuery",
@@ -1673,7 +1709,7 @@ async function validateCommand() {
1673
1709
  console.log(pc.red("No bon.yaml found. Are you in a Bonnard project?"));
1674
1710
  process.exit(1);
1675
1711
  }
1676
- const { validate } = await import("./validate-BdqZBH2n.mjs");
1712
+ const { validate } = await import("./validate-Bc8zGNw7.mjs");
1677
1713
  const result = await validate(cwd);
1678
1714
  if (result.cubes.length === 0 && result.views.length === 0 && result.valid) {
1679
1715
  console.log(pc.yellow(`No cube or view files found in ${BONNARD_DIR}/cubes/ or ${BONNARD_DIR}/views/.`));
@@ -1708,6 +1744,14 @@ async function validateCommand() {
1708
1744
  console.log(pc.dim(" This can cause issues when multiple warehouses are configured."));
1709
1745
  console.log(pc.dim(` ${result.cubesMissingDataSource.join(", ")}`));
1710
1746
  }
1747
+ if (result.suspectPrimaryKeys.length > 0) {
1748
+ console.log();
1749
+ console.log(pc.yellow(`⚠ ${result.suspectPrimaryKeys.length} primary key(s) on time dimensions`));
1750
+ console.log(pc.dim(" Time dimensions are rarely unique. Non-unique primary keys cause dimension"));
1751
+ console.log(pc.dim(" queries to silently return empty results. Use a unique column or add a"));
1752
+ console.log(pc.dim(" ROW_NUMBER() synthetic key via the cube's sql property."));
1753
+ for (const s of result.suspectPrimaryKeys) console.log(pc.dim(` ${s.cube}.${s.dimension} (type: ${s.type})`));
1754
+ }
1711
1755
  }
1712
1756
 
1713
1757
  //#endregion
@@ -1739,7 +1783,7 @@ async function deployCommand(options = {}) {
1739
1783
  process.exit(1);
1740
1784
  }
1741
1785
  console.log(pc.dim("Validating cubes and views..."));
1742
- const { validate } = await import("./validate-BdqZBH2n.mjs");
1786
+ const { validate } = await import("./validate-Bc8zGNw7.mjs");
1743
1787
  const result = await validate(cwd);
1744
1788
  if (!result.valid) {
1745
1789
  console.log(pc.red("Validation failed:\n"));
@@ -1810,7 +1854,7 @@ async function deployCommand(options = {}) {
1810
1854
  async function testAndSyncDatasources(cwd, options = {}) {
1811
1855
  const { extractDatasourcesFromCubes } = await import("./cubes-9rklhdAJ.mjs");
1812
1856
  const { loadLocalDatasources } = await Promise.resolve().then(() => local_exports);
1813
- const { pushDatasource } = await import("./push-mZujN1Ik.mjs");
1857
+ const { pushDatasource } = await import("./push-Bv9AFGc2.mjs");
1814
1858
  const references = extractDatasourcesFromCubes(cwd);
1815
1859
  if (references.length === 0) return false;
1816
1860
  console.log();
@@ -1833,51 +1877,16 @@ async function testAndSyncDatasources(cwd, options = {}) {
1833
1877
  console.log(pc.red("Missing datasources. Fix issues before deploying."));
1834
1878
  return true;
1835
1879
  }
1836
- console.log(pc.dim("Checking remote datasources..."));
1837
- let remoteDatasources;
1838
- try {
1839
- remoteDatasources = await getRemoteDatasources();
1840
- } catch (err) {
1841
- console.log(pc.red(`Failed to fetch remote datasources: ${err.message}`));
1842
- return true;
1880
+ console.log(pc.dim("Syncing datasources..."));
1881
+ for (const name of foundDatasources) if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ ${name} synced`));
1882
+ else {
1883
+ console.log(pc.red(`✗ Failed to sync "${name}"`));
1884
+ failed = true;
1843
1885
  }
1844
- const remoteNames = new Set(remoteDatasources.map((ds) => ds.name));
1845
- const missingRemote = foundDatasources.filter((name) => !remoteNames.has(name));
1846
- if (missingRemote.length > 0) {
1847
- console.log();
1848
- console.log(pc.yellow(`⚠ Missing remote datasource${missingRemote.length > 1 ? "s" : ""}: ${missingRemote.join(", ")}`));
1886
+ if (failed) {
1849
1887
  console.log();
1850
- if (options.ci) {
1851
- console.log(pc.red("Deploy aborted (--ci mode)."));
1852
- console.log(pc.dim(`Use --push-datasources to auto-push missing datasources`));
1853
- return true;
1854
- }
1855
- if (options.pushDatasources) for (const name of missingRemote) {
1856
- console.log(pc.dim(`Pushing "${name}"...`));
1857
- if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
1858
- else {
1859
- console.log(pc.red(`✗ Failed to push "${name}"`));
1860
- return true;
1861
- }
1862
- }
1863
- else {
1864
- if (!await confirm({
1865
- message: `Push ${missingRemote.length > 1 ? "these datasources" : `"${missingRemote[0]}"`} to Bonnard? (credentials will be encrypted)`,
1866
- default: true
1867
- })) {
1868
- console.log(pc.dim("Deploy aborted."));
1869
- return true;
1870
- }
1871
- console.log();
1872
- for (const name of missingRemote) {
1873
- console.log(pc.dim(`Pushing "${name}"...`));
1874
- if (await pushDatasource(name, { silent: true })) console.log(pc.green(`✓ Pushed "${name}"`));
1875
- else {
1876
- console.log(pc.red(`✗ Failed to push "${name}"`));
1877
- return true;
1878
- }
1879
- }
1880
- }
1888
+ console.log(pc.red("Datasource sync failed. Check .bon/datasources.yaml and credentials."));
1889
+ return true;
1881
1890
  }
1882
1891
  console.log();
1883
1892
  console.log(pc.dim("Testing datasource connections..."));
@@ -3818,7 +3827,7 @@ datasource.command("add").description("Add a data source to .bon/datasources.yam
3818
3827
  datasource.command("list").description("List data sources (shows both local and remote by default)").option("--local", "Show only local data sources from .bon/datasources.yaml").option("--remote", "Show only remote data sources from Bonnard server (requires login)").action(datasourceListCommand);
3819
3828
  datasource.command("remove").description("Remove a data source from .bon/datasources.yaml (local by default)").argument("<name>", "Data source name").option("--remote", "Remove from Bonnard server instead of local (requires login)").action(datasourceRemoveCommand);
3820
3829
  program.command("validate").description("Validate YAML syntax in bonnard/cubes/ and bonnard/views/").action(validateCommand);
3821
- program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode (fail if missing datasources)").option("--push-datasources", "Auto-push missing datasources without prompting").requiredOption("-m, --message <text>", "Deploy message describing your changes").action(deployCommand);
3830
+ program.command("deploy").description("Deploy cubes and views to Bonnard. Requires login, validates, syncs datasources").option("--ci", "Non-interactive mode").requiredOption("-m, --message <text>", "Deploy message describing your changes").action(deployCommand);
3822
3831
  program.command("deployments").description("List deployment history").option("--all", "Show all deployments (default: last 10)").option("--format <format>", "Output format: table or json", "table").action(deploymentsCommand);
3823
3832
  program.command("diff").description("Show changes in a deployment").argument("<id>", "Deployment ID").option("--format <format>", "Output format: table or json", "table").option("--breaking", "Show only breaking changes").action(diffCommand);
3824
3833
  program.command("annotate").description("Annotate deployment changes with reasoning").argument("<id>", "Deployment ID").option("--data <json>", "Annotations JSON").action(annotateCommand);
@@ -1,6 +1,6 @@
1
1
  import { n as resolveEnvVarsInCredentials, r as post, t as getLocalDatasource } from "./bon.mjs";
2
2
  import pc from "picocolors";
3
- import { confirm } from "@inquirer/prompts";
3
+ import "@inquirer/prompts";
4
4
 
5
5
  //#region src/commands/datasource/push.ts
6
6
  /**
@@ -218,11 +218,53 @@ function formatZodError(error, fileName, parsed) {
218
218
  return `${fileName}: ${location ? `${location} — ` : ""}${issue.message}`;
219
219
  });
220
220
  }
221
+ function checkViewMemberConflicts(parsedFiles, cubeMap) {
222
+ const errors = [];
223
+ for (const { fileName, parsed } of parsedFiles) for (const view of parsed.views ?? []) {
224
+ if (!view.name || !view.cubes) continue;
225
+ const seen = /* @__PURE__ */ new Map();
226
+ for (const m of view.measures ?? []) if (m.name) seen.set(m.name, `${view.name} (direct)`);
227
+ for (const d of view.dimensions ?? []) if (d.name) seen.set(d.name, `${view.name} (direct)`);
228
+ for (const s of view.segments ?? []) if (s.name) seen.set(s.name, `${view.name} (direct)`);
229
+ for (const cubeRef of view.cubes) {
230
+ const joinPath = cubeRef.join_path;
231
+ if (!joinPath) continue;
232
+ const segments = joinPath.split(".");
233
+ const targetCubeName = segments[segments.length - 1];
234
+ let memberNames = [];
235
+ if (cubeRef.includes === "*") {
236
+ const cube = cubeMap.get(targetCubeName);
237
+ if (!cube) continue;
238
+ memberNames = [
239
+ ...cube.measures,
240
+ ...cube.dimensions,
241
+ ...cube.segments
242
+ ];
243
+ } else if (Array.isArray(cubeRef.includes)) {
244
+ for (const item of cubeRef.includes) if (typeof item === "string") memberNames.push(item);
245
+ else if (item && typeof item === "object" && item.name) memberNames.push(item.alias || item.name);
246
+ } else continue;
247
+ if (Array.isArray(cubeRef.excludes)) {
248
+ const excludeSet = new Set(cubeRef.excludes);
249
+ memberNames = memberNames.filter((n) => !excludeSet.has(n));
250
+ }
251
+ for (const rawName of memberNames) {
252
+ const finalName = cubeRef.prefix ? `${targetCubeName}_${rawName}` : rawName;
253
+ const existingSource = seen.get(finalName);
254
+ if (existingSource) errors.push(`${fileName}: view '${view.name}' — member '${finalName}' from '${joinPath}' conflicts with '${existingSource}'. Use prefix: true or an alias.`);
255
+ else seen.set(finalName, joinPath);
256
+ }
257
+ }
258
+ }
259
+ return errors;
260
+ }
221
261
  function validateFiles(files) {
222
262
  const errors = [];
223
263
  const cubes = [];
224
264
  const views = [];
225
265
  const allNames = /* @__PURE__ */ new Map();
266
+ const parsedFiles = [];
267
+ const cubeMap = /* @__PURE__ */ new Map();
226
268
  for (const file of files) {
227
269
  let parsed;
228
270
  try {
@@ -240,12 +282,21 @@ function validateFiles(files) {
240
282
  errors.push(...formatZodError(result.error, file.fileName, parsed));
241
283
  continue;
242
284
  }
285
+ parsedFiles.push({
286
+ fileName: file.fileName,
287
+ parsed
288
+ });
243
289
  for (const cube of parsed.cubes ?? []) if (cube.name) {
244
290
  const existing = allNames.get(cube.name);
245
291
  if (existing) errors.push(`${file.fileName}: duplicate name '${cube.name}' (also defined in ${existing})`);
246
292
  else {
247
293
  allNames.set(cube.name, file.fileName);
248
294
  cubes.push(cube.name);
295
+ cubeMap.set(cube.name, {
296
+ measures: (cube.measures ?? []).map((m) => m.name).filter(Boolean),
297
+ dimensions: (cube.dimensions ?? []).map((d) => d.name).filter(Boolean),
298
+ segments: (cube.segments ?? []).map((s) => s.name).filter(Boolean)
299
+ });
249
300
  }
250
301
  }
251
302
  for (const view of parsed.views ?? []) if (view.name) {
@@ -257,6 +308,7 @@ function validateFiles(files) {
257
308
  }
258
309
  }
259
310
  }
311
+ if (errors.length === 0) errors.push(...checkViewMemberConflicts(parsedFiles, cubeMap));
260
312
  return {
261
313
  errors,
262
314
  cubes,
@@ -320,6 +372,22 @@ function checkMissingDescriptions(files) {
320
372
  } catch {}
321
373
  return missing;
322
374
  }
375
+ function checkSuspectPrimaryKeys(files) {
376
+ const suspects = [];
377
+ for (const file of files) try {
378
+ const parsed = YAML.parse(file.content);
379
+ if (!parsed) continue;
380
+ for (const cube of parsed.cubes || []) {
381
+ if (!cube.name) continue;
382
+ for (const dim of cube.dimensions || []) if (dim.primary_key && dim.type === "time") suspects.push({
383
+ cube: cube.name,
384
+ dimension: dim.name,
385
+ type: dim.type
386
+ });
387
+ }
388
+ } catch {}
389
+ return suspects;
390
+ }
323
391
  function checkMissingDataSource(files) {
324
392
  const missing = [];
325
393
  for (const file of files) try {
@@ -338,7 +406,8 @@ async function validate(projectPath) {
338
406
  cubes: [],
339
407
  views: [],
340
408
  missingDescriptions: [],
341
- cubesMissingDataSource: []
409
+ cubesMissingDataSource: [],
410
+ suspectPrimaryKeys: []
342
411
  };
343
412
  const result = validateFiles(files);
344
413
  if (result.errors.length > 0) return {
@@ -347,17 +416,20 @@ async function validate(projectPath) {
347
416
  cubes: [],
348
417
  views: [],
349
418
  missingDescriptions: [],
350
- cubesMissingDataSource: []
419
+ cubesMissingDataSource: [],
420
+ suspectPrimaryKeys: []
351
421
  };
352
422
  const missingDescriptions = checkMissingDescriptions(files);
353
423
  const cubesMissingDataSource = checkMissingDataSource(files);
424
+ const suspectPrimaryKeys = checkSuspectPrimaryKeys(files);
354
425
  return {
355
426
  valid: true,
356
427
  errors: [],
357
428
  cubes: result.cubes,
358
429
  views: result.views,
359
430
  missingDescriptions,
360
- cubesMissingDataSource
431
+ cubesMissingDataSource,
432
+ suspectPrimaryKeys
361
433
  };
362
434
  }
363
435
 
@@ -35,10 +35,10 @@ bon docs cubes.measures # Read modeling docs in terminal
35
35
  Deploy from GitHub Actions, GitLab CI, or any pipeline:
36
36
 
37
37
  ```bash
38
- bon deploy --ci --push-datasources -m "CI deploy"
38
+ bon deploy --ci -m "CI deploy"
39
39
  ```
40
40
 
41
- Non-interactive mode with automatic datasource sync. Fails fast if anything is misconfigured.
41
+ Non-interactive mode. Datasources are synced automatically. Fails fast if anything is misconfigured.
42
42
 
43
43
  ## Deployment versioning
44
44
 
@@ -1,84 +1,83 @@
1
1
  # Governance
2
2
 
3
- > User and group-level permissions for your semantic layer.
3
+ > Control who can see which views, columns, and rows in your semantic layer.
4
4
 
5
- Bonnard supports declarative data access policies — define who can see which rows, columns, and views directly in your YAML models. No application code, no database-level workarounds.
5
+ Bonnard provides admin-managed data governance — control which views, columns, and rows each group of users can access. Policies are configured in the web UI and enforced automatically across MCP queries and the API. Changes take effect within one minute.
6
6
 
7
- ## Row-level security
7
+ ## How It Works
8
8
 
9
- Filter data based on user attributes. A sales manager only sees their region's data:
9
+ ```
10
+ Admin configures in web UI:
11
+ Groups → Views → Field/Row restrictions
10
12
 
11
- ```yaml
12
- cubes:
13
- - name: orders
14
- access_policy:
15
- - role: sales_manager
16
- row_level:
17
- filters:
18
- - member: region
19
- operator: equals
20
- values: ["{ securityContext.region }"]
13
+ Enforced automatically:
14
+ MCP queries + API → only see what policies allow
21
15
  ```
22
16
 
23
- Every query from that user automatically includes the filter — no way to bypass it.
17
+ Governance uses **groups** as the unit of access. Each group has a set of **policies** that define which views its members can see, and optionally restrict specific columns or rows within those views.
24
18
 
25
- ## Member-level security
19
+ ## Groups
26
20
 
27
- Control which measures and dimensions each role can access. Hide sensitive fields from non-privileged users:
21
+ Groups represent teams or roles in your organization — "Sales Team", "Finance", "Executive". Create and manage groups from the **Governance** page in the Bonnard dashboard.
28
22
 
29
- ```yaml
30
- cubes:
31
- - name: orders
32
- access_policy:
33
- - role: analyst
34
- member_level:
35
- includes:
36
- - count
37
- - total_revenue
38
- - status
39
- - created_at
23
+ Each group has:
24
+ - **Name** and optional description
25
+ - **Color** for visual identification
26
+ - **View access** — which views the group can query
27
+ - **Members** — which users belong to the group
40
28
 
41
- - role: admin
42
- member_level:
43
- includes: "*"
44
- ```
29
+ Users can belong to multiple groups. Their effective access is the **union** of all group policies.
45
30
 
46
- Roles without a matching policy see nothing.
31
+ ## View-Level Access (Level 1)
47
32
 
48
- ## View-based governance
33
+ The simplest control: toggle which views a group can see. Unchecked views are completely invisible to group members — they won't appear in `explore_schema` or be queryable.
49
34
 
50
- Keep cubes private. Expose only curated views to consumers:
35
+ From the group detail page, check the views you want to grant access to and click **Save changes**. New policies default to "All fields" with no row filters.
51
36
 
52
- ```yaml
53
- cubes:
54
- - name: raw_orders
55
- public: false
37
+ ## Field-Level Access (Level 2)
56
38
 
57
- views:
58
- - name: sales_overview
59
- public: true
60
- cubes:
61
- - join_path: raw_orders
62
- includes:
63
- - revenue
64
- - order_count
65
- - status
66
- ```
39
+ Fine-tune which measures and dimensions a group can see within a view. Click the gear icon on any granted view to open the fine-tune dialog.
67
40
 
68
- Business users, AI agents, and SDK consumers only see the views you choose to expose — with clean names and descriptions.
41
+ Three modes:
42
+ - **All fields** — full access to every measure and dimension (default)
43
+ - **Only these** — whitelist specific fields; everything else is hidden
44
+ - **All except** — blacklist specific fields; everything else is visible
69
45
 
70
- ## Dynamic visibility
46
+ Hidden fields are removed from the schema — they don't appear in `explore_schema` and can't be used in queries.
71
47
 
72
- Use context variables to show or hide entire cubes based on the user's role:
48
+ ## Row-Level Filters (Level 2)
73
49
 
74
- ```yaml
75
- cubes:
76
- - name: executive_metrics
77
- public: "{{ 'true' if COMPILE_CONTEXT.role == 'executive' else 'false' }}"
78
- ```
50
+ Restrict which rows a group can see. Add row filters in the fine-tune dialog to limit data by dimension values.
51
+
52
+ For example, filter `traffic_source` to `equals B2B, Organic` so the group only sees rows where traffic_source is B2B or Organic. Multiple values in a single filter are OR'd (any match). Multiple separate filters are AND'd (all must match).
53
+
54
+ Row filters are applied server-side on every query — users cannot bypass them.
55
+
56
+ ## Members
57
+
58
+ Assign users to groups from the **Members** tab. Each user shows which groups they belong to and a preview of their effective access (which views they can query, any field or row restrictions).
59
+
60
+ Users without any group assignment see nothing — they must be added to at least one group to query governed views.
61
+
62
+ ## How Policies Are Enforced
63
+
64
+ Policies configured in the web UI are stored in Supabase and injected into the query engine at runtime. When a user queries via MCP or the API:
65
+
66
+ 1. Their JWT is enriched with group memberships
67
+ 2. The query engine loads policies for those groups
68
+ 3. View visibility, field restrictions, and row filters are applied automatically
69
+ 4. The user only sees data their policies allow
70
+
71
+ No YAML changes are needed — governance is fully managed through the dashboard.
72
+
73
+ ## Best Practices
74
+
75
+ 1. **Start with broad access, then restrict** — give groups all views first, then fine-tune as needed
76
+ 2. **Use groups for teams, not individuals** — easier to manage and audit
77
+ 3. **Test with MCP** — after changing policies, query via MCP to verify the restrictions work as expected
78
+ 4. **Review after schema deploys** — new views need to be added to group policies to become visible
79
79
 
80
80
  ## See Also
81
81
 
82
- - [cubes.public](cubes.public) — Visibility controls reference
82
+ - [features.mcp](features.mcp) — How AI agents query your semantic layer
83
83
  - [views](views) — Creating curated data views
84
- - [syntax.context-variables](syntax.context-variables) — Context variable reference
@@ -9,6 +9,7 @@ Bonnard hosts your semantic layer so you don't have to. Define cubes and views i
9
9
  Connect any combination of warehouses through a single semantic layer:
10
10
 
11
11
  - **PostgreSQL** — Direct TCP connection
12
+ - **Redshift** — Cluster or serverless endpoint
12
13
  - **Snowflake** — Account-based authentication
13
14
  - **BigQuery** — GCP service account
14
15
  - **Databricks** — Token-based workspace connection
@@ -43,8 +44,13 @@ Your models are stored securely and served from Bonnard's infrastructure. Each o
43
44
 
44
45
  Deploy in seconds. Query in milliseconds.
45
46
 
47
+ ## Built for AI agents
48
+
49
+ Views and descriptions are the discovery API for AI agents. When an agent calls `explore_schema`, it sees view names and descriptions — that's all it has to decide where to query. Well-written descriptions with scope, disambiguation, and dimension values make agents accurate. See the design guide principles in the CLI (`/bonnard-design-guide`) for details.
50
+
46
51
  ## See Also
47
52
 
48
53
  - [workflow.query](workflow.query) — Query format reference
49
54
  - [cubes](cubes) — Cube modeling guide
50
55
  - [views](views) — View modeling guide
56
+ - [features.governance](features.governance) — Access control for views and data
@@ -6,11 +6,18 @@
6
6
 
7
7
  Views are facades that expose selected measures and dimensions from one or more cubes. They define which data is available to consumers, control join paths, and organize members into logical groups.
8
8
 
9
+ **Views should represent how a team thinks about data**, not mirror your warehouse tables. Name views by what they answer (`sales_pipeline`, `customer_insights`) rather than what table they wrap (`orders_view`, `users_view`). A good semantic layer has 5-10 focused views, not 30+ thin wrappers.
10
+
9
11
  ## Example
10
12
 
11
13
  ```yaml
12
14
  views:
13
- - name: orders_overview
15
+ - name: sales_analytics
16
+ description: >-
17
+ Sales team view — order revenue, counts, and status breakdowns with
18
+ customer details. Default view for revenue and order questions. Use the
19
+ status dimension (values: pending, completed, cancelled) to filter.
20
+ For customer-level analysis, use customer_insights instead.
14
21
  cubes:
15
22
  - join_path: orders
16
23
  includes:
@@ -39,13 +46,13 @@ views:
39
46
 
40
47
  ## Why Use Views?
41
48
 
42
- ### 1. Simplify Data Access
49
+ ### 1. Curate for Audiences
43
50
 
44
- Expose only relevant members instead of entire cubes:
51
+ Expose only the measures and dimensions a specific audience needs. A single `orders` cube might contribute to a `sales_analytics` view (revenue by status), a `management_kpis` view (high-level totals), and a `finance_reporting` view (invoice amounts). Each view shows different slices of the same data.
45
52
 
46
53
  ```yaml
47
54
  views:
48
- - name: sales_dashboard
55
+ - name: sales_analytics
49
56
  cubes:
50
57
  - join_path: orders
51
58
  includes:
@@ -124,11 +131,12 @@ bonnard/views/
124
131
 
125
132
  ## Best Practices
126
133
 
127
- 1. **Create purpose-specific views** — one view per dashboard/use case
128
- 2. **Use meaningful names** — describe what the view is for
129
- 3. **Be explicit with includes** — list members rather than using "*"
130
- 4. **Alias for clarity** — rename members when needed
131
- 5. **Organize with folders** — group related members together
134
+ 1. **Name views by audience/use case** — `sales_pipeline` not `opportunities_view`. Views represent how teams think about data, not your warehouse schema.
135
+ 2. **Write descriptions that help AI agents choose** — Lead with scope ("Sales team — revenue, order counts..."), cross-reference related views ("For customer demographics, use customer_insights instead"), and include key dimension values.
136
+ 3. **Combine multiple cubes** — A view should pull from whichever cubes answer a team's questions. Don't limit views to one cube each.
137
+ 4. **Be explicit with includes** — list members rather than using "*"
138
+ 5. **Alias for clarity** — rename members when needed
139
+ 6. **Organize with folders** — group related members together
132
140
 
133
141
  ## See Also
134
142
 
@@ -19,15 +19,16 @@ A `-m` message is **required** — it describes what changed in this deployment.
19
19
  | Flag | Description |
20
20
  |------|-------------|
21
21
  | `-m "message"` | **Required.** Deployment description |
22
- | `--ci` | Non-interactive mode (fails on missing datasources) |
23
- | `--push-datasources` | Auto-push missing datasources to Bonnard |
22
+ | `--ci` | Non-interactive mode |
23
+
24
+ Datasources are always synced automatically during deploy.
24
25
 
25
26
  ### CI/CD
26
27
 
27
- For automated pipelines, combine `--ci` with `--push-datasources`:
28
+ For automated pipelines, use `--ci` for non-interactive mode:
28
29
 
29
30
  ```bash
30
- bon deploy --ci --push-datasources -m "CI deploy"
31
+ bon deploy --ci -m "CI deploy"
31
32
  ```
32
33
 
33
34
  ## Prerequisites
@@ -131,11 +131,12 @@ bonnard/cubes/
131
131
 
132
132
  ## Best Practices
133
133
 
134
- 1. **Start simple** — begin with one cube, add complexity gradually
135
- 2. **Validate often** — run `bon validate` after each change
136
- 3. **Use version control** — commit cubes and views to git
137
- 4. **Document with descriptions** — add `description` to measures/dimensions
138
- 5. **Test with queries** — verify models produce expected results
134
+ 1. **Start from questions** — collect the most common questions your team asks, then build views that answer them. Don't just mirror your warehouse tables.
135
+ 2. **Add filtered measures** — if a dashboard card has a WHERE clause beyond a date range, that filter should be a filtered measure. This is the #1 way to match real dashboard numbers.
136
+ 3. **Write descriptions for agents** — descriptions are how AI agents choose which view and measure to use. Lead with scope, cross-reference related views, include dimension values.
137
+ 4. **Validate often** — run `bon validate` after each change
138
+ 5. **Test with real questions** — after deploying, ask an AI agent via MCP the same questions your team asks. Check it picks the right view and measure.
139
+ 6. **Iterate** — expect 2-4 rounds of deploying, testing with questions, and improving descriptions before agents reliably answer the top 10 questions.
139
140
 
140
141
  ## Commands Reference
141
142