@bonnard/cli 0.2.0 → 0.2.2

@@ -0,0 +1,365 @@
+ import { t as getProjectPaths } from "./bon.mjs";
+ import fs from "node:fs";
+ import path from "node:path";
+ import YAML from "yaml";
+ import { z } from "zod";
+
+ //#region src/lib/schema.ts
+ const identifier = z.string().regex(/^[_a-zA-Z][_a-zA-Z0-9]*$/, "must be a valid identifier (letters, numbers, underscores; cannot start with a number)");
+ const refreshKeySchema = z.object({
+   every: z.string().regex(/^\d+\s+(second|minute|hour|day|week)s?$/, { message: "must be a time interval like \"1 hour\", \"30 minute\", \"1 day\"" }).optional(),
+   sql: z.string().optional()
+ });
+ const measureTypes = [
+   "count",
+   "count_distinct",
+   "count_distinct_approx",
+   "sum",
+   "avg",
+   "min",
+   "max",
+   "number",
+   "string",
+   "time",
+   "boolean",
+   "running_total",
+   "number_agg"
+ ];
+ const dimensionTypes = [
+   "string",
+   "number",
+   "boolean",
+   "time",
+   "geo",
+   "switch"
+ ];
+ const relationshipTypes = [
+   "many_to_one",
+   "one_to_many",
+   "one_to_one"
+ ];
+ const granularities = [
+   "second",
+   "minute",
+   "hour",
+   "day",
+   "week",
+   "month",
+   "quarter",
+   "year"
+ ];
+ const preAggTypes = [
+   "rollup",
+   "original_sql",
+   "rollup_join",
+   "rollup_lambda"
+ ];
+ const formats = [
+   "percent",
+   "currency",
+   "number",
+   "imageUrl",
+   "link",
+   "id"
+ ];
+ const measureSchema = z.object({
+   name: identifier,
+   type: z.enum(measureTypes),
+   sql: z.string().optional(),
+   description: z.string().optional(),
+   title: z.string().optional(),
+   format: z.enum(formats).optional(),
+   public: z.boolean().optional(),
+   filters: z.array(z.object({ sql: z.string() })).optional(),
+   rolling_window: z.object({
+     trailing: z.string().optional(),
+     leading: z.string().optional(),
+     offset: z.string().optional()
+   }).optional(),
+   drill_members: z.array(z.string()).optional(),
+   meta: z.record(z.string(), z.unknown()).optional()
+ });
+ const dimensionFormatSchema = z.union([z.enum(formats), z.string().startsWith("%")]);
+ const dimensionSchema = z.object({
+   name: identifier,
+   type: z.enum(dimensionTypes),
+   sql: z.string().optional(),
+   primary_key: z.boolean().optional(),
+   sub_query: z.boolean().optional(),
+   propagate_filters_to_sub_query: z.boolean().optional(),
+   description: z.string().optional(),
+   title: z.string().optional(),
+   format: dimensionFormatSchema.optional(),
+   public: z.boolean().optional(),
+   meta: z.record(z.string(), z.unknown()).optional(),
+   latitude: z.object({ sql: z.string() }).optional(),
+   longitude: z.object({ sql: z.string() }).optional(),
+   case: z.object({
+     when: z.array(z.object({
+       sql: z.string(),
+       label: z.string()
+     })),
+     else: z.object({ label: z.string() }).optional()
+   }).optional()
+ });
+ const joinSchema = z.object({
+   name: identifier,
+   relationship: z.enum(relationshipTypes),
+   sql: z.string()
+ });
+ const segmentSchema = z.object({
+   name: identifier,
+   sql: z.string(),
+   description: z.string().optional(),
+   title: z.string().optional(),
+   public: z.boolean().optional()
+ });
+ const preAggregationSchema = z.object({
+   name: identifier,
+   type: z.enum(preAggTypes).optional(),
+   measures: z.array(z.string()).optional(),
+   dimensions: z.array(z.string()).optional(),
+   time_dimension: z.string().optional(),
+   granularity: z.enum(granularities).optional(),
+   partition_granularity: z.enum(granularities).optional(),
+   refresh_key: refreshKeySchema.optional(),
+   scheduled_refresh: z.boolean().optional()
+ });
+ const hierarchySchema = z.object({
+   name: identifier,
+   levels: z.array(z.string()),
+   title: z.string().optional(),
+   public: z.boolean().optional()
+ });
+ const cubeSchema = z.object({
+   name: identifier,
+   sql: z.string().optional(),
+   sql_table: z.string().optional(),
+   data_source: z.string().optional(),
+   extends: z.string().optional(),
+   description: z.string().optional(),
+   title: z.string().optional(),
+   public: z.boolean().optional(),
+   refresh_key: refreshKeySchema.optional(),
+   measures: z.array(measureSchema).optional(),
+   dimensions: z.array(dimensionSchema).optional(),
+   joins: z.array(joinSchema).optional(),
+   segments: z.array(segmentSchema).optional(),
+   pre_aggregations: z.array(preAggregationSchema).optional(),
+   hierarchies: z.array(hierarchySchema).optional()
+ }).refine((data) => data.sql != null || data.sql_table != null || data.extends != null, { message: "sql, sql_table, or extends is required" });
+ const viewIncludeItemSchema = z.union([z.string(), z.object({
+   name: z.string(),
+   alias: z.string().optional(),
+   title: z.string().optional(),
+   description: z.string().optional(),
+   format: z.string().optional(),
+   meta: z.record(z.string(), z.unknown()).optional()
+ })]);
+ const viewCubeRefSchema = z.object({
+   join_path: z.string(),
+   includes: z.union([z.literal("*"), z.array(viewIncludeItemSchema)]).optional(),
+   excludes: z.array(z.string()).optional(),
+   prefix: z.boolean().optional()
+ });
+ const folderSchema = z.lazy(() => z.object({
+   name: z.string(),
+   members: z.array(z.string()).optional(),
+   folders: z.array(folderSchema).optional()
+ }));
+ const viewMeasureSchema = z.object({
+   name: identifier,
+   sql: z.string(),
+   type: z.string(),
+   format: z.string().optional(),
+   description: z.string().optional(),
+   meta: z.record(z.string(), z.unknown()).optional()
+ });
+ const viewDimensionSchema = z.object({
+   name: identifier,
+   sql: z.string(),
+   type: z.string(),
+   description: z.string().optional(),
+   meta: z.record(z.string(), z.unknown()).optional()
+ });
+ const viewSegmentSchema = z.object({
+   name: identifier,
+   sql: z.string(),
+   description: z.string().optional()
+ });
+ const viewSchema = z.object({
+   name: identifier,
+   description: z.string().optional(),
+   title: z.string().optional(),
+   public: z.boolean().optional(),
+   cubes: z.array(viewCubeRefSchema).optional(),
+   measures: z.array(viewMeasureSchema).optional(),
+   dimensions: z.array(viewDimensionSchema).optional(),
+   segments: z.array(viewSegmentSchema).optional(),
+   folders: z.array(folderSchema).optional()
+ });
+ const fileSchema = z.object({
+   cubes: z.array(cubeSchema).optional(),
+   views: z.array(viewSchema).optional()
+ }).refine((data) => data.cubes && data.cubes.length > 0 || data.views && data.views.length > 0, "File must contain at least one cube or view");
+ function formatZodError(error, fileName, parsed) {
+   return error.issues.map((issue) => {
+     const pathParts = issue.path;
+     let entityContext = "";
+     if (pathParts.length >= 2) {
+       const collection = pathParts[0];
+       const index = pathParts[1];
+       const entity = parsed?.[collection]?.[index];
+       if (entity?.name) entityContext = ` (${entity.name})`;
+     }
+     const pathStr = pathParts.join(".");
+     const location = pathStr ? `${pathStr}${entityContext}` : "";
+     if (issue.code === "invalid_value" && "values" in issue) return `${fileName}: ${location} — invalid value, expected one of: ${issue.values.join(", ")}`;
+     return `${fileName}: ${location ? `${location} — ` : ""}${issue.message}`;
+   });
+ }
+ function validateFiles(files) {
+   const errors = [];
+   const cubes = [];
+   const views = [];
+   const allNames = /* @__PURE__ */ new Map();
+   for (const file of files) {
+     let parsed;
+     try {
+       parsed = YAML.parse(file.content);
+     } catch (err) {
+       errors.push(`${file.fileName}: YAML parse error — ${err.message}`);
+       continue;
+     }
+     if (!parsed || typeof parsed !== "object") {
+       errors.push(`${file.fileName}: file is empty or not a YAML object`);
+       continue;
+     }
+     const result = fileSchema.safeParse(parsed);
+     if (!result.success) {
+       errors.push(...formatZodError(result.error, file.fileName, parsed));
+       continue;
+     }
+     for (const cube of parsed.cubes ?? []) if (cube.name) {
+       const existing = allNames.get(cube.name);
+       if (existing) errors.push(`${file.fileName}: duplicate name '${cube.name}' (also defined in ${existing})`);
+       else {
+         allNames.set(cube.name, file.fileName);
+         cubes.push(cube.name);
+       }
+     }
+     for (const view of parsed.views ?? []) if (view.name) {
+       const existing = allNames.get(view.name);
+       if (existing) errors.push(`${file.fileName}: duplicate name '${view.name}' (also defined in ${existing})`);
+       else {
+         allNames.set(view.name, file.fileName);
+         views.push(view.name);
+       }
+     }
+   }
+   return {
+     errors,
+     cubes,
+     views
+   };
+ }
+
+ //#endregion
+ //#region src/lib/validate.ts
+ function collectYamlFiles(dir, rootDir) {
+   if (!fs.existsSync(dir)) return [];
+   const results = [];
+   function walk(current) {
+     for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
+       const fullPath = path.join(current, entry.name);
+       if (entry.isDirectory()) walk(fullPath);
+       else if (entry.name.endsWith(".yaml") || entry.name.endsWith(".yml")) results.push({
+         fileName: path.relative(rootDir, fullPath),
+         content: fs.readFileSync(fullPath, "utf-8")
+       });
+     }
+   }
+   walk(dir);
+   return results;
+ }
+ function checkMissingDescriptions(files) {
+   const missing = [];
+   for (const file of files) try {
+     const parsed = YAML.parse(file.content);
+     if (!parsed) continue;
+     const cubes = parsed.cubes || [];
+     for (const cube of cubes) {
+       if (!cube.name) continue;
+       if (!cube.description) missing.push({
+         parent: cube.name,
+         type: "cube",
+         name: cube.name
+       });
+       const measures = cube.measures || [];
+       for (const measure of measures) if (measure.name && !measure.description) missing.push({
+         parent: cube.name,
+         type: "measure",
+         name: measure.name
+       });
+       const dimensions = cube.dimensions || [];
+       for (const dimension of dimensions) if (dimension.name && !dimension.description) missing.push({
+         parent: cube.name,
+         type: "dimension",
+         name: dimension.name
+       });
+     }
+     const views = parsed.views || [];
+     for (const view of views) {
+       if (!view.name) continue;
+       if (!view.description) missing.push({
+         parent: view.name,
+         type: "view",
+         name: view.name
+       });
+     }
+   } catch {}
+   return missing;
+ }
+ function checkMissingDataSource(files) {
+   const missing = [];
+   for (const file of files) try {
+     const parsed = YAML.parse(file.content);
+     if (!parsed) continue;
+     for (const cube of parsed.cubes || []) if (cube.name && !cube.data_source) missing.push(cube.name);
+   } catch {}
+   return missing;
+ }
+ async function validate(projectPath) {
+   const paths = getProjectPaths(projectPath);
+   const files = [...collectYamlFiles(paths.cubes, projectPath), ...collectYamlFiles(paths.views, projectPath)];
+   if (files.length === 0) return {
+     valid: true,
+     errors: [],
+     cubes: [],
+     views: [],
+     missingDescriptions: [],
+     cubesMissingDataSource: []
+   };
+   const result = validateFiles(files);
+   if (result.errors.length > 0) return {
+     valid: false,
+     errors: result.errors,
+     cubes: [],
+     views: [],
+     missingDescriptions: [],
+     cubesMissingDataSource: []
+   };
+   const missingDescriptions = checkMissingDescriptions(files);
+   const cubesMissingDataSource = checkMissingDataSource(files);
+   return {
+     valid: true,
+     errors: [],
+     cubes: result.cubes,
+     views: result.views,
+     missingDescriptions,
+     cubesMissingDataSource
+   };
+ }
+
+ //#endregion
+ export { validate };
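
For orientation, a minimal sketch of driving the new `validate` export. The import path is an assumption (the published entry point may differ); the result shape is read directly from the hunk above.

```js
// Hypothetical driver for the validate() export; the import path is illustrative.
import { validate } from "./validate.mjs";

const result = await validate(process.cwd());
// Shape per the code above:
// { valid, errors, cubes, views, missingDescriptions, cubesMissingDataSource }
if (!result.valid) {
  for (const message of result.errors) console.error(message);
  process.exitCode = 1;
} else {
  console.log(`OK: ${result.cubes.length} cube(s), ${result.views.length} view(s)`);
  for (const miss of result.missingDescriptions) {
    console.warn(`warning: ${miss.type} '${miss.name}' (in ${miss.parent}) has no description`);
  }
}
```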
@@ -27,6 +27,12 @@ measures:

  ## Supported Formats

+ | Format | Description |
+ |--------|-------------|
+ | `percent` | Percentage display (0.75 → 75%) |
+ | `currency` | Monetary display ($1,234.56) |
+ | `number` | Plain number with standard formatting |
+
  ### percent

  Displays value as a percentage:
@@ -53,6 +59,19 @@ Displays value as monetary amount:

  Output: `$1,234.56` (formatting depends on BI tool locale)

+ ### number
+
+ Standard number formatting with grouping separators:
+
+ ```yaml
+ - name: total_slots
+   type: sum
+   sql: slot_count
+   format: number
+ ```
+
+ Output: `1,234` instead of raw `1234`. Use this when the measure represents a count or quantity that benefits from thousand separators.
+
  ## Usage Notes

  ### Format vs Calculation
@@ -20,10 +20,9 @@ Your agent understands Bonnard's modeling language from the first prompt.
  bon init                                   # Scaffold project + agent context
  bon datasource add --demo                  # Add a demo warehouse instantly
  bon datasource add --from-dbt              # Import connections from dbt
- bon validate --test-connection             # Check YAML syntax + warehouse connectivity
+ bon validate                               # Check YAML syntax
  bon deploy -m "description"                # Ship to production (message required)
  bon query '{"measures":["orders.count"]}'  # Test your semantic layer
- bon preview <ds> "SELECT ..."              # Run raw SQL against a warehouse
  bon deployments                            # List deployment history
  bon diff <id>                              # View changes in a deployment
  bon annotate <id> --data '{...}'           # Add context to deployment changes
@@ -81,9 +81,8 @@ views:

  Check for syntax errors and test connections:

- ```yaml
+ ```bash
  bon validate
- bon validate --test-connection
  ```

  ### 4. Deploy
@@ -147,10 +146,8 @@ bonnard/cubes/
  | `bon datasource add --demo` | Add demo dataset (no warehouse needed) |
  | `bon datasource add --from-dbt` | Import from dbt profiles |
  | `bon datasource list` | List configured sources |
- | `bon datasource test <name>` | Test connection |
- | `bon preview <ds> "SQL"` | Run raw SQL against a warehouse |
+ | `bon datasource test <name>` | Test connection (requires login) |
  | `bon validate` | Check cube and view syntax |
- | `bon validate --test-connection` | Validate + test warehouse connections |
  | `bon deploy -m "message"` | Deploy to Bonnard (message required) |
  | `bon deploy --ci` | Non-interactive deploy |
  | `bon deployments` | List deployment history |
@@ -4,16 +4,12 @@

  ## Overview

- The `bon validate` command checks your YAML cubes and views for syntax errors, schema violations, and optionally tests data source connections. Run this before deploying to catch issues early.
+ The `bon validate` command checks your YAML cubes and views for syntax errors and schema violations. Run this before deploying to catch issues early.

  ## Usage

  ```bash
- # Basic validation
  bon validate
-
- # Also test data source connections
- bon validate --test-connection
  ```

  ## What Gets Validated
@@ -36,12 +32,6 @@ bon validate --test-connection
  - Referenced members exist
  - Join relationships are valid

- ### Connection Testing (--test-connection)
-
- - Data source credentials work
- - Database is accessible
- - Tables/schemas exist
-
  ## Example Output

  ### Success
@@ -73,17 +63,6 @@ bonnard/cubes/orders.yaml:15:5
  1 error found.
  ```

- ### Connection Warnings
-
- ```
- ✓ Validating YAML syntax...
- ✓ All cubes and views valid.
-
- ⚠ Testing connections...
- ⚠ datasource "analytics": Connection timed out
-   (This won't block deploy, but queries may fail)
- ```
-
  ## Common Errors

  ### Missing Required Field
@@ -125,12 +104,6 @@ measures:
      type: count
  ```

- ## Options
-
- | Option | Description |
- |--------|-------------|
- | `--test-connection` | Also test datasource connections |
-
  ## Exit Codes

  | Code | Meaning |
@@ -143,7 +116,7 @@ measures:
  1. **Run before every deploy** — `bon validate && bon deploy`
  2. **Add to CI/CD** — validate on pull requests
  3. **Fix errors first** — don't deploy with validation errors
- 4. **Test connections periodically** — catch credential issues early
+ 4. **Test connections** — use `bon datasource test <name>` to check connectivity

  ## See Also

@@ -41,25 +41,12 @@ If the test fails, common issues:

  ## Phase 2: Explore the Data

- Use `bon preview` to understand what tables and columns are available.
- **Always run this before creating cubes** — use the results to decide which
- tables to model and what columns to expose.
+ Before creating cubes, understand what tables and columns are available in your warehouse.

- ```bash
- # List tables use the schema from the datasource config
- # For demo data (contoso schema):
- bon preview contoso_demo "SELECT table_name FROM information_schema.tables WHERE table_schema = 'contoso'"
-
- # For user's own data (typically public schema):
- bon preview <datasource> "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
-
- # Snowflake:
- bon preview <datasource> "SHOW TABLES"
-
- # Then sample the key tables to see columns and data:
- bon preview contoso_demo "SELECT * FROM contoso.fact_sales" --limit 10
- bon preview contoso_demo "SELECT * FROM contoso.dim_product" --limit 10
- ```
+ **Options for exploring your data:**
+ - Use your database IDE or CLI (e.g., `psql`, Snowflake web UI, BigQuery console) to browse tables
+ - Check your dbt docs or existing documentation for table schemas
+ - For the demo dataset, the tables are: `contoso.fact_sales`, `contoso.dim_product`, `contoso.dim_store`, `contoso.dim_customer`

  Note the table names, column names, and data types — you'll use these in Phase 3.

@@ -161,12 +148,6 @@ Validate also warns about:
  - **Missing descriptions** — descriptions help AI agents and analysts discover metrics
  - **Missing `data_source`** — cubes without an explicit `data_source` use the default warehouse, which can cause issues when multiple warehouses are configured

- Optionally test the datasource connection too:
-
- ```bash
- bon validate --test-connection
- ```
-
  ## Phase 6: Deploy

  Log in (if not already) and deploy: