yamchart 0.3.10 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-2OTODCAK.js +162 -0
- package/dist/chunk-2OTODCAK.js.map +1 -0
- package/dist/chunk-BZSE3IA2.js +368 -0
- package/dist/chunk-BZSE3IA2.js.map +1 -0
- package/dist/{chunk-GT54UGPT.js → chunk-CVUCTA4A.js} +45 -363
- package/dist/chunk-CVUCTA4A.js.map +1 -0
- package/dist/chunk-HYNGY4JQ.js +169 -0
- package/dist/chunk-HYNGY4JQ.js.map +1 -0
- package/dist/connection-utils-M7JZBBCN.js +18 -0
- package/dist/connection-utils-M7JZBBCN.js.map +1 -0
- package/dist/describe-7N5BUVAV.js +38 -0
- package/dist/describe-7N5BUVAV.js.map +1 -0
- package/dist/{dev-RBS7PTJE.js → dev-C775BWRE.js} +43 -13
- package/dist/dev-C775BWRE.js.map +1 -0
- package/dist/index.js +97 -3
- package/dist/index.js.map +1 -1
- package/dist/{init-FTSEOTAD.js → init-CI4VARQG.js} +2 -1
- package/dist/init-CI4VARQG.js.map +1 -0
- package/dist/public/assets/{index-xXsNnf9d.css → index-B_3PjPed.css} +1 -1
- package/dist/public/assets/{index-D9hfHuVH.js → index-BcOMnndW.js} +161 -161
- package/dist/public/assets/{index.es-B4AYqvku.js → index.es-DcFK9OeM.js} +1 -1
- package/dist/public/index.html +2 -2
- package/dist/query-BQLJ2GJG.js +32 -0
- package/dist/query-BQLJ2GJG.js.map +1 -0
- package/dist/tables-4ELQSIWW.js +39 -0
- package/dist/tables-4ELQSIWW.js.map +1 -0
- package/dist/templates/default/yamchart.yaml +14 -0
- package/dist/templates/empty/yamchart.yaml +14 -0
- package/package.json +2 -2
- package/dist/chunk-GT54UGPT.js.map +0 -1
- package/dist/dev-RBS7PTJE.js.map +0 -1
- package/dist/init-FTSEOTAD.js.map +0 -1
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
// src/commands/introspection.ts
|
|
2
|
+
function escapeSqlValue(value) {
|
|
3
|
+
return value.replace(/'/g, "''");
|
|
4
|
+
}
|
|
5
|
+
function normalizeTableType(tableType) {
|
|
6
|
+
const upper = tableType.toUpperCase();
|
|
7
|
+
if (upper === "BASE TABLE") return "TABLE";
|
|
8
|
+
return upper;
|
|
9
|
+
}
|
|
10
|
+
function getField(row, lowercase, uppercase) {
|
|
11
|
+
if (lowercase in row) return row[lowercase];
|
|
12
|
+
if (uppercase in row) return row[uppercase];
|
|
13
|
+
return void 0;
|
|
14
|
+
}
|
|
15
|
+
function getTablesQuery(type, options) {
|
|
16
|
+
switch (type) {
|
|
17
|
+
case "duckdb":
|
|
18
|
+
case "postgres": {
|
|
19
|
+
const conditions = [
|
|
20
|
+
"table_schema NOT IN ('information_schema', 'pg_catalog')"
|
|
21
|
+
];
|
|
22
|
+
if (options?.schema) {
|
|
23
|
+
conditions.push(`table_schema = '${escapeSqlValue(options.schema)}'`);
|
|
24
|
+
}
|
|
25
|
+
const sql = `SELECT table_schema, table_name, table_type FROM information_schema.tables WHERE ${conditions.join(" AND ")}`;
|
|
26
|
+
return {
|
|
27
|
+
sql,
|
|
28
|
+
normalize: (rows) => rows.map((row) => ({
|
|
29
|
+
schema: String(row.table_schema),
|
|
30
|
+
name: String(row.table_name),
|
|
31
|
+
type: normalizeTableType(String(row.table_type))
|
|
32
|
+
}))
|
|
33
|
+
};
|
|
34
|
+
}
|
|
35
|
+
case "mysql": {
|
|
36
|
+
let sql = "SELECT table_schema, table_name, table_type FROM information_schema.tables";
|
|
37
|
+
if (options?.schema) {
|
|
38
|
+
sql += ` WHERE table_schema = '${escapeSqlValue(options.schema)}'`;
|
|
39
|
+
}
|
|
40
|
+
return {
|
|
41
|
+
sql,
|
|
42
|
+
normalize: (rows) => rows.map((row) => ({
|
|
43
|
+
schema: String(getField(row, "table_schema", "TABLE_SCHEMA")),
|
|
44
|
+
name: String(getField(row, "table_name", "TABLE_NAME")),
|
|
45
|
+
type: normalizeTableType(String(getField(row, "table_type", "TABLE_TYPE")))
|
|
46
|
+
}))
|
|
47
|
+
};
|
|
48
|
+
}
|
|
49
|
+
case "snowflake": {
|
|
50
|
+
const conditions = [];
|
|
51
|
+
if (options?.schema) {
|
|
52
|
+
conditions.push(`TABLE_SCHEMA = '${escapeSqlValue(options.schema)}'`);
|
|
53
|
+
}
|
|
54
|
+
if (options?.database) {
|
|
55
|
+
conditions.push(`TABLE_CATALOG = '${escapeSqlValue(options.database)}'`);
|
|
56
|
+
}
|
|
57
|
+
let sql = "SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES";
|
|
58
|
+
if (conditions.length > 0) {
|
|
59
|
+
sql += ` WHERE ${conditions.join(" AND ")}`;
|
|
60
|
+
}
|
|
61
|
+
return {
|
|
62
|
+
sql,
|
|
63
|
+
normalize: (rows) => rows.map((row) => ({
|
|
64
|
+
schema: String(row.TABLE_SCHEMA),
|
|
65
|
+
name: String(row.TABLE_NAME),
|
|
66
|
+
type: normalizeTableType(String(row.TABLE_TYPE))
|
|
67
|
+
}))
|
|
68
|
+
};
|
|
69
|
+
}
|
|
70
|
+
case "sqlite": {
|
|
71
|
+
const sql = "SELECT name, type FROM sqlite_master WHERE type IN ('table', 'view') AND name NOT LIKE 'sqlite_%'";
|
|
72
|
+
return {
|
|
73
|
+
sql,
|
|
74
|
+
normalize: (rows) => rows.map((row) => ({
|
|
75
|
+
schema: "",
|
|
76
|
+
name: String(row.name),
|
|
77
|
+
type: String(row.type).toUpperCase()
|
|
78
|
+
}))
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
default:
|
|
82
|
+
throw new Error(`Unsupported database type: ${type}`);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
function getDescribeQuery(type, table) {
|
|
86
|
+
switch (type) {
|
|
87
|
+
case "duckdb": {
|
|
88
|
+
return {
|
|
89
|
+
sql: `DESCRIBE ${table}`,
|
|
90
|
+
normalize: (rows) => rows.map((row) => ({
|
|
91
|
+
name: String(row.column_name),
|
|
92
|
+
type: String(row.column_type),
|
|
93
|
+
nullable: String(row.null)
|
|
94
|
+
}))
|
|
95
|
+
};
|
|
96
|
+
}
|
|
97
|
+
case "postgres": {
|
|
98
|
+
const parts = table.split(".");
|
|
99
|
+
const tableName = parts.length > 1 ? parts[parts.length - 1] : table;
|
|
100
|
+
const schemaName = parts.length > 1 ? parts[0] : null;
|
|
101
|
+
const conditions = [`table_name = '${escapeSqlValue(tableName)}'`];
|
|
102
|
+
if (schemaName) {
|
|
103
|
+
conditions.push(`table_schema = '${escapeSqlValue(schemaName)}'`);
|
|
104
|
+
}
|
|
105
|
+
const sql = `SELECT column_name, data_type, is_nullable FROM information_schema.columns WHERE ${conditions.join(" AND ")} ORDER BY ordinal_position`;
|
|
106
|
+
return {
|
|
107
|
+
sql,
|
|
108
|
+
normalize: (rows) => rows.map((row) => ({
|
|
109
|
+
name: String(row.column_name),
|
|
110
|
+
type: String(row.data_type),
|
|
111
|
+
nullable: String(row.is_nullable)
|
|
112
|
+
}))
|
|
113
|
+
};
|
|
114
|
+
}
|
|
115
|
+
case "mysql": {
|
|
116
|
+
const parts = table.split(".");
|
|
117
|
+
const tableName = parts.length > 1 ? parts[parts.length - 1] : table;
|
|
118
|
+
const schemaName = parts.length > 1 ? parts[0] : null;
|
|
119
|
+
const conditions = [`table_name = '${escapeSqlValue(tableName)}'`];
|
|
120
|
+
if (schemaName) {
|
|
121
|
+
conditions.push(`table_schema = '${escapeSqlValue(schemaName)}'`);
|
|
122
|
+
}
|
|
123
|
+
const sql = `SELECT column_name, column_type, is_nullable FROM information_schema.columns WHERE ${conditions.join(" AND ")} ORDER BY ordinal_position`;
|
|
124
|
+
return {
|
|
125
|
+
sql,
|
|
126
|
+
normalize: (rows) => rows.map((row) => ({
|
|
127
|
+
name: String(getField(row, "column_name", "COLUMN_NAME")),
|
|
128
|
+
type: String(getField(row, "column_type", "COLUMN_TYPE")),
|
|
129
|
+
nullable: String(getField(row, "is_nullable", "IS_NULLABLE"))
|
|
130
|
+
}))
|
|
131
|
+
};
|
|
132
|
+
}
|
|
133
|
+
case "snowflake": {
|
|
134
|
+
return {
|
|
135
|
+
sql: `DESCRIBE TABLE ${table}`,
|
|
136
|
+
normalize: (rows) => rows.map((row) => ({
|
|
137
|
+
name: String(row.name),
|
|
138
|
+
type: String(row.type),
|
|
139
|
+
nullable: String(row["null?"]) === "Y" ? "YES" : "NO"
|
|
140
|
+
}))
|
|
141
|
+
};
|
|
142
|
+
}
|
|
143
|
+
case "sqlite": {
|
|
144
|
+
return {
|
|
145
|
+
sql: `PRAGMA table_info(${table})`,
|
|
146
|
+
normalize: (rows) => rows.map((row) => ({
|
|
147
|
+
name: String(row.name),
|
|
148
|
+
type: String(row.type),
|
|
149
|
+
nullable: Number(row.notnull) === 1 ? "NO" : "YES"
|
|
150
|
+
}))
|
|
151
|
+
};
|
|
152
|
+
}
|
|
153
|
+
default:
|
|
154
|
+
throw new Error(`Unsupported database type: ${type}`);
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
export {
|
|
159
|
+
getTablesQuery,
|
|
160
|
+
getDescribeQuery
|
|
161
|
+
};
|
|
162
|
+
//# sourceMappingURL=chunk-2OTODCAK.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/introspection.ts"],"sourcesContent":["/**\n * Per-engine SQL generation and result normalization for database introspection.\n *\n * Pure logic — no I/O, no connectors. Generates SQL strings and provides\n * normalize functions to transform raw query results into consistent shapes.\n */\n\nexport interface NormalizedTable {\n schema: string;\n name: string;\n type: string;\n}\n\nexport interface NormalizedColumn {\n name: string;\n type: string;\n nullable: string;\n}\n\nexport interface TablesQuery {\n sql: string;\n normalize: (rows: Record<string, unknown>[]) => NormalizedTable[];\n}\n\nexport interface DescribeQuery {\n sql: string;\n normalize: (rows: Record<string, unknown>[]) => NormalizedColumn[];\n}\n\n/**\n * Escape a SQL string literal value by doubling single quotes.\n */\nfunction escapeSqlValue(value: string): string {\n return value.replace(/'/g, \"''\");\n}\n\n/**\n * Normalize table_type values: 'BASE TABLE' -> 'TABLE', everything else uppercased.\n */\nfunction normalizeTableType(tableType: string): string {\n const upper = tableType.toUpperCase();\n if (upper === 'BASE TABLE') return 'TABLE';\n return upper;\n}\n\n/**\n * Helper to read a property from a row, trying lowercase first then uppercase.\n * MySQL drivers may return column names in either case.\n */\nfunction getField(row: Record<string, unknown>, lowercase: string, uppercase: string): unknown {\n if (lowercase in row) return row[lowercase];\n if (uppercase in row) return row[uppercase];\n return undefined;\n}\n\n/**\n * Generate a SQL query to list tables/views and a normalizer for the results.\n *\n * Supported types: duckdb, postgres, mysql, snowflake, sqlite.\n */\nexport function getTablesQuery(\n type: string,\n options?: { schema?: string; database?: string },\n): TablesQuery {\n switch (type) {\n case 'duckdb':\n case 'postgres': {\n const conditions = [\n \"table_schema NOT IN ('information_schema', 'pg_catalog')\",\n ];\n if 
(options?.schema) {\n conditions.push(`table_schema = '${escapeSqlValue(options.schema)}'`);\n }\n const sql = `SELECT table_schema, table_name, table_type FROM information_schema.tables WHERE ${conditions.join(' AND ')}`;\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n schema: String(row.table_schema),\n name: String(row.table_name),\n type: normalizeTableType(String(row.table_type)),\n })),\n };\n }\n\n case 'mysql': {\n let sql = 'SELECT table_schema, table_name, table_type FROM information_schema.tables';\n if (options?.schema) {\n sql += ` WHERE table_schema = '${escapeSqlValue(options.schema)}'`;\n }\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n schema: String(getField(row, 'table_schema', 'TABLE_SCHEMA')),\n name: String(getField(row, 'table_name', 'TABLE_NAME')),\n type: normalizeTableType(String(getField(row, 'table_type', 'TABLE_TYPE'))),\n })),\n };\n }\n\n case 'snowflake': {\n const conditions: string[] = [];\n if (options?.schema) {\n conditions.push(`TABLE_SCHEMA = '${escapeSqlValue(options.schema)}'`);\n }\n if (options?.database) {\n conditions.push(`TABLE_CATALOG = '${escapeSqlValue(options.database)}'`);\n }\n let sql = 'SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES';\n if (conditions.length > 0) {\n sql += ` WHERE ${conditions.join(' AND ')}`;\n }\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n schema: String(row.TABLE_SCHEMA),\n name: String(row.TABLE_NAME),\n type: normalizeTableType(String(row.TABLE_TYPE)),\n })),\n };\n }\n\n case 'sqlite': {\n const sql = \"SELECT name, type FROM sqlite_master WHERE type IN ('table', 'view') AND name NOT LIKE 'sqlite_%'\";\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n schema: '',\n name: String(row.name),\n type: String(row.type).toUpperCase(),\n })),\n };\n }\n\n default:\n throw new Error(`Unsupported database type: ${type}`);\n }\n}\n\n/**\n * Generate a SQL query to describe a table's columns and a 
normalizer for the results.\n *\n * Supported types: duckdb, postgres, mysql, snowflake, sqlite.\n * Accepts schema-qualified names (e.g. 'public.orders', 'ANALYTICS.PUBLIC.ORDERS').\n */\nexport function getDescribeQuery(type: string, table: string): DescribeQuery {\n switch (type) {\n case 'duckdb': {\n return {\n sql: `DESCRIBE ${table}`,\n normalize: (rows) =>\n rows.map((row) => ({\n name: String(row.column_name),\n type: String(row.column_type),\n nullable: String(row.null),\n })),\n };\n }\n\n case 'postgres': {\n const parts = table.split('.');\n const tableName = parts.length > 1 ? parts[parts.length - 1]! : table;\n const schemaName = parts.length > 1 ? parts[0]! : null;\n\n const conditions = [`table_name = '${escapeSqlValue(tableName)}'`];\n if (schemaName) {\n conditions.push(`table_schema = '${escapeSqlValue(schemaName)}'`);\n }\n\n const sql = `SELECT column_name, data_type, is_nullable FROM information_schema.columns WHERE ${conditions.join(' AND ')} ORDER BY ordinal_position`;\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n name: String(row.column_name),\n type: String(row.data_type),\n nullable: String(row.is_nullable),\n })),\n };\n }\n\n case 'mysql': {\n const parts = table.split('.');\n const tableName = parts.length > 1 ? parts[parts.length - 1]! : table;\n const schemaName = parts.length > 1 ? parts[0]! 
: null;\n\n const conditions = [`table_name = '${escapeSqlValue(tableName)}'`];\n if (schemaName) {\n conditions.push(`table_schema = '${escapeSqlValue(schemaName)}'`);\n }\n\n const sql = `SELECT column_name, column_type, is_nullable FROM information_schema.columns WHERE ${conditions.join(' AND ')} ORDER BY ordinal_position`;\n return {\n sql,\n normalize: (rows) =>\n rows.map((row) => ({\n name: String(getField(row, 'column_name', 'COLUMN_NAME')),\n type: String(getField(row, 'column_type', 'COLUMN_TYPE')),\n nullable: String(getField(row, 'is_nullable', 'IS_NULLABLE')),\n })),\n };\n }\n\n case 'snowflake': {\n return {\n sql: `DESCRIBE TABLE ${table}`,\n normalize: (rows) =>\n rows.map((row) => ({\n name: String(row.name),\n type: String(row.type),\n nullable: String(row['null?']) === 'Y' ? 'YES' : 'NO',\n })),\n };\n }\n\n case 'sqlite': {\n return {\n sql: `PRAGMA table_info(${table})`,\n normalize: (rows) =>\n rows.map((row) => ({\n name: String(row.name),\n type: String(row.type),\n nullable: Number(row.notnull) === 1 ? 
'NO' : 'YES',\n })),\n };\n }\n\n default:\n throw new Error(`Unsupported database type: ${type}`);\n }\n}\n"],"mappings":";AAgCA,SAAS,eAAe,OAAuB;AAC7C,SAAO,MAAM,QAAQ,MAAM,IAAI;AACjC;AAKA,SAAS,mBAAmB,WAA2B;AACrD,QAAM,QAAQ,UAAU,YAAY;AACpC,MAAI,UAAU,aAAc,QAAO;AACnC,SAAO;AACT;AAMA,SAAS,SAAS,KAA8B,WAAmB,WAA4B;AAC7F,MAAI,aAAa,IAAK,QAAO,IAAI,SAAS;AAC1C,MAAI,aAAa,IAAK,QAAO,IAAI,SAAS;AAC1C,SAAO;AACT;AAOO,SAAS,eACd,MACA,SACa;AACb,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK,YAAY;AACf,YAAM,aAAa;AAAA,QACjB;AAAA,MACF;AACA,UAAI,SAAS,QAAQ;AACnB,mBAAW,KAAK,mBAAmB,eAAe,QAAQ,MAAM,CAAC,GAAG;AAAA,MACtE;AACA,YAAM,MAAM,oFAAoF,WAAW,KAAK,OAAO,CAAC;AACxH,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,QAAQ,OAAO,IAAI,YAAY;AAAA,UAC/B,MAAM,OAAO,IAAI,UAAU;AAAA,UAC3B,MAAM,mBAAmB,OAAO,IAAI,UAAU,CAAC;AAAA,QACjD,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,SAAS;AACZ,UAAI,MAAM;AACV,UAAI,SAAS,QAAQ;AACnB,eAAO,0BAA0B,eAAe,QAAQ,MAAM,CAAC;AAAA,MACjE;AACA,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,QAAQ,OAAO,SAAS,KAAK,gBAAgB,cAAc,CAAC;AAAA,UAC5D,MAAM,OAAO,SAAS,KAAK,cAAc,YAAY,CAAC;AAAA,UACtD,MAAM,mBAAmB,OAAO,SAAS,KAAK,cAAc,YAAY,CAAC,CAAC;AAAA,QAC5E,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,YAAM,aAAuB,CAAC;AAC9B,UAAI,SAAS,QAAQ;AACnB,mBAAW,KAAK,mBAAmB,eAAe,QAAQ,MAAM,CAAC,GAAG;AAAA,MACtE;AACA,UAAI,SAAS,UAAU;AACrB,mBAAW,KAAK,oBAAoB,eAAe,QAAQ,QAAQ,CAAC,GAAG;AAAA,MACzE;AACA,UAAI,MAAM;AACV,UAAI,WAAW,SAAS,GAAG;AACzB,eAAO,UAAU,WAAW,KAAK,OAAO,CAAC;AAAA,MAC3C;AACA,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,QAAQ,OAAO,IAAI,YAAY;AAAA,UAC/B,MAAM,OAAO,IAAI,UAAU;AAAA,UAC3B,MAAM,mBAAmB,OAAO,IAAI,UAAU,CAAC;AAAA,QACjD,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,UAAU;AACb,YAAM,MAAM;AACZ,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,QAAQ;AAAA,UACR,MAAM,OAAO,IAAI,IAAI;AAAA,UACrB,MAAM,OAAO,IAAI,IAAI,EAAE,YAAY;AAAA,QACrC,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,EACxD;AACF;AAQO,SAAS,iBAAiB,MAAc,OAA8
B;AAC3E,UAAQ,MAAM;AAAA,IACZ,KAAK,UAAU;AACb,aAAO;AAAA,QACL,KAAK,YAAY,KAAK;AAAA,QACtB,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,MAAM,OAAO,IAAI,WAAW;AAAA,UAC5B,MAAM,OAAO,IAAI,WAAW;AAAA,UAC5B,UAAU,OAAO,IAAI,IAAI;AAAA,QAC3B,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,YAAY;AACf,YAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,YAAM,YAAY,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,CAAC,IAAK;AAChE,YAAM,aAAa,MAAM,SAAS,IAAI,MAAM,CAAC,IAAK;AAElD,YAAM,aAAa,CAAC,iBAAiB,eAAe,SAAS,CAAC,GAAG;AACjE,UAAI,YAAY;AACd,mBAAW,KAAK,mBAAmB,eAAe,UAAU,CAAC,GAAG;AAAA,MAClE;AAEA,YAAM,MAAM,oFAAoF,WAAW,KAAK,OAAO,CAAC;AACxH,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,MAAM,OAAO,IAAI,WAAW;AAAA,UAC5B,MAAM,OAAO,IAAI,SAAS;AAAA,UAC1B,UAAU,OAAO,IAAI,WAAW;AAAA,QAClC,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,SAAS;AACZ,YAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,YAAM,YAAY,MAAM,SAAS,IAAI,MAAM,MAAM,SAAS,CAAC,IAAK;AAChE,YAAM,aAAa,MAAM,SAAS,IAAI,MAAM,CAAC,IAAK;AAElD,YAAM,aAAa,CAAC,iBAAiB,eAAe,SAAS,CAAC,GAAG;AACjE,UAAI,YAAY;AACd,mBAAW,KAAK,mBAAmB,eAAe,UAAU,CAAC,GAAG;AAAA,MAClE;AAEA,YAAM,MAAM,sFAAsF,WAAW,KAAK,OAAO,CAAC;AAC1H,aAAO;AAAA,QACL;AAAA,QACA,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,MAAM,OAAO,SAAS,KAAK,eAAe,aAAa,CAAC;AAAA,UACxD,MAAM,OAAO,SAAS,KAAK,eAAe,aAAa,CAAC;AAAA,UACxD,UAAU,OAAO,SAAS,KAAK,eAAe,aAAa,CAAC;AAAA,QAC9D,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,aAAO;AAAA,QACL,KAAK,kBAAkB,KAAK;AAAA,QAC5B,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,MAAM,OAAO,IAAI,IAAI;AAAA,UACrB,MAAM,OAAO,IAAI,IAAI;AAAA,UACrB,UAAU,OAAO,IAAI,OAAO,CAAC,MAAM,MAAM,QAAQ;AAAA,QACnD,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA,KAAK,UAAU;AACb,aAAO;AAAA,QACL,KAAK,qBAAqB,KAAK;AAAA,QAC/B,WAAW,CAAC,SACV,KAAK,IAAI,CAAC,SAAS;AAAA,UACjB,MAAM,OAAO,IAAI,IAAI;AAAA,UACrB,MAAM,OAAO,IAAI,IAAI;AAAA,UACrB,UAAU,OAAO,IAAI,OAAO,MAAM,IAAI,OAAO;AAAA,QAC/C,EAAE;AAAA,MACN;AAAA,IACF;AAAA,IAEA;AACE,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,EACxD;AACF;","names":[]}
|
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ChartSchema,
|
|
3
|
+
ConnectionSchema,
|
|
4
|
+
DashboardSchema,
|
|
5
|
+
ProjectSchema,
|
|
6
|
+
ScheduleSchema
|
|
7
|
+
} from "./chunk-CVUCTA4A.js";
|
|
8
|
+
import {
|
|
9
|
+
parseModelMetadata
|
|
10
|
+
} from "./chunk-DMGAHFXP.js";
|
|
11
|
+
|
|
12
|
+
// src/commands/validate.ts
|
|
13
|
+
import { readFile, readdir, access } from "fs/promises";
|
|
14
|
+
import { join, extname, relative } from "path";
|
|
15
|
+
import { parse as parseYaml } from "yaml";
|
|
16
|
+
function formatZodErrors(error) {
|
|
17
|
+
return error.errors.map((e) => {
|
|
18
|
+
const path = e.path.length > 0 ? e.path.join(".") : "(root)";
|
|
19
|
+
return `${path}: ${e.message}`;
|
|
20
|
+
});
|
|
21
|
+
}
|
|
22
|
+
async function validateProject(projectDir, options) {
|
|
23
|
+
const errors = [];
|
|
24
|
+
const warnings = [];
|
|
25
|
+
let filesChecked = 0;
|
|
26
|
+
let filesPassed = 0;
|
|
27
|
+
const config = {
|
|
28
|
+
project: null,
|
|
29
|
+
connections: /* @__PURE__ */ new Map(),
|
|
30
|
+
models: /* @__PURE__ */ new Map(),
|
|
31
|
+
charts: /* @__PURE__ */ new Map(),
|
|
32
|
+
dashboards: /* @__PURE__ */ new Map(),
|
|
33
|
+
schedules: /* @__PURE__ */ new Map()
|
|
34
|
+
};
|
|
35
|
+
const projectPath = join(projectDir, "yamchart.yaml");
|
|
36
|
+
try {
|
|
37
|
+
await access(projectPath);
|
|
38
|
+
filesChecked++;
|
|
39
|
+
const content = await readFile(projectPath, "utf-8");
|
|
40
|
+
const parsed = parseYaml(content);
|
|
41
|
+
const result = ProjectSchema.safeParse(parsed);
|
|
42
|
+
if (result.success) {
|
|
43
|
+
config.project = result.data;
|
|
44
|
+
filesPassed++;
|
|
45
|
+
} else {
|
|
46
|
+
errors.push({
|
|
47
|
+
file: "yamchart.yaml",
|
|
48
|
+
message: `Invalid schema:
|
|
49
|
+
${formatZodErrors(result.error).map((e) => ` - ${e}`).join("\n")}`
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
} catch {
|
|
53
|
+
errors.push({
|
|
54
|
+
file: "yamchart.yaml",
|
|
55
|
+
message: "yamchart.yaml not found"
|
|
56
|
+
});
|
|
57
|
+
return {
|
|
58
|
+
success: false,
|
|
59
|
+
errors,
|
|
60
|
+
warnings,
|
|
61
|
+
stats: { files: filesChecked, passed: filesPassed, failed: filesChecked - filesPassed }
|
|
62
|
+
};
|
|
63
|
+
}
|
|
64
|
+
const connectionsDir = join(projectDir, "connections");
|
|
65
|
+
try {
|
|
66
|
+
await access(connectionsDir);
|
|
67
|
+
const files = await readdir(connectionsDir);
|
|
68
|
+
for (const file of files) {
|
|
69
|
+
if (extname(file) !== ".yaml" && extname(file) !== ".yml") continue;
|
|
70
|
+
filesChecked++;
|
|
71
|
+
const filePath = join(connectionsDir, file);
|
|
72
|
+
const content = await readFile(filePath, "utf-8");
|
|
73
|
+
const parsed = parseYaml(content);
|
|
74
|
+
const result = ConnectionSchema.safeParse(parsed);
|
|
75
|
+
if (result.success) {
|
|
76
|
+
config.connections.set(result.data.name, result.data);
|
|
77
|
+
filesPassed++;
|
|
78
|
+
} else {
|
|
79
|
+
errors.push({
|
|
80
|
+
file: `connections/${file}`,
|
|
81
|
+
message: `Invalid schema:
|
|
82
|
+
${formatZodErrors(result.error).map((e) => ` - ${e}`).join("\n")}`
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
} catch {
|
|
87
|
+
}
|
|
88
|
+
const modelsDir = join(projectDir, "models");
|
|
89
|
+
const modelStats = { filesChecked: 0, filesPassed: 0 };
|
|
90
|
+
try {
|
|
91
|
+
await access(modelsDir);
|
|
92
|
+
await validateModelsDir(modelsDir, projectDir, config, errors, modelStats);
|
|
93
|
+
filesChecked += modelStats.filesChecked;
|
|
94
|
+
filesPassed += modelStats.filesPassed;
|
|
95
|
+
} catch {
|
|
96
|
+
}
|
|
97
|
+
const chartsDir = join(projectDir, "charts");
|
|
98
|
+
try {
|
|
99
|
+
await access(chartsDir);
|
|
100
|
+
const files = await readdir(chartsDir);
|
|
101
|
+
for (const file of files) {
|
|
102
|
+
if (extname(file) !== ".yaml" && extname(file) !== ".yml") continue;
|
|
103
|
+
filesChecked++;
|
|
104
|
+
const filePath = join(chartsDir, file);
|
|
105
|
+
const content = await readFile(filePath, "utf-8");
|
|
106
|
+
const parsed = parseYaml(content);
|
|
107
|
+
const result = ChartSchema.safeParse(parsed);
|
|
108
|
+
if (result.success) {
|
|
109
|
+
config.charts.set(result.data.name, result.data);
|
|
110
|
+
filesPassed++;
|
|
111
|
+
} else {
|
|
112
|
+
errors.push({
|
|
113
|
+
file: `charts/${file}`,
|
|
114
|
+
message: `Invalid schema:
|
|
115
|
+
${formatZodErrors(result.error).map((e) => ` - ${e}`).join("\n")}`
|
|
116
|
+
});
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
} catch {
|
|
120
|
+
}
|
|
121
|
+
const dashboardsDir = join(projectDir, "dashboards");
|
|
122
|
+
try {
|
|
123
|
+
await access(dashboardsDir);
|
|
124
|
+
const files = await readdir(dashboardsDir);
|
|
125
|
+
for (const file of files) {
|
|
126
|
+
if (extname(file) !== ".yaml" && extname(file) !== ".yml") continue;
|
|
127
|
+
filesChecked++;
|
|
128
|
+
const filePath = join(dashboardsDir, file);
|
|
129
|
+
const content = await readFile(filePath, "utf-8");
|
|
130
|
+
const parsed = parseYaml(content);
|
|
131
|
+
const result = DashboardSchema.safeParse(parsed);
|
|
132
|
+
if (result.success) {
|
|
133
|
+
config.dashboards.set(result.data.name, result.data);
|
|
134
|
+
filesPassed++;
|
|
135
|
+
} else {
|
|
136
|
+
errors.push({
|
|
137
|
+
file: `dashboards/${file}`,
|
|
138
|
+
message: `Invalid schema:
|
|
139
|
+
${formatZodErrors(result.error).map((e) => ` - ${e}`).join("\n")}`
|
|
140
|
+
});
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
} catch {
|
|
144
|
+
}
|
|
145
|
+
const schedulesDir = join(projectDir, "schedules");
|
|
146
|
+
try {
|
|
147
|
+
await access(schedulesDir);
|
|
148
|
+
const files = await readdir(schedulesDir);
|
|
149
|
+
for (const file of files) {
|
|
150
|
+
if (extname(file) !== ".yaml" && extname(file) !== ".yml") continue;
|
|
151
|
+
filesChecked++;
|
|
152
|
+
const filePath = join(schedulesDir, file);
|
|
153
|
+
const content = await readFile(filePath, "utf-8");
|
|
154
|
+
const parsed = parseYaml(content);
|
|
155
|
+
const result = ScheduleSchema.safeParse(parsed);
|
|
156
|
+
if (result.success) {
|
|
157
|
+
config.schedules.set(result.data.name, result.data);
|
|
158
|
+
filesPassed++;
|
|
159
|
+
} else {
|
|
160
|
+
errors.push({
|
|
161
|
+
file: `schedules/${file}`,
|
|
162
|
+
message: `Invalid schema:
|
|
163
|
+
${formatZodErrors(result.error).map((e) => ` - ${e}`).join("\n")}`
|
|
164
|
+
});
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
} catch {
|
|
168
|
+
}
|
|
169
|
+
crossReferenceValidation(config, errors, warnings);
|
|
170
|
+
let dryRunStats;
|
|
171
|
+
if (options.dryRun) {
|
|
172
|
+
dryRunStats = await dryRunValidation(projectDir, config, options.connection, errors);
|
|
173
|
+
}
|
|
174
|
+
return {
|
|
175
|
+
success: errors.length === 0,
|
|
176
|
+
errors,
|
|
177
|
+
warnings,
|
|
178
|
+
stats: {
|
|
179
|
+
files: filesChecked,
|
|
180
|
+
passed: filesPassed,
|
|
181
|
+
failed: filesChecked - filesPassed
|
|
182
|
+
},
|
|
183
|
+
dryRunStats
|
|
184
|
+
};
|
|
185
|
+
}
|
|
186
|
+
async function validateModelsDir(dir, projectDir, config, errors, stats) {
|
|
187
|
+
const entries = await readdir(dir, { withFileTypes: true });
|
|
188
|
+
for (const entry of entries) {
|
|
189
|
+
const fullPath = join(dir, entry.name);
|
|
190
|
+
if (entry.isDirectory()) {
|
|
191
|
+
await validateModelsDir(fullPath, projectDir, config, errors, stats);
|
|
192
|
+
} else if (extname(entry.name) === ".sql") {
|
|
193
|
+
stats.filesChecked++;
|
|
194
|
+
const relPath = relative(projectDir, fullPath);
|
|
195
|
+
const content = await readFile(fullPath, "utf-8");
|
|
196
|
+
try {
|
|
197
|
+
const parsed = parseModelMetadata(content);
|
|
198
|
+
config.models.set(parsed.name, { name: parsed.name, sql: parsed.sql });
|
|
199
|
+
stats.filesPassed++;
|
|
200
|
+
} catch (err) {
|
|
201
|
+
errors.push({
|
|
202
|
+
file: relPath,
|
|
203
|
+
message: err instanceof Error ? err.message : "Failed to parse model"
|
|
204
|
+
});
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
function crossReferenceValidation(config, errors, warnings) {
|
|
210
|
+
for (const [chartName, chart] of config.charts) {
|
|
211
|
+
if (chart.source.model && !config.models.has(chart.source.model)) {
|
|
212
|
+
const suggestion = findSimilar(chart.source.model, Array.from(config.models.keys()));
|
|
213
|
+
errors.push({
|
|
214
|
+
file: `charts/${chartName}.yaml`,
|
|
215
|
+
message: `Unknown model reference "${chart.source.model}"`,
|
|
216
|
+
suggestion: suggestion ? `Did you mean "${suggestion}"?` : void 0
|
|
217
|
+
});
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
for (const [scheduleName, schedule] of config.schedules) {
|
|
221
|
+
const chartNames = schedule.type === "report" ? schedule.charts : [schedule.chart];
|
|
222
|
+
for (const chartName of chartNames) {
|
|
223
|
+
if (!config.charts.has(chartName)) {
|
|
224
|
+
const suggestion = findSimilar(chartName, Array.from(config.charts.keys()));
|
|
225
|
+
warnings.push({
|
|
226
|
+
file: `schedules/${scheduleName}.yaml`,
|
|
227
|
+
message: `Unknown chart reference "${chartName}"`,
|
|
228
|
+
suggestion: suggestion ? `Did you mean "${suggestion}"?` : void 0
|
|
229
|
+
});
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
const cronFields = schedule.schedule.trim().split(/\s+/);
|
|
233
|
+
if (cronFields.length < 5 || cronFields.length > 6) {
|
|
234
|
+
errors.push({
|
|
235
|
+
file: `schedules/${scheduleName}.yaml`,
|
|
236
|
+
message: `Invalid cron expression "${schedule.schedule}" (expected 5-6 fields)`
|
|
237
|
+
});
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
if (config.project?.defaults?.connection) {
|
|
241
|
+
const connName = config.project.defaults.connection;
|
|
242
|
+
if (!config.connections.has(connName)) {
|
|
243
|
+
const suggestion = findSimilar(connName, Array.from(config.connections.keys()));
|
|
244
|
+
errors.push({
|
|
245
|
+
file: "yamchart.yaml",
|
|
246
|
+
message: `Default connection "${connName}" not found`,
|
|
247
|
+
suggestion: suggestion ? `Did you mean "${suggestion}"?` : void 0
|
|
248
|
+
});
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
}
|
|
252
|
+
function findSimilar(target, candidates) {
|
|
253
|
+
const threshold = 3;
|
|
254
|
+
for (const candidate of candidates) {
|
|
255
|
+
if (levenshtein(target.toLowerCase(), candidate.toLowerCase()) <= threshold) {
|
|
256
|
+
return candidate;
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
return null;
|
|
260
|
+
}
|
|
261
|
+
function levenshtein(a, b) {
|
|
262
|
+
const matrix = [];
|
|
263
|
+
for (let i = 0; i <= b.length; i++) {
|
|
264
|
+
matrix[i] = [i];
|
|
265
|
+
}
|
|
266
|
+
for (let j = 0; j <= a.length; j++) {
|
|
267
|
+
matrix[0][j] = j;
|
|
268
|
+
}
|
|
269
|
+
for (let i = 1; i <= b.length; i++) {
|
|
270
|
+
for (let j = 1; j <= a.length; j++) {
|
|
271
|
+
if (b.charAt(i - 1) === a.charAt(j - 1)) {
|
|
272
|
+
matrix[i][j] = matrix[i - 1][j - 1];
|
|
273
|
+
} else {
|
|
274
|
+
matrix[i][j] = Math.min(
|
|
275
|
+
matrix[i - 1][j - 1] + 1,
|
|
276
|
+
matrix[i][j - 1] + 1,
|
|
277
|
+
matrix[i - 1][j] + 1
|
|
278
|
+
);
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
return matrix[b.length][a.length];
|
|
283
|
+
}
|
|
284
|
+
async function dryRunValidation(projectDir, config, connectionName, errors) {
|
|
285
|
+
const { DuckDBConnector } = await import("./dist-A7VMHB26.js");
|
|
286
|
+
let passed = 0;
|
|
287
|
+
let failed = 0;
|
|
288
|
+
const connName = connectionName || config.project?.defaults?.connection;
|
|
289
|
+
if (!connName) {
|
|
290
|
+
errors.push({
|
|
291
|
+
file: "yamchart.yaml",
|
|
292
|
+
message: "No connection specified for dry-run (use --connection or set defaults.connection)"
|
|
293
|
+
});
|
|
294
|
+
return { passed, failed: 1 };
|
|
295
|
+
}
|
|
296
|
+
const connection = config.connections.get(connName);
|
|
297
|
+
if (!connection) {
|
|
298
|
+
errors.push({
|
|
299
|
+
file: "yamchart.yaml",
|
|
300
|
+
message: `Connection "${connName}" not found`
|
|
301
|
+
});
|
|
302
|
+
return { passed, failed: 1 };
|
|
303
|
+
}
|
|
304
|
+
if (connection.type !== "duckdb") {
|
|
305
|
+
errors.push({
|
|
306
|
+
file: `connections/${connName}.yaml`,
|
|
307
|
+
message: `Dry-run not yet supported for connection type "${connection.type}"`
|
|
308
|
+
});
|
|
309
|
+
return { passed, failed: 1 };
|
|
310
|
+
}
|
|
311
|
+
const connPath = join(projectDir, "connections", `${connName}.yaml`);
|
|
312
|
+
const connContent = await readFile(connPath, "utf-8");
|
|
313
|
+
const connConfig = parseYaml(connContent);
|
|
314
|
+
const dbPath = connConfig.config.path.startsWith("/") ? connConfig.config.path : join(projectDir, connConfig.config.path);
|
|
315
|
+
const connector = new DuckDBConnector({ path: dbPath });
|
|
316
|
+
try {
|
|
317
|
+
await connector.connect();
|
|
318
|
+
for (const [modelName, model] of config.models) {
|
|
319
|
+
const result = await connector.explain(model.sql);
|
|
320
|
+
if (result.valid) {
|
|
321
|
+
passed++;
|
|
322
|
+
} else {
|
|
323
|
+
failed++;
|
|
324
|
+
errors.push({
|
|
325
|
+
file: `models/${modelName}.sql`,
|
|
326
|
+
message: result.error || "Query validation failed"
|
|
327
|
+
});
|
|
328
|
+
}
|
|
329
|
+
}
|
|
330
|
+
} finally {
|
|
331
|
+
await connector.disconnect();
|
|
332
|
+
}
|
|
333
|
+
return { passed, failed };
|
|
334
|
+
}
|
|
335
|
+
|
|
336
|
+
// src/utils/config.ts
|
|
337
|
+
import { access as access2 } from "fs/promises";
|
|
338
|
+
import { join as join2, dirname, resolve } from "path";
|
|
339
|
+
import { config as loadDotenv } from "dotenv";
|
|
340
|
+
/**
 * Find the project root by walking upward from `startDir` until a directory
 * containing `yamchart.yaml` is found.
 *
 * @param {string} startDir - Directory to start the search from.
 * @returns {Promise<string|null>} Absolute path of the project root, or
 *   null when the filesystem root is reached without finding a config file.
 *
 * Bugfix: the previous version froze `root` at dirname(startDir), so the
 * while-loop ran at most once — only the start directory and its immediate
 * parent were ever checked, and all higher ancestors were silently skipped.
 * We now recompute the parent on every step and stop when dirname() no
 * longer changes, i.e. at the filesystem root.
 */
async function findProjectRoot(startDir) {
  let currentDir = resolve(startDir);
  for (;;) {
    try {
      // access() throws if the file does not exist (or is unreadable).
      await access2(join2(currentDir, "yamchart.yaml"));
      return currentDir;
    } catch {
      const parent = dirname(currentDir);
      if (parent === currentDir) {
        // dirname() is a fixed point at the filesystem root: nothing found.
        return null;
      }
      currentDir = parent;
    }
  }
}
|
|
359
|
+
/**
 * Load environment variables from the project's `.env` file into
 * `process.env` via dotenv. Missing files are ignored by dotenv itself.
 *
 * @param {string} projectDir - Project root directory containing `.env`.
 * @returns {void}
 */
function loadEnvFile(projectDir) {
  const envPath = join2(projectDir, ".env");
  loadDotenv({ path: envPath });
}
|
|
362
|
+
|
|
363
|
+
// Public surface of this generated chunk, consumed by the CLI entrypoints:
// project validation, project-root discovery, and .env loading.
export {
  validateProject,
  findProjectRoot,
  loadEnvFile
};
|
|
368
|
+
//# sourceMappingURL=chunk-BZSE3IA2.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/commands/validate.ts","../src/utils/config.ts"],"sourcesContent":["import { readFile, readdir, access } from 'fs/promises';\nimport { join, extname, relative } from 'path';\nimport { parse as parseYaml } from 'yaml';\nimport {\n ProjectSchema,\n ConnectionSchema,\n ChartSchema,\n DashboardSchema,\n ScheduleSchema,\n type Schedule,\n} from '@yamchart/schema';\nimport type { ZodError } from 'zod';\nimport { parseModelMetadata } from '@yamchart/query';\n\nfunction formatZodErrors(error: ZodError): string[] {\n return error.errors.map((e) => {\n const path = e.path.length > 0 ? e.path.join('.') : '(root)';\n return `${path}: ${e.message}`;\n });\n}\n\nexport interface ValidationError {\n file: string;\n line?: number;\n message: string;\n suggestion?: string;\n}\n\nexport interface ValidationResult {\n success: boolean;\n errors: ValidationError[];\n warnings: ValidationError[];\n stats: {\n files: number;\n passed: number;\n failed: number;\n };\n dryRunStats?: {\n passed: number;\n failed: number;\n };\n}\n\nexport interface ValidateOptions {\n dryRun: boolean;\n connection?: string;\n}\n\ninterface LoadedConfig {\n project: { name: string; version: string; defaults?: { connection?: string } } | null;\n connections: Map<string, { name: string; type: string }>;\n models: Map<string, { name: string; sql: string }>;\n charts: Map<string, { name: string; source: { model?: string; sql?: string } }>;\n dashboards: Map<string, { name: string; layout: unknown }>;\n schedules: Map<string, Schedule>;\n}\n\nexport async function validateProject(\n projectDir: string,\n options: ValidateOptions\n): Promise<ValidationResult> {\n const errors: ValidationError[] = [];\n const warnings: ValidationError[] = [];\n let filesChecked = 0;\n let filesPassed = 0;\n\n const config: LoadedConfig = {\n project: null,\n connections: new Map(),\n models: new Map(),\n charts: new Map(),\n dashboards: new Map(),\n schedules: new Map(),\n };\n\n // Phase 1: Schema 
validation\n\n // Validate yamchart.yaml\n const projectPath = join(projectDir, 'yamchart.yaml');\n try {\n await access(projectPath);\n filesChecked++;\n const content = await readFile(projectPath, 'utf-8');\n const parsed = parseYaml(content);\n const result = ProjectSchema.safeParse(parsed);\n\n if (result.success) {\n config.project = result.data;\n filesPassed++;\n } else {\n errors.push({\n file: 'yamchart.yaml',\n message: `Invalid schema:\\n${formatZodErrors(result.error).map(e => ` - ${e}`).join('\\n')}`,\n });\n }\n } catch {\n errors.push({\n file: 'yamchart.yaml',\n message: 'yamchart.yaml not found',\n });\n return {\n success: false,\n errors,\n warnings,\n stats: { files: filesChecked, passed: filesPassed, failed: filesChecked - filesPassed },\n };\n }\n\n // Validate connections\n const connectionsDir = join(projectDir, 'connections');\n try {\n await access(connectionsDir);\n const files = await readdir(connectionsDir);\n\n for (const file of files) {\n if (extname(file) !== '.yaml' && extname(file) !== '.yml') continue;\n filesChecked++;\n\n const filePath = join(connectionsDir, file);\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseYaml(content);\n const result = ConnectionSchema.safeParse(parsed);\n\n if (result.success) {\n config.connections.set(result.data.name, result.data);\n filesPassed++;\n } else {\n errors.push({\n file: `connections/${file}`,\n message: `Invalid schema:\\n${formatZodErrors(result.error).map(e => ` - ${e}`).join('\\n')}`,\n });\n }\n }\n } catch {\n // No connections directory is ok\n }\n\n // Validate models\n const modelsDir = join(projectDir, 'models');\n const modelStats = { filesChecked: 0, filesPassed: 0 };\n try {\n await access(modelsDir);\n await validateModelsDir(modelsDir, projectDir, config, errors, modelStats);\n filesChecked += modelStats.filesChecked;\n filesPassed += modelStats.filesPassed;\n } catch {\n // No models directory is ok\n }\n\n // Validate charts\n const chartsDir 
= join(projectDir, 'charts');\n try {\n await access(chartsDir);\n const files = await readdir(chartsDir);\n\n for (const file of files) {\n if (extname(file) !== '.yaml' && extname(file) !== '.yml') continue;\n filesChecked++;\n\n const filePath = join(chartsDir, file);\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseYaml(content);\n const result = ChartSchema.safeParse(parsed);\n\n if (result.success) {\n config.charts.set(result.data.name, result.data);\n filesPassed++;\n } else {\n errors.push({\n file: `charts/${file}`,\n message: `Invalid schema:\\n${formatZodErrors(result.error).map(e => ` - ${e}`).join('\\n')}`,\n });\n }\n }\n } catch {\n // No charts directory is ok\n }\n\n // Validate dashboards\n const dashboardsDir = join(projectDir, 'dashboards');\n try {\n await access(dashboardsDir);\n const files = await readdir(dashboardsDir);\n\n for (const file of files) {\n if (extname(file) !== '.yaml' && extname(file) !== '.yml') continue;\n filesChecked++;\n\n const filePath = join(dashboardsDir, file);\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseYaml(content);\n const result = DashboardSchema.safeParse(parsed);\n\n if (result.success) {\n config.dashboards.set(result.data.name, result.data);\n filesPassed++;\n } else {\n errors.push({\n file: `dashboards/${file}`,\n message: `Invalid schema:\\n${formatZodErrors(result.error).map(e => ` - ${e}`).join('\\n')}`,\n });\n }\n }\n } catch {\n // No dashboards directory is ok\n }\n\n // Validate schedules\n const schedulesDir = join(projectDir, 'schedules');\n try {\n await access(schedulesDir);\n const files = await readdir(schedulesDir);\n\n for (const file of files) {\n if (extname(file) !== '.yaml' && extname(file) !== '.yml') continue;\n filesChecked++;\n\n const filePath = join(schedulesDir, file);\n const content = await readFile(filePath, 'utf-8');\n const parsed = parseYaml(content);\n const result = ScheduleSchema.safeParse(parsed);\n\n if 
(result.success) {\n config.schedules.set(result.data.name, result.data);\n filesPassed++;\n } else {\n errors.push({\n file: `schedules/${file}`,\n message: `Invalid schema:\\n${formatZodErrors(result.error).map(e => ` - ${e}`).join('\\n')}`,\n });\n }\n }\n } catch {\n // No schedules directory is ok\n }\n\n // Phase 2: Cross-reference validation\n crossReferenceValidation(config, errors, warnings);\n\n // Phase 3: Dry-run query validation (if enabled)\n let dryRunStats: { passed: number; failed: number } | undefined;\n if (options.dryRun) {\n dryRunStats = await dryRunValidation(projectDir, config, options.connection, errors);\n }\n\n return {\n success: errors.length === 0,\n errors,\n warnings,\n stats: {\n files: filesChecked,\n passed: filesPassed,\n failed: filesChecked - filesPassed,\n },\n dryRunStats,\n };\n}\n\nasync function validateModelsDir(\n dir: string,\n projectDir: string,\n config: LoadedConfig,\n errors: ValidationError[],\n stats: { filesChecked: number; filesPassed: number }\n): Promise<void> {\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n if (entry.isDirectory()) {\n await validateModelsDir(fullPath, projectDir, config, errors, stats);\n } else if (extname(entry.name) === '.sql') {\n stats.filesChecked++;\n const relPath = relative(projectDir, fullPath);\n const content = await readFile(fullPath, 'utf-8');\n\n try {\n const parsed = parseModelMetadata(content);\n config.models.set(parsed.name, { name: parsed.name, sql: parsed.sql });\n stats.filesPassed++;\n } catch (err) {\n errors.push({\n file: relPath,\n message: err instanceof Error ? 
err.message : 'Failed to parse model',\n });\n }\n }\n }\n}\n\nfunction crossReferenceValidation(\n config: LoadedConfig,\n errors: ValidationError[],\n warnings: ValidationError[]\n): void {\n // Check that charts reference existing models\n for (const [chartName, chart] of config.charts) {\n if (chart.source.model && !config.models.has(chart.source.model)) {\n const suggestion = findSimilar(chart.source.model, Array.from(config.models.keys()));\n errors.push({\n file: `charts/${chartName}.yaml`,\n message: `Unknown model reference \"${chart.source.model}\"`,\n suggestion: suggestion ? `Did you mean \"${suggestion}\"?` : undefined,\n });\n }\n }\n\n // Check that schedules reference existing charts\n for (const [scheduleName, schedule] of config.schedules) {\n const chartNames = schedule.type === 'report' ? schedule.charts : [schedule.chart];\n for (const chartName of chartNames) {\n if (!config.charts.has(chartName)) {\n const suggestion = findSimilar(chartName, Array.from(config.charts.keys()));\n warnings.push({\n file: `schedules/${scheduleName}.yaml`,\n message: `Unknown chart reference \"${chartName}\"`,\n suggestion: suggestion ? `Did you mean \"${suggestion}\"?` : undefined,\n });\n }\n }\n\n // Validate cron expression (basic: 5 or 6 space-separated fields)\n const cronFields = schedule.schedule.trim().split(/\\s+/);\n if (cronFields.length < 5 || cronFields.length > 6) {\n errors.push({\n file: `schedules/${scheduleName}.yaml`,\n message: `Invalid cron expression \"${schedule.schedule}\" (expected 5-6 fields)`,\n });\n }\n }\n\n // Check that default connection exists\n if (config.project?.defaults?.connection) {\n const connName = config.project.defaults.connection;\n if (!config.connections.has(connName)) {\n const suggestion = findSimilar(connName, Array.from(config.connections.keys()));\n errors.push({\n file: 'yamchart.yaml',\n message: `Default connection \"${connName}\" not found`,\n suggestion: suggestion ? 
`Did you mean \"${suggestion}\"?` : undefined,\n });\n }\n }\n}\n\nfunction findSimilar(target: string, candidates: string[]): string | null {\n const threshold = 3; // Levenshtein distance threshold\n\n for (const candidate of candidates) {\n if (levenshtein(target.toLowerCase(), candidate.toLowerCase()) <= threshold) {\n return candidate;\n }\n }\n return null;\n}\n\nfunction levenshtein(a: string, b: string): number {\n const matrix: number[][] = [];\n\n for (let i = 0; i <= b.length; i++) {\n matrix[i] = [i];\n }\n for (let j = 0; j <= a.length; j++) {\n matrix[0]![j] = j;\n }\n\n for (let i = 1; i <= b.length; i++) {\n for (let j = 1; j <= a.length; j++) {\n if (b.charAt(i - 1) === a.charAt(j - 1)) {\n matrix[i]![j] = matrix[i - 1]![j - 1]!;\n } else {\n matrix[i]![j] = Math.min(\n matrix[i - 1]![j - 1]! + 1,\n matrix[i]![j - 1]! + 1,\n matrix[i - 1]![j]! + 1\n );\n }\n }\n }\n\n return matrix[b.length]![a.length]!;\n}\n\nasync function dryRunValidation(\n projectDir: string,\n config: LoadedConfig,\n connectionName: string | undefined,\n errors: ValidationError[]\n): Promise<{ passed: number; failed: number }> {\n const { DuckDBConnector } = await import('@yamchart/query');\n\n let passed = 0;\n let failed = 0;\n\n // Determine which connection to use\n const connName = connectionName || config.project?.defaults?.connection;\n if (!connName) {\n errors.push({\n file: 'yamchart.yaml',\n message: 'No connection specified for dry-run (use --connection or set defaults.connection)',\n });\n return { passed, failed: 1 };\n }\n\n const connection = config.connections.get(connName);\n if (!connection) {\n errors.push({\n file: 'yamchart.yaml',\n message: `Connection \"${connName}\" not found`,\n });\n return { passed, failed: 1 };\n }\n\n // Only DuckDB supported for now\n if (connection.type !== 'duckdb') {\n errors.push({\n file: `connections/${connName}.yaml`,\n message: `Dry-run not yet supported for connection type \"${connection.type}\"`,\n });\n return { 
passed, failed: 1 };\n }\n\n // Load full connection config to get path\n const connPath = join(projectDir, 'connections', `${connName}.yaml`);\n const connContent = await readFile(connPath, 'utf-8');\n const connConfig = parseYaml(connContent) as { config: { path: string } };\n\n // Resolve path relative to project\n const dbPath = connConfig.config.path.startsWith('/')\n ? connConfig.config.path\n : join(projectDir, connConfig.config.path);\n\n const connector = new DuckDBConnector({ path: dbPath });\n\n try {\n await connector.connect();\n\n for (const [modelName, model] of config.models) {\n const result = await connector.explain(model.sql);\n\n if (result.valid) {\n passed++;\n } else {\n failed++;\n errors.push({\n file: `models/${modelName}.sql`,\n message: result.error || 'Query validation failed',\n });\n }\n }\n } finally {\n await connector.disconnect();\n }\n\n return { passed, failed };\n}\n","import { access } from 'fs/promises';\nimport { join, dirname, resolve } from 'path';\nimport { config as loadDotenv } from 'dotenv';\n\n/**\n * Find the project root by looking for yamchart.yaml.\n * Searches current directory and parent directories.\n */\nexport async function findProjectRoot(startDir: string): Promise<string | null> {\n let currentDir = resolve(startDir);\n const root = dirname(currentDir);\n\n while (currentDir !== root) {\n const configPath = join(currentDir, 'yamchart.yaml');\n try {\n await access(configPath);\n return currentDir;\n } catch {\n currentDir = dirname(currentDir);\n }\n }\n\n // Check root directory too\n try {\n await access(join(root, 'yamchart.yaml'));\n return root;\n } catch {\n return null;\n }\n}\n\n/**\n * Load .env file from project directory.\n */\nexport function loadEnvFile(projectDir: string): void {\n loadDotenv({ path: join(projectDir, '.env') });\n}\n\n/**\n * Resolve ${VAR} syntax in a string from environment variables.\n */\nexport function resolveEnvVars(value: string): string {\n return 
value.replace(/\\$\\{([^}]+)\\}/g, (match, varName) => {\n const envValue = process.env[varName];\n if (envValue === undefined) {\n throw new Error(`Environment variable not found: ${varName}`);\n }\n return envValue;\n });\n}\n\n/**\n * Recursively resolve env vars in an object.\n */\nexport function resolveEnvVarsInObject<T>(obj: T): T {\n if (typeof obj === 'string') {\n return resolveEnvVars(obj) as T;\n }\n if (Array.isArray(obj)) {\n return obj.map(resolveEnvVarsInObject) as T;\n }\n if (obj !== null && typeof obj === 'object') {\n const result: Record<string, unknown> = {};\n for (const [key, value] of Object.entries(obj)) {\n result[key] = resolveEnvVarsInObject(value);\n }\n return result as T;\n }\n return obj;\n}\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,UAAU,SAAS,cAAc;AAC1C,SAAS,MAAM,SAAS,gBAAgB;AACxC,SAAS,SAAS,iBAAiB;AAYnC,SAAS,gBAAgB,OAA2B;AAClD,SAAO,MAAM,OAAO,IAAI,CAAC,MAAM;AAC7B,UAAM,OAAO,EAAE,KAAK,SAAS,IAAI,EAAE,KAAK,KAAK,GAAG,IAAI;AACpD,WAAO,GAAG,IAAI,KAAK,EAAE,OAAO;AAAA,EAC9B,CAAC;AACH;AAsCA,eAAsB,gBACpB,YACA,SAC2B;AAC3B,QAAM,SAA4B,CAAC;AACnC,QAAM,WAA8B,CAAC;AACrC,MAAI,eAAe;AACnB,MAAI,cAAc;AAElB,QAAM,SAAuB;AAAA,IAC3B,SAAS;AAAA,IACT,aAAa,oBAAI,IAAI;AAAA,IACrB,QAAQ,oBAAI,IAAI;AAAA,IAChB,QAAQ,oBAAI,IAAI;AAAA,IAChB,YAAY,oBAAI,IAAI;AAAA,IACpB,WAAW,oBAAI,IAAI;AAAA,EACrB;AAKA,QAAM,cAAc,KAAK,YAAY,eAAe;AACpD,MAAI;AACF,UAAM,OAAO,WAAW;AACxB;AACA,UAAM,UAAU,MAAM,SAAS,aAAa,OAAO;AACnD,UAAM,SAAS,UAAU,OAAO;AAChC,UAAM,SAAS,cAAc,UAAU,MAAM;AAE7C,QAAI,OAAO,SAAS;AAClB,aAAO,UAAU,OAAO;AACxB;AAAA,IACF,OAAO;AACL,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,SAAS;AAAA,EAAoB,gBAAgB,OAAO,KAAK,EAAE,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,MAC5F,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AACN,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AACD,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,OAAO,EAAE,OAAO,cAAc,QAAQ,aAAa,QAAQ,eAAe,YAAY;AAAA,IACxF;AAAA,EACF;AAGA,QAAM,iBAAiB,KAAK,YAAY,aAAa;AACrD,MAAI;AACF,UAAM,OAAO,cAAc;AAC3B,UAAM,QAAQ,MAAM,QAAQ,cAAc;AAE1C,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,IA
AI,MAAM,WAAW,QAAQ,IAAI,MAAM,OAAQ;AAC3D;AAEA,YAAM,WAAW,KAAK,gBAAgB,IAAI;AAC1C,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,YAAM,SAAS,UAAU,OAAO;AAChC,YAAM,SAAS,iBAAiB,UAAU,MAAM;AAEhD,UAAI,OAAO,SAAS;AAClB,eAAO,YAAY,IAAI,OAAO,KAAK,MAAM,OAAO,IAAI;AACpD;AAAA,MACF,OAAO;AACL,eAAO,KAAK;AAAA,UACV,MAAM,eAAe,IAAI;AAAA,UACzB,SAAS;AAAA,EAAoB,gBAAgB,OAAO,KAAK,EAAE,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,QAC5F,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,QAAM,aAAa,EAAE,cAAc,GAAG,aAAa,EAAE;AACrD,MAAI;AACF,UAAM,OAAO,SAAS;AACtB,UAAM,kBAAkB,WAAW,YAAY,QAAQ,QAAQ,UAAU;AACzE,oBAAgB,WAAW;AAC3B,mBAAe,WAAW;AAAA,EAC5B,QAAQ;AAAA,EAER;AAGA,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,MAAI;AACF,UAAM,OAAO,SAAS;AACtB,UAAM,QAAQ,MAAM,QAAQ,SAAS;AAErC,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,IAAI,MAAM,WAAW,QAAQ,IAAI,MAAM,OAAQ;AAC3D;AAEA,YAAM,WAAW,KAAK,WAAW,IAAI;AACrC,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,YAAM,SAAS,UAAU,OAAO;AAChC,YAAM,SAAS,YAAY,UAAU,MAAM;AAE3C,UAAI,OAAO,SAAS;AAClB,eAAO,OAAO,IAAI,OAAO,KAAK,MAAM,OAAO,IAAI;AAC/C;AAAA,MACF,OAAO;AACL,eAAO,KAAK;AAAA,UACV,MAAM,UAAU,IAAI;AAAA,UACpB,SAAS;AAAA,EAAoB,gBAAgB,OAAO,KAAK,EAAE,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,QAC5F,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,QAAM,gBAAgB,KAAK,YAAY,YAAY;AACnD,MAAI;AACF,UAAM,OAAO,aAAa;AAC1B,UAAM,QAAQ,MAAM,QAAQ,aAAa;AAEzC,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,IAAI,MAAM,WAAW,QAAQ,IAAI,MAAM,OAAQ;AAC3D;AAEA,YAAM,WAAW,KAAK,eAAe,IAAI;AACzC,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,YAAM,SAAS,UAAU,OAAO;AAChC,YAAM,SAAS,gBAAgB,UAAU,MAAM;AAE/C,UAAI,OAAO,SAAS;AAClB,eAAO,WAAW,IAAI,OAAO,KAAK,MAAM,OAAO,IAAI;AACnD;AAAA,MACF,OAAO;AACL,eAAO,KAAK;AAAA,UACV,MAAM,cAAc,IAAI;AAAA,UACxB,SAAS;AAAA,EAAoB,gBAAgB,OAAO,KAAK,EAAE,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,QAC5F,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,QAAM,eAAe,KAAK,YAAY,WAAW;AACjD,MAAI;AACF,UAAM,OAAO,YAAY;AACzB,UAAM,QAAQ,MAAM,QAAQ,YAAY;AAExC,eAAW,QAAQ,OAAO;AACxB,UAAI,QAAQ,IAAI,MAAM,WAAW,QAAQ,IAAI,MAAM,OAAQ;AAC3D;AAEA,YAAM,WAAW,KAAK
,cAAc,IAAI;AACxC,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAChD,YAAM,SAAS,UAAU,OAAO;AAChC,YAAM,SAAS,eAAe,UAAU,MAAM;AAE9C,UAAI,OAAO,SAAS;AAClB,eAAO,UAAU,IAAI,OAAO,KAAK,MAAM,OAAO,IAAI;AAClD;AAAA,MACF,OAAO;AACL,eAAO,KAAK;AAAA,UACV,MAAM,aAAa,IAAI;AAAA,UACvB,SAAS;AAAA,EAAoB,gBAAgB,OAAO,KAAK,EAAE,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,QAC5F,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,2BAAyB,QAAQ,QAAQ,QAAQ;AAGjD,MAAI;AACJ,MAAI,QAAQ,QAAQ;AAClB,kBAAc,MAAM,iBAAiB,YAAY,QAAQ,QAAQ,YAAY,MAAM;AAAA,EACrF;AAEA,SAAO;AAAA,IACL,SAAS,OAAO,WAAW;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,OAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,QAAQ,eAAe;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAe,kBACb,KACA,YACA,QACA,QACA,OACe;AACf,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAE1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AAErC,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,kBAAkB,UAAU,YAAY,QAAQ,QAAQ,KAAK;AAAA,IACrE,WAAW,QAAQ,MAAM,IAAI,MAAM,QAAQ;AACzC,YAAM;AACN,YAAM,UAAU,SAAS,YAAY,QAAQ;AAC7C,YAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAEhD,UAAI;AACF,cAAM,SAAS,mBAAmB,OAAO;AACzC,eAAO,OAAO,IAAI,OAAO,MAAM,EAAE,MAAM,OAAO,MAAM,KAAK,OAAO,IAAI,CAAC;AACrE,cAAM;AAAA,MACR,SAAS,KAAK;AACZ,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,SAAS,eAAe,QAAQ,IAAI,UAAU;AAAA,QAChD,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,yBACP,QACA,QACA,UACM;AAEN,aAAW,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ;AAC9C,QAAI,MAAM,OAAO,SAAS,CAAC,OAAO,OAAO,IAAI,MAAM,OAAO,KAAK,GAAG;AAChE,YAAM,aAAa,YAAY,MAAM,OAAO,OAAO,MAAM,KAAK,OAAO,OAAO,KAAK,CAAC,CAAC;AACnF,aAAO,KAAK;AAAA,QACV,MAAM,UAAU,SAAS;AAAA,QACzB,SAAS,4BAA4B,MAAM,OAAO,KAAK;AAAA,QACvD,YAAY,aAAa,iBAAiB,UAAU,OAAO;AAAA,MAC7D,CAAC;AAAA,IACH;AAAA,EACF;AAGA,aAAW,CAAC,cAAc,QAAQ,KAAK,OAAO,WAAW;AACvD,UAAM,aAAa,SAAS,SAAS,WAAW,SAAS,SAAS,CAAC,SAAS,KAAK;AACjF,eAAW,aAAa,YAAY;AAClC,UAAI,CAAC,OAAO,OAAO,IAAI,SAAS,GAAG;AACjC,cAAM,aAAa,YAAY,WAAW,MAAM,KAAK,OAAO,OAAO,KAAK,CAAC,CAAC;AAC1E,iBAAS,KAAK;AAAA,UACZ,MAAM,aAAa,YAAY;AAAA,UAC/B,SAAS,4BAA4B,SAAS;AAAA,UAC9C,YAAY,aAAa,iBAAiB,UAAU,OAAO;AAAA,QAC7D,CAAC;AAAA,MACH;AAAA,IACF;AAGA,UAAM,aAAa,SAAS,SAA
S,KAAK,EAAE,MAAM,KAAK;AACvD,QAAI,WAAW,SAAS,KAAK,WAAW,SAAS,GAAG;AAClD,aAAO,KAAK;AAAA,QACV,MAAM,aAAa,YAAY;AAAA,QAC/B,SAAS,4BAA4B,SAAS,QAAQ;AAAA,MACxD,CAAC;AAAA,IACH;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,UAAU,YAAY;AACxC,UAAM,WAAW,OAAO,QAAQ,SAAS;AACzC,QAAI,CAAC,OAAO,YAAY,IAAI,QAAQ,GAAG;AACrC,YAAM,aAAa,YAAY,UAAU,MAAM,KAAK,OAAO,YAAY,KAAK,CAAC,CAAC;AAC9E,aAAO,KAAK;AAAA,QACV,MAAM;AAAA,QACN,SAAS,uBAAuB,QAAQ;AAAA,QACxC,YAAY,aAAa,iBAAiB,UAAU,OAAO;AAAA,MAC7D,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEA,SAAS,YAAY,QAAgB,YAAqC;AACxE,QAAM,YAAY;AAElB,aAAW,aAAa,YAAY;AAClC,QAAI,YAAY,OAAO,YAAY,GAAG,UAAU,YAAY,CAAC,KAAK,WAAW;AAC3E,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,SAAS,YAAY,GAAW,GAAmB;AACjD,QAAM,SAAqB,CAAC;AAE5B,WAAS,IAAI,GAAG,KAAK,EAAE,QAAQ,KAAK;AAClC,WAAO,CAAC,IAAI,CAAC,CAAC;AAAA,EAChB;AACA,WAAS,IAAI,GAAG,KAAK,EAAE,QAAQ,KAAK;AAClC,WAAO,CAAC,EAAG,CAAC,IAAI;AAAA,EAClB;AAEA,WAAS,IAAI,GAAG,KAAK,EAAE,QAAQ,KAAK;AAClC,aAAS,IAAI,GAAG,KAAK,EAAE,QAAQ,KAAK;AAClC,UAAI,EAAE,OAAO,IAAI,CAAC,MAAM,EAAE,OAAO,IAAI,CAAC,GAAG;AACvC,eAAO,CAAC,EAAG,CAAC,IAAI,OAAO,IAAI,CAAC,EAAG,IAAI,CAAC;AAAA,MACtC,OAAO;AACL,eAAO,CAAC,EAAG,CAAC,IAAI,KAAK;AAAA,UACnB,OAAO,IAAI,CAAC,EAAG,IAAI,CAAC,IAAK;AAAA,UACzB,OAAO,CAAC,EAAG,IAAI,CAAC,IAAK;AAAA,UACrB,OAAO,IAAI,CAAC,EAAG,CAAC,IAAK;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,OAAO,EAAE,MAAM,EAAG,EAAE,MAAM;AACnC;AAEA,eAAe,iBACb,YACA,QACA,gBACA,QAC6C;AAC7C,QAAM,EAAE,gBAAgB,IAAI,MAAM,OAAO,oBAAiB;AAE1D,MAAI,SAAS;AACb,MAAI,SAAS;AAGb,QAAM,WAAW,kBAAkB,OAAO,SAAS,UAAU;AAC7D,MAAI,CAAC,UAAU;AACb,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,SAAS;AAAA,IACX,CAAC;AACD,WAAO,EAAE,QAAQ,QAAQ,EAAE;AAAA,EAC7B;AAEA,QAAM,aAAa,OAAO,YAAY,IAAI,QAAQ;AAClD,MAAI,CAAC,YAAY;AACf,WAAO,KAAK;AAAA,MACV,MAAM;AAAA,MACN,SAAS,eAAe,QAAQ;AAAA,IAClC,CAAC;AACD,WAAO,EAAE,QAAQ,QAAQ,EAAE;AAAA,EAC7B;AAGA,MAAI,WAAW,SAAS,UAAU;AAChC,WAAO,KAAK;AAAA,MACV,MAAM,eAAe,QAAQ;AAAA,MAC7B,SAAS,kDAAkD,WAAW,IAAI;AAAA,IAC5E,CAAC;AACD,WAAO,EAAE,QAAQ,QAAQ,EAAE;AAAA,EAC7B;AAGA,QAAM,WAAW,KAAK,YAAY,eAAe,GAAG,QAAQ,OAAO;AACnE,QAAM,cAAc,MAAM,SAAS,UAAU,OAAO;A
ACpD,QAAM,aAAa,UAAU,WAAW;AAGxC,QAAM,SAAS,WAAW,OAAO,KAAK,WAAW,GAAG,IAChD,WAAW,OAAO,OAClB,KAAK,YAAY,WAAW,OAAO,IAAI;AAE3C,QAAM,YAAY,IAAI,gBAAgB,EAAE,MAAM,OAAO,CAAC;AAEtD,MAAI;AACF,UAAM,UAAU,QAAQ;AAExB,eAAW,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ;AAC9C,YAAM,SAAS,MAAM,UAAU,QAAQ,MAAM,GAAG;AAEhD,UAAI,OAAO,OAAO;AAChB;AAAA,MACF,OAAO;AACL;AACA,eAAO,KAAK;AAAA,UACV,MAAM,UAAU,SAAS;AAAA,UACzB,SAAS,OAAO,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF,UAAE;AACA,UAAM,UAAU,WAAW;AAAA,EAC7B;AAEA,SAAO,EAAE,QAAQ,OAAO;AAC1B;;;ACxcA,SAAS,UAAAA,eAAc;AACvB,SAAS,QAAAC,OAAM,SAAS,eAAe;AACvC,SAAS,UAAU,kBAAkB;AAMrC,eAAsB,gBAAgB,UAA0C;AAC9E,MAAI,aAAa,QAAQ,QAAQ;AACjC,QAAM,OAAO,QAAQ,UAAU;AAE/B,SAAO,eAAe,MAAM;AAC1B,UAAM,aAAaA,MAAK,YAAY,eAAe;AACnD,QAAI;AACF,YAAMD,QAAO,UAAU;AACvB,aAAO;AAAA,IACT,QAAQ;AACN,mBAAa,QAAQ,UAAU;AAAA,IACjC;AAAA,EACF;AAGA,MAAI;AACF,UAAMA,QAAOC,MAAK,MAAM,eAAe,CAAC;AACxC,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,YAAY,YAA0B;AACpD,aAAW,EAAE,MAAMA,MAAK,YAAY,MAAM,EAAE,CAAC;AAC/C;","names":["access","join"]}
|