@databricks/appkit 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -33,7 +33,7 @@ function findInstalledPackages() {
   const installed = [];
 
   for (const pkg of PACKAGES) {
-    const claudePath = path.join(cwd, "node_modules", pkg.name, "CLAUDE.md");
+    const claudePath = path.join(cwd, "node_modules", pkg.name, "package.json");
    if (fs.existsSync(claudePath)) {
      installed.push(pkg);
    }
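
This hunk changes how the postinstall script detects installed AppKit packages: it now probes each package's `package.json` instead of `CLAUDE.md`, and `package.json` is present for every installed npm package rather than only for packages that ship agent docs. A minimal sketch of the pattern, with a hypothetical `PACKAGES` list standing in for the script's constant:

```ts
import fs from "node:fs";
import path from "node:path";

// Hypothetical stand-in for the script's PACKAGES constant.
const PACKAGES = [{ name: "@databricks/appkit" }, { name: "@databricks/appkit-ui" }];

function findInstalledPackages(cwd: string = process.cwd()) {
  const installed: { name: string }[] = [];
  for (const pkg of PACKAGES) {
    // Every installed npm package has a package.json; optional docs files
    // like CLAUDE.md can be absent, so they are unreliable install markers.
    const manifest = path.join(cwd, "node_modules", pkg.name, "package.json");
    if (fs.existsSync(manifest)) installed.push(pkg);
  }
  return installed;
}
```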
@@ -6,7 +6,7 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __esmMin = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
-var __export = (all, symbols) => {
+var __exportAll = (all, symbols) => {
   let target = {};
   for (var name in all) {
     __defProp(target, name, {
@@ -36,4 +36,4 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __hasOwnProp.call(mod, "module.exports") ? mod["module.exports"] : __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
 //#endregion
-export { __esmMin, __export, __toCommonJS };
+export { __esmMin, __exportAll, __toCommonJS };
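
These two hunks are a pure rename of the bundler runtime helper, `__export` to `__exportAll`; its behavior (defining a getter per exported symbol so re-exports stay live) is unchanged. A sketch of what the helper does, reconstructed from the surrounding lines (the unused `symbols` parameter is dropped here):

```ts
// Reconstructed sketch of the rolldown runtime helper: each export is
// exposed through a getter, so reads always see the current binding.
const __exportAll = (all: Record<string, () => unknown>) => {
  const target: Record<string, unknown> = {};
  for (const name in all) {
    Object.defineProperty(target, name, { get: all[name], enumerable: true });
  }
  return target;
};
```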
@@ -1,6 +1,6 @@
 //#region package.json
 var name = "@databricks/appkit";
-var version = "0.1.2";
+var version = "0.1.4";
 
 //#endregion
 export { name, version };
@@ -5,6 +5,7 @@ import fs from "node:fs";
 //#region src/type-generator/cache.ts
 const CACHE_VERSION = "1";
 const CACHE_FILE = ".appkit-types-cache.json";
+const CACHE_DIR = path.join(process.cwd(), "node_modules", ".databricks", "appkit");
 /**
 * Hash the SQL query
 * Uses MD5 to hash the SQL query
@@ -17,12 +18,12 @@ function hashSQL(sql) {
 /**
 * Load the cache from the file system
 * If the cache is not found, run the query explain
-* @param cacheDir - the directory to load the cache from
 * @returns - the cache
 */
-function loadCache(cacheDir) {
-  const cachePath = path.join(cacheDir, CACHE_FILE);
+function loadCache() {
+  const cachePath = path.join(CACHE_DIR, CACHE_FILE);
   try {
+    if (!fs.existsSync(CACHE_DIR)) fs.mkdirSync(CACHE_DIR, { recursive: true });
    if (fs.existsSync(cachePath)) {
      const cache = JSON.parse(fs.readFileSync(cachePath, "utf8"));
      if (cache.version === CACHE_VERSION) return cache;
@@ -36,11 +37,10 @@ function loadCache(cacheDir) {
 /**
 * Save the cache to the file system
 * The cache is saved as a JSON file, it is used to avoid running the query explain multiple times
-* @param cacheDir - the directory to save the cache to
 * @param cache - cache object to save
 */
-function saveCache(cacheDir, cache) {
-  const cachePath = path.join(cacheDir, CACHE_FILE);
+function saveCache(cache) {
+  const cachePath = path.join(CACHE_DIR, CACHE_FILE);
   fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2), "utf8");
 }
 
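Net effect of the three cache hunks: the type cache no longer lives next to the user's SQL files but in a fixed `node_modules/.databricks/appkit` directory, which `loadCache()` now creates on demand. A small sketch of where the file ends up, with an illustrative project path:

```ts
import path from "node:path";

// Fixed cache location as of 0.1.4 (previously: the caller-supplied query folder).
const CACHE_DIR = path.join(process.cwd(), "node_modules", ".databricks", "appkit");
const CACHE_FILE = ".appkit-types-cache.json";

// e.g. /my-app/node_modules/.databricks/appkit/.appkit-types-cache.json
console.log(path.join(CACHE_DIR, CACHE_FILE));
```

Keeping the cache under `node_modules` also means a clean install drops it, instead of it lingering in the query folder.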
@@ -1 +1 @@
[cache.js.map: single-line sourcemap regenerated; the embedded sourcesContent mirrors the cache.ts changes above (new CACHE_DIR constant, parameterless loadCache/saveCache). Full JSON omitted.]
@@ -68,7 +68,7 @@ async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {
   const cache = noCache ? {
     version: CACHE_VERSION,
     queries: {}
-  } : loadCache(queryFolder);
+  } : loadCache();
   const client = new WorkspaceClient({});
   const querySchemas = [];
   const failedQueries = [];
@@ -122,7 +122,7 @@ async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {
       });
     }
   }
-  saveCache(queryFolder, cache);
+  saveCache(cache);
   if (failedQueries.length > 0) console.warn(` Warning: ${failedQueries.length} queries failed\n`);
   return querySchemas;
 }
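
The call sites change to match the new parameterless cache API. For reference, the persisted cache is keyed by query name with an MD5 hash per entry (the `Cache`/`CacheEntry` interfaces in `src/type-generator/cache.ts`); a sketch of the on-disk shape with an illustrative entry:

```ts
// Shape of .appkit-types-cache.json, per the interfaces in cache.ts.
interface CacheEntry {
  hash: string; // MD5 of the SQL text; any edit to the query invalidates the entry
  type: string; // generated TypeScript type for the query's params and rows
}

interface Cache {
  version: string; // CACHE_VERSION, currently "1"
  queries: Record<string, CacheEntry>;
}

// Illustrative entry for a config/queries/top_users.sql file.
const example: Cache = {
  version: "1",
  queries: {
    top_users: { hash: "d41d8cd98f00b204e9800998ecf8427e", type: "{ /* ... */ }" },
  },
};
```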
@@ -1 +1 @@
[query-registry.js.map: single-line sourcemap regenerated; the embedded sourcesContent mirrors the loadCache()/saveCache(cache) call-site changes above. Full JSON omitted.]
@@ -1 +1 @@
-{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;AACmC;AAmBnC,UAbU,wBAAA,CAauB;EAAA,OAAA,CAAA,EAAA,MAAA;;cAAsC,CAAA,EAAA,MAAA,EAAA;;;;;;;;iBAAvD,iBAAA,WAA4B,2BAA2B"}
+{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;AACmC;AAoBnC,UAbU,wBAAA,CAauB;EAAA,OAAA,CAAA,EAAA,MAAA;;cAAsC,CAAA,EAAA,MAAA,EAAA;;;;;;;;iBAAvD,iBAAA,WAA4B,2BAA2B"}
@@ -1,5 +1,6 @@
 import { generateFromEntryPoint } from "./index.js";
 import path from "node:path";
+import fs from "node:fs";
 
 //#region src/type-generator/vite-plugin.ts
 /**
@@ -32,10 +33,18 @@ function appKitTypesPlugin(options) {
   }
   return {
     name: "appkit-types",
+    apply() {
+      if (!(process.env.DATABRICKS_WAREHOUSE_ID || "")) {
+        console.warn("[AppKit] Warehouse ID not found. Skipping type generation.");
+        return false;
+      }
+      if (!fs.existsSync(path.join(process.cwd(), "config", "queries"))) return false;
+      return true;
+    },
     configResolved(config) {
       root = config.root;
       outFile = path.resolve(root, options?.outFile ?? "src/appKitTypes.d.ts");
-      watchFolders = (options?.watchFolders ?? ["../config/queries"]).map((folder) => path.resolve(root, folder));
+      watchFolders = options?.watchFolders ?? [path.join(process.cwd(), "config", "queries")];
     },
     buildStart() {
       generate();
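
The new `apply()` hook lets Vite drop the plugin up front when `DATABRICKS_WAREHOUSE_ID` is unset or `config/queries` does not exist, rather than warning on every build start. A minimal usage sketch; the plugin factory name comes from the code above, but the import specifier is an assumption:

```ts
// vite.config.ts (sketch; the "@databricks/appkit/vite" path is assumed)
import { defineConfig } from "vite";
import { appKitTypesPlugin } from "@databricks/appkit/vite";

export default defineConfig({
  plugins: [
    // As of 0.1.4 the plugin's apply() returns false (plugin disabled)
    // unless DATABRICKS_WAREHOUSE_ID is set and ./config/queries exists.
    appKitTypesPlugin({ outFile: "src/appKitTypes.d.ts" }),
  ],
});
```

Returning a boolean from `apply()` is standard Vite plugin API: a `false` result removes the plugin from the resolved pipeline before any of its other hooks run.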
@@ -1 +1 @@
[vite-plugin.js.map: single-line sourcemap regenerated; the embedded sourcesContent mirrors the new apply() hook and watchFolders changes above. Full JSON omitted.]
@@ -1,11 +1,11 @@
-import { __esmMin, __export } from "../_virtual/rolldown_runtime.js";
+import { __esmMin, __exportAll } from "../_virtual/rolldown_runtime.js";
 import { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware } from "./databricks-client-middleware.js";
 import { init_env_validator, validateEnv } from "./env-validator.js";
 import { deepMerge, init_merge } from "./merge.js";
 import { init_vite_config_merge, mergeConfigDedup } from "./vite-config-merge.js";
 
 //#region src/utils/index.ts
-var utils_exports = /* @__PURE__ */ __export({
+var utils_exports = /* @__PURE__ */ __exportAll({
   databricksClientMiddleware: () => databricksClientMiddleware,
   deepMerge: () => deepMerge,
   getRequestContext: () => getRequestContext,
package/llms.txt CHANGED
@@ -75,46 +75,24 @@ Why this layout:
     "dev": "NODE_ENV=development tsx watch server/index.ts",
     "build": "npm run build:server && npm run build:client",
     "build:server": "tsdown --out-dir build server/index.ts",
-    "build:client": "cd client && npm run build",
+    "build:client": "tsc -b && vite build --config client/vite.config.ts",
     "start": "node build/index.mjs"
   },
   "dependencies": {
-    "@databricks/appkit": "^0.0.2"
+    "@databricks/appkit": "^0.1.2",
+    "@databricks/appkit-ui": "^0.1.2",
+    "react": "^19.2.3",
+    "react-dom": "^19.2.3"
   },
   "devDependencies": {
     "@types/node": "^20.0.0",
+    "@types/react": "^19.0.0",
+    "@types/react-dom": "^19.0.0",
+    "@vitejs/plugin-react": "^5.1.1",
     "tsdown": "^0.15.7",
     "tsx": "^4.19.0",
-    "typescript": "~5.6.0"
-  }
-}
-```
-
-### `client/package.json`
-
-```json
-{
-  "name": "client",
-  "private": true,
-  "version": "0.0.0",
-  "type": "module",
-  "scripts": {
-    "dev": "vite",
-    "build": "vite build",
-    "preview": "vite preview"
-  },
-  "dependencies": {
-    "@databricks/appkit-ui": "^0.0.2",
-    "react": "^18.0.0",
-    "react-dom": "^18.0.0",
-    "recharts": "^3.0.0"
-  },
-  "devDependencies": {
-    "@types/react": "^18.0.0",
-    "@types/react-dom": "^18.0.0",
-    "@vitejs/plugin-react": "^5.0.0",
     "typescript": "~5.6.0",
-    "vite": "^6.0.0"
+    "vite": "^7.2.4"
   }
 }
 ```
@@ -208,7 +186,6 @@ await createApp({
 ```bash
 # Install dependencies
 npm install
-cd client && npm install && cd ..
 
 # Development (starts backend + Vite dev server)
 npm run dev
@@ -225,19 +202,14 @@ If you already have a React/Vite app and want to add AppKit:
 ### 1. Install dependencies
 
 ```bash
-npm install @databricks/appkit
-npm install -D tsx tsdown
+npm install @databricks/appkit @databricks/appkit-ui react react-dom
+npm install -D tsx tsdown vite @vitejs/plugin-react typescript
 
 # If you don't already have a client/ folder, create one and move your Vite app into it:
 # - move index.html -> client/index.html
 # - move vite.config.ts -> client/vite.config.ts
 # - move src/ -> client/src/
 #
-# Then install client deps:
-cd client
-npm install @databricks/appkit-ui react react-dom recharts
-npm install -D vite @vitejs/plugin-react typescript
-cd ..
 ```
 
 ### 2. Create `server/index.ts` (new file)
@@ -258,7 +230,7 @@ await createApp({
     "dev": "NODE_ENV=development tsx watch server/index.ts",
     "build": "npm run build:server && npm run build:client",
     "build:server": "tsdown --out-dir build server/index.ts",
-    "build:client": "cd client && npm run build",
+    "build:client": "tsc -b && vite build --config client/vite.config.ts",
     "start": "node build/index.mjs"
   }
 }
@@ -276,7 +248,7 @@ await createApp({
 import { createApp, server, analytics } from "@databricks/appkit";
 
 await createApp({
-  plugins: [server(), analytics({})],
+  plugins: [server(), analytics()],
 });
 ```
 
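The quickstart drops the empty options object, which suggests the configuration argument of `analytics()` is optional in this release; both spellings appear in this diff, so a hedged reading is that they are equivalent defaults:

```ts
import { createApp, server, analytics } from "@databricks/appkit";

// analytics() with no argument, per the updated llms.txt example;
// analytics({}) was the 0.1.2 spelling and presumably still works.
await createApp({
  plugins: [server(), analytics()],
});
```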
@@ -312,13 +284,17 @@ These are typically **provided by Databricks Apps runtime** (exact set can vary
 
 For local development, you need to authenticate with Databricks. Options:
 
-**Option 1: Databricks CLI profile (recommended)**
+**Option 1: Databricks CLI auth (recommended)**
 
 ```bash
 # Configure once
-databricks configure --profile my-profile
+databricks auth login --host [host] --profile [profile-name]
+
+# If you used `DEFAULT` as the profile name, you can then just run:
 
-# Then run with profile
+npm run dev
+
+# To run with a specific profile:
 DATABRICKS_CONFIG_PROFILE=my-profile npm run dev
 # If your Databricks SDK expects a different variable name, try:
 # DATABRICKS_PROFILE=my-profile npm run dev
@@ -462,7 +438,7 @@ HTTP endpoints exposed (mounted under `/api/analytics`):
 Formats:
 
 - `format: "JSON"` (default) returns JSON rows
-- `format: "ARROW"` returns an Arrow “external links” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
+- `format: "ARROW"` returns an Arrow “statement_id” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
 
 ### Request context (`getRequestContext()`)
 
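Only the payload label changes here; the client-side flow stays the same: read the id from the SSE event, then download binary Arrow from the endpoint. A sketch of that second step, assuming the `apache-arrow` package for decoding (the doc does not name a decoder):

```ts
import { tableFromIPC } from "apache-arrow"; // assumed decoding library

// jobId comes from the SSE payload described above.
async function fetchArrowResult(jobId: string) {
  const res = await fetch(`/api/analytics/arrow-result/${jobId}`);
  if (!res.ok) throw new Error(`arrow-result failed: ${res.status}`);
  // The endpoint serves the result as binary Arrow IPC data.
  const table = tableFromIPC(new Uint8Array(await res.arrayBuffer()));
  return table.toArray();
}
```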
@@ -980,6 +956,108 @@ function LoadingCard() {
 }
 ```
 
+## Stylesheet
+
+In the main CSS file, import the following:
+
+```css
+@import "@databricks/appkit-ui/styles.css";
+```
+
+That provides a default theme for the app using CSS variables.
+
+### Customizing theme (light/dark mode)
+
+- Full list of variables available to customize the theme:
+
+```css
+@import "@databricks/appkit-ui/styles.css";
+
+:root {
+  --radius: 0.625rem;
+  --background: oklch(1 0 0);
+  --foreground: oklch(0.141 0.005 285.823);
+  --card: oklch(1 0 0);
+  --card-foreground: oklch(0.141 0.005 285.823);
+  --popover: oklch(1 0 0);
+  --popover-foreground: oklch(0.141 0.005 285.823);
+  --primary: oklch(0.21 0.006 285.885);
+  --primary-foreground: oklch(0.985 0 0);
+  --secondary: oklch(0.967 0.001 286.375);
+  --secondary-foreground: oklch(0.21 0.006 285.885);
+  --muted: oklch(0.967 0.001 286.375);
+  --muted-foreground: oklch(0.552 0.016 285.938);
+  --accent: oklch(0.967 0.001 286.375);
+  --accent-foreground: oklch(0.21 0.006 285.885);
+  --destructive: oklch(0.577 0.245 27.325);
+  --destructive-foreground: oklch(0.985 0 0);
+  --success: oklch(0.603 0.135 166.892);
+  --success-foreground: oklch(1 0 0);
+  --warning: oklch(0.795 0.157 78.748);
+  --warning-foreground: oklch(0.199 0.027 238.732);
+  --border: oklch(0.92 0.004 286.32);
+  --input: oklch(0.92 0.004 286.32);
+  --ring: oklch(0.705 0.015 286.067);
+  --chart-1: oklch(0.646 0.222 41.116);
+  --chart-2: oklch(0.6 0.118 184.704);
+  --chart-3: oklch(0.398 0.07 227.392);
+  --chart-4: oklch(0.828 0.189 84.429);
+  --chart-5: oklch(0.769 0.188 70.08);
+  --sidebar: oklch(0.985 0 0);
+  --sidebar-foreground: oklch(0.141 0.005 285.823);
+  --sidebar-primary: oklch(0.21 0.006 285.885);
+  --sidebar-primary-foreground: oklch(0.985 0 0);
+  --sidebar-accent: oklch(0.967 0.001 286.375);
+  --sidebar-accent-foreground: oklch(0.21 0.006 285.885);
+  --sidebar-border: oklch(0.92 0.004 286.32);
+  --sidebar-ring: oklch(0.705 0.015 286.067);
+}
+
+@media (prefers-color-scheme: dark) {
+  :root {
+    --background: oklch(0.141 0.005 285.823);
+    --foreground: oklch(0.985 0 0);
+    --card: oklch(0.21 0.006 285.885);
+    --card-foreground: oklch(0.985 0 0);
+    --popover: oklch(0.21 0.006 285.885);
+    --popover-foreground: oklch(0.985 0 0);
+    --primary: oklch(0.92 0.004 286.32);
+    --primary-foreground: oklch(0.21 0.006 285.885);
+    --secondary: oklch(0.274 0.006 286.033);
+    --secondary-foreground: oklch(0.985 0 0);
+    --muted: oklch(0.274 0.006 286.033);
+    --muted-foreground: oklch(0.705 0.015 286.067);
+    --accent: oklch(0.274 0.006 286.033);
+    --accent-foreground: oklch(0.985 0 0);
+    --destructive: oklch(0.704 0.191 22.216);
+    --destructive-foreground: oklch(0.985 0 0);
+    --success: oklch(0.67 0.12 167);
+    --success-foreground: oklch(1 0 0);
+    --warning: oklch(0.83 0.165 85);
+    --warning-foreground: oklch(0.199 0.027 238.732);
+    --border: oklch(1 0 0 / 10%);
+    --input: oklch(1 0 0 / 15%);
+    --ring: oklch(0.552 0.016 285.938);
+    --chart-1: oklch(0.488 0.243 264.376);
+    --chart-2: oklch(0.696 0.17 162.48);
+    --chart-3: oklch(0.769 0.188 70.08);
+    --chart-4: oklch(0.627 0.265 303.9);
+    --chart-5: oklch(0.645 0.246 16.439);
+    --sidebar: oklch(0.21 0.006 285.885);
+    --sidebar-foreground: oklch(0.985 0 0);
+    --sidebar-primary: oklch(0.488 0.243 264.376);
+    --sidebar-primary-foreground: oklch(0.985 0 0);
+    --sidebar-accent: oklch(0.274 0.006 286.033);
+    --sidebar-accent-foreground: oklch(0.985 0 0);
+    --sidebar-border: oklch(1 0 0 / 10%);
+    --sidebar-ring: oklch(0.552 0.016 285.938);
+  }
+}
+
+```
+
+- If any variable is changed, it must be changed for both light and dark mode.
+
 
 ## Type generation (QueryRegistry + IntelliSense)
 Goal: generate `client/src/appKitTypes.d.ts` so query keys, params, and result rows are type-safe.
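
The new Stylesheet section only shows the CSS-side `@import`; since Vite also resolves CSS imports from TypeScript entry modules, the same stylesheet can be pulled in from `client/src/main.tsx`. A sketch under that assumption (the `App` component is hypothetical):

```tsx
// client/src/main.tsx: importing the stylesheet from the entry module
// is a Vite-supported alternative to @import in the main CSS file.
import "@databricks/appkit-ui/styles.css";
import { createRoot } from "react-dom/client";
import App from "./App"; // hypothetical root component

createRoot(document.getElementById("root")!).render(<App />);
```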
@@ -1054,7 +1132,6 @@ env:
 - `tsx` is in devDependencies for dev server
 - `dev` script uses `NODE_ENV=development tsx watch server/index.ts`
 - `client/index.html` exists with `<div id="root"></div>` and script pointing to `client/src/main.tsx`
-- `client/package.json` exists and includes `@databricks/appkit-ui`
 
 - **Backend**
 - `await createApp({ plugins: [...] })` is used (or `void createApp` with intent)
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@databricks/appkit",
   "type": "module",
-  "version": "0.1.2",
+  "version": "0.1.4",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
   "packageManager": "pnpm@10.21.0",
@@ -15,6 +15,7 @@
     "scripts",
     "CLAUDE.md",
     "llms.txt",
+    "AGENTS.md",
     "README.md",
     "DCO",
     "NOTICE.md"
@@ -33,7 +34,7 @@
     "clean:full": "rm -rf dist node_modules tmp",
     "clean": "rm -rf dist tmp",
     "dist": "tsx ../../tools/dist.ts",
-    "tarball": "tsx ../../tools/dist.ts && npm pack ./tmp --pack-destination ./tmp",
+    "tarball": "rm -rf tmp && tsx ../../tools/dist.ts && npm pack ./tmp --pack-destination ./tmp",
     "typecheck": "tsc --noEmit",
     "postinstall": "node scripts/postinstall.js"
   },