@databricks/appkit 0.1.1 → 0.1.2
- package/bin/generate-types.js +27 -0
- package/dist/appkit/package.js +1 -1
- package/dist/type-generator/cache.js +49 -0
- package/dist/type-generator/cache.js.map +1 -0
- package/dist/type-generator/index.js +46 -0
- package/dist/type-generator/index.js.map +1 -0
- package/dist/type-generator/query-registry.js +170 -0
- package/dist/type-generator/query-registry.js.map +1 -0
- package/dist/type-generator/spinner.js +37 -0
- package/dist/type-generator/spinner.js.map +1 -0
- package/dist/type-generator/types.js +45 -0
- package/dist/type-generator/types.js.map +1 -0
- package/dist/type-generator/vite-plugin.d.ts +1 -1
- package/dist/type-generator/vite-plugin.d.ts.map +1 -1
- package/dist/type-generator/vite-plugin.js +13 -8
- package/dist/type-generator/vite-plugin.js.map +1 -1
- package/package.json +2 -2
package/bin/generate-types.js ADDED
@@ -0,0 +1,27 @@
+#!/usr/bin/env node
+import path from "node:path";
+
+import { generateFromEntryPoint } from "../dist/type-generator/index.js";
+
+// Parse arguments
+const args = process.argv.slice(2);
+const noCache = args.includes("--no-cache");
+const positionalArgs = args.filter((arg) => !arg.startsWith("--"));
+
+const rootDir = positionalArgs[0] || process.cwd();
+const outFile =
+  positionalArgs[1] || path.join(process.cwd(), "client/src/appKitTypes.d.ts");
+
+const queryFolder = path.join(rootDir, "config/queries");
+
+const warehouseId = positionalArgs[2] || process.env.DATABRICKS_WAREHOUSE_ID;
+if (!warehouseId) {
+  throw new Error("DATABRICKS_WAREHOUSE_ID is not set");
+}
+
+await generateFromEntryPoint({
+  queryFolder,
+  outFile,
+  warehouseId,
+  noCache,
+});
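Judging from the argument parsing above, a hypothetical invocation would be 'npx appkit-generate-types . client/src/appKitTypes.d.ts <warehouse-id> --no-cache': the positional arguments are the app root (defaulting to the current working directory), the output .d.ts path, and the SQL warehouse id (falling back to the DATABRICKS_WAREHOUSE_ID environment variable), while --no-cache forces every query to be re-described.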
package/dist/appkit/package.js CHANGED
package/dist/type-generator/cache.js ADDED
@@ -0,0 +1,49 @@
+import crypto from "node:crypto";
+import path from "node:path";
+import fs from "node:fs";
+
+//#region src/type-generator/cache.ts
+const CACHE_VERSION = "1";
+const CACHE_FILE = ".appkit-types-cache.json";
+/**
+ * Hash the SQL query
+ * Uses MD5 to hash the SQL query
+ * @param sql - the SQL query to hash
+ * @returns - the hash of the SQL query
+ */
+function hashSQL(sql) {
+  return crypto.createHash("md5").update(sql).digest("hex");
+}
+/**
+ * Load the cache from the file system
+ * If the cache is not found, run the query explain
+ * @param cacheDir - the directory to load the cache from
+ * @returns - the cache
+ */
+function loadCache(cacheDir) {
+  const cachePath = path.join(cacheDir, CACHE_FILE);
+  try {
+    if (fs.existsSync(cachePath)) {
+      const cache = JSON.parse(fs.readFileSync(cachePath, "utf8"));
+      if (cache.version === CACHE_VERSION) return cache;
+    }
+  } catch {}
+  return {
+    version: CACHE_VERSION,
+    queries: {}
+  };
+}
+/**
+ * Save the cache to the file system
+ * The cache is saved as a JSON file, it is used to avoid running the query explain multiple times
+ * @param cacheDir - the directory to save the cache to
+ * @param cache - cache object to save
+ */
+function saveCache(cacheDir, cache) {
+  const cachePath = path.join(cacheDir, CACHE_FILE);
+  fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2), "utf8");
+}
+
+//#endregion
+export { CACHE_VERSION, hashSQL, loadCache, saveCache };
+//# sourceMappingURL=cache.js.map
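For reference, the cache this module reads and writes next to the SQL files is plain JSON; a minimal sketch of its shape (the query name, hash placeholder, and type string below are illustrative, not taken from the package):

.appkit-types-cache.json
{
  "version": "1",
  "queries": {
    "top_users": {
      "hash": "<md5 of the .sql file contents>",
      "type": "{ name: \"top_users\"; parameters: ...; result: Array<...>; }"
    }
  }
}

Since loadCache swallows parse errors and version mismatches and falls back to an empty cache, the file is safe to delete at any time.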
package/dist/type-generator/cache.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"file":"cache.js","names":[],"sources":["../../src/type-generator/cache.ts"],"sourcesContent":["import crypto from \"node:crypto\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\n\n/**\n * Cache types\n * @property hash - the hash of the SQL query\n * @property type - the type of the query\n */\ninterface CacheEntry {\n hash: string;\n type: string;\n}\n\n/**\n * Cache interface\n * @property version - the version of the cache\n * @property queries - the queries in the cache\n */\ninterface Cache {\n version: string;\n queries: Record<string, CacheEntry>;\n}\n\nexport const CACHE_VERSION = \"1\";\nconst CACHE_FILE = \".appkit-types-cache.json\";\n\n/**\n * Hash the SQL query\n * Uses MD5 to hash the SQL query\n * @param sql - the SQL query to hash\n * @returns - the hash of the SQL query\n */\nexport function hashSQL(sql: string): string {\n return crypto.createHash(\"md5\").update(sql).digest(\"hex\");\n}\n\n/**\n * Load the cache from the file system\n * If the cache is not found, run the query explain\n * @param cacheDir - the directory to load the cache from\n * @returns - the cache\n */\nexport function loadCache(cacheDir: string): Cache {\n const cachePath = path.join(cacheDir, CACHE_FILE);\n try {\n if (fs.existsSync(cachePath)) {\n const cache = JSON.parse(fs.readFileSync(cachePath, \"utf8\")) as Cache;\n if (cache.version === CACHE_VERSION) {\n return cache;\n }\n }\n } catch {\n // ignore cache errors\n }\n return { version: CACHE_VERSION, queries: {} };\n}\n\n/**\n * Save the cache to the file system\n * The cache is saved as a JSON file, it is used to avoid running the query explain multiple times\n * @param cacheDir - the directory to save the cache to\n * @param cache - cache object to save\n */\nexport function saveCache(cacheDir: string, cache: Cache): void {\n const cachePath = path.join(cacheDir, CACHE_FILE);\n fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2), \"utf8\");\n}\n"],"mappings":";;;;;AAwBA,MAAa,gBAAgB;AAC7B,MAAM,aAAa;;;;;;;AAQnB,SAAgB,QAAQ,KAAqB;AAC3C,QAAO,OAAO,WAAW,MAAM,CAAC,OAAO,IAAI,CAAC,OAAO,MAAM;;;;;;;;AAS3D,SAAgB,UAAU,UAAyB;CACjD,MAAM,YAAY,KAAK,KAAK,UAAU,WAAW;AACjD,KAAI;AACF,MAAI,GAAG,WAAW,UAAU,EAAE;GAC5B,MAAM,QAAQ,KAAK,MAAM,GAAG,aAAa,WAAW,OAAO,CAAC;AAC5D,OAAI,MAAM,YAAY,cACpB,QAAO;;SAGL;AAGR,QAAO;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE;;;;;;;;AAShD,SAAgB,UAAU,UAAkB,OAAoB;CAC9D,MAAM,YAAY,KAAK,KAAK,UAAU,WAAW;AACjD,IAAG,cAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,EAAE,OAAO"}
package/dist/type-generator/index.js ADDED
@@ -0,0 +1,46 @@
+import { generateQueriesFromDescribe } from "./query-registry.js";
+import fs from "node:fs";
+import dotenv from "dotenv";
+
+//#region src/type-generator/index.ts
+dotenv.config();
+/**
+ * Generate type declarations for QueryRegistry
+ * Create the d.ts file from the plugin routes and query schemas
+ * @param querySchemas - the list of query schemas
+ * @returns - the type declarations as a string
+ */
+function generateTypeDeclarations(querySchemas = []) {
+  const queryEntries = querySchemas.map(({ name, type }) => {
+    return `  ${name}: ${type.split("\n").map((line, i) => i === 0 ? line : `  ${line}`).join("\n")}`;
+  }).join(";\n");
+  return `// Auto-generated by AppKit - DO NOT EDIT
+// Generated by 'npx appkit-generate-types' or Vite plugin during build
+import "@databricks/appkit-ui/react";
+import type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from "@databricks/appkit-ui/js";
+
+declare module "@databricks/appkit-ui/react" {
+  interface QueryRegistry {${queryEntries ? `\n${queryEntries};\n  ` : ""}}
+}
+`;
+}
+/**
+ * Entry point for generating type declarations from all imported files
+ * @param options - the options for the generation
+ * @param options.entryPoint - the entry point file
+ * @param options.outFile - the output file
+ * @param options.querySchemaFile - optional path to query schema file (e.g. config/queries/schema.ts)
+ */
+async function generateFromEntryPoint(options) {
+  const { outFile, queryFolder, warehouseId, noCache } = options;
+  console.log("\n[AppKit] Starting type generation...\n");
+  let queryRegistry = [];
+  if (queryFolder) queryRegistry = await generateQueriesFromDescribe(queryFolder, warehouseId, { noCache });
+  const typeDeclarations = generateTypeDeclarations(queryRegistry);
+  fs.writeFileSync(outFile, typeDeclarations, "utf-8");
+  console.log("\n[AppKit] Type generation complete!\n");
+}
+
+//#endregion
+export { generateFromEntryPoint };
+//# sourceMappingURL=index.js.map
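To make the template concrete, the emitted appKitTypes.d.ts for a single hypothetical query with one annotated parameter (the per-entry shape is produced by query-registry.js below) would look roughly like:

// Auto-generated by AppKit - DO NOT EDIT
// Generated by 'npx appkit-generate-types' or Vite plugin during build
import "@databricks/appkit-ui/react";
import type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from "@databricks/appkit-ui/js";

declare module "@databricks/appkit-ui/react" {
  interface QueryRegistry {
    top_users: {
      name: "top_users";
      parameters: {
        /** NUMERIC - use sql.number() */
        min_total: SQLNumberMarker;
      };
      result: Array<{
        /** @sqlType STRING */
        name: string;
        /** @sqlType BIGINT */
        total: number;
      }>;
    };
  }
}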
package/dist/type-generator/index.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.js","names":["queryRegistry: QuerySchema[]"],"sources":["../../src/type-generator/index.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport dotenv from \"dotenv\";\nimport { generateQueriesFromDescribe } from \"./query-registry\";\nimport type { QuerySchema } from \"./types\";\n\ndotenv.config();\n\n/**\n * Generate type declarations for QueryRegistry\n * Create the d.ts file from the plugin routes and query schemas\n * @param querySchemas - the list of query schemas\n * @returns - the type declarations as a string\n */\nfunction generateTypeDeclarations(querySchemas: QuerySchema[] = []): string {\n const queryEntries = querySchemas\n .map(({ name, type }) => {\n const indentedType = type\n .split(\"\\n\")\n .map((line, i) => (i === 0 ? line : ` ${line}`))\n .join(\"\\n\");\n return ` ${name}: ${indentedType}`;\n })\n .join(\";\\n\");\n\n const querySection = queryEntries ? `\\n${queryEntries};\\n ` : \"\";\n\n return `// Auto-generated by AppKit - DO NOT EDIT\n// Generated by 'npx appkit-generate-types' or Vite plugin during build\nimport \"@databricks/appkit-ui/react\";\nimport type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from \"@databricks/appkit-ui/js\";\n\ndeclare module \"@databricks/appkit-ui/react\" {\n interface QueryRegistry {${querySection}}\n}\n`;\n}\n\n/**\n * Entry point for generating type declarations from all imported files\n * @param options - the options for the generation\n * @param options.entryPoint - the entry point file\n * @param options.outFile - the output file\n * @param options.querySchemaFile - optional path to query schema file (e.g. config/queries/schema.ts)\n */\nexport async function generateFromEntryPoint(options: {\n outFile: string;\n queryFolder?: string;\n warehouseId: string;\n noCache?: boolean;\n}) {\n const { outFile, queryFolder, warehouseId, noCache } = options;\n\n console.log(\"\\n[AppKit] Starting type generation...\\n\");\n\n let queryRegistry: QuerySchema[] = [];\n if (queryFolder)\n queryRegistry = await generateQueriesFromDescribe(\n queryFolder,\n warehouseId,\n {\n noCache,\n },\n );\n\n const typeDeclarations = generateTypeDeclarations(queryRegistry);\n\n fs.writeFileSync(outFile, typeDeclarations, \"utf-8\");\n\n console.log(\"\\n[AppKit] Type generation complete!\\n\");\n}\n"],"mappings":";;;;;AAKA,OAAO,QAAQ;;;;;;;AAQf,SAAS,yBAAyB,eAA8B,EAAE,EAAU;CAC1E,MAAM,eAAe,aAClB,KAAK,EAAE,MAAM,WAAW;AAKvB,SAAO,OAAO,KAAK,IAJE,KAClB,MAAM,KAAK,CACX,KAAK,MAAM,MAAO,MAAM,IAAI,OAAO,OAAO,OAAQ,CAClD,KAAK,KAAK;GAEb,CACD,KAAK,MAAM;AAId,QAAO;;;;;;6BAFc,eAAe,KAAK,aAAa,SAAS,GAQvB;;;;;;;;;;;AAY1C,eAAsB,uBAAuB,SAK1C;CACD,MAAM,EAAE,SAAS,aAAa,aAAa,YAAY;AAEvD,SAAQ,IAAI,2CAA2C;CAEvD,IAAIA,gBAA+B,EAAE;AACrC,KAAI,YACF,iBAAgB,MAAM,4BACpB,aACA,aACA,EACE,SACD,CACF;CAEH,MAAM,mBAAmB,yBAAyB,cAAc;AAEhE,IAAG,cAAc,SAAS,kBAAkB,QAAQ;AAEpD,SAAQ,IAAI,yCAAyC"}
package/dist/type-generator/query-registry.js ADDED
@@ -0,0 +1,170 @@
+import { CACHE_VERSION, hashSQL, loadCache, saveCache } from "./cache.js";
+import { Spinner } from "./spinner.js";
+import { sqlTypeToHelper, sqlTypeToMarker } from "./types.js";
+import { WorkspaceClient } from "@databricks/sdk-experimental";
+import path from "node:path";
+import fs from "node:fs";
+
+//#region src/type-generator/query-registry.ts
+/**
+ * Extract parameters from a SQL query
+ * @param sql - the SQL query to extract parameters from
+ * @returns an array of parameter names
+ */
+function extractParameters(sql) {
+  const matches = sql.matchAll(/:([a-zA-Z_]\w*)/g);
+  const params = /* @__PURE__ */ new Set();
+  for (const match of matches) params.add(match[1]);
+  return Array.from(params);
+}
+const SERVER_INJECTED_PARAMS = ["workspaceId"];
+function convertToQueryType(result, sql, queryName) {
+  const columns = (result.result?.data_array || []).map((row) => ({
+    name: row[0] || "",
+    type_name: row[1]?.toUpperCase() || "STRING",
+    comment: row[2] || void 0
+  }));
+  const params = extractParameters(sql).filter((p) => !SERVER_INJECTED_PARAMS.includes(p));
+  const paramTypes = extractParameterTypes(sql);
+  return `{
+    name: "${queryName}";
+    parameters: ${params.length > 0 ? `{\n      ${params.map((p) => {
+    const sqlType = paramTypes[p];
+    const markerType = sqlType ? sqlTypeToMarker[sqlType] : "SQLTypeMarker";
+    const helper = sqlType ? sqlTypeToHelper[sqlType] : "sql.*()";
+    return `/** ${sqlType || "any"} - use ${helper} */\n      ${p}: ${markerType}`;
+  }).join(";\n      ")};\n    }` : "Record<string, never>"};
+    result: Array<{
+      ${columns.map((column) => {
+    const mappedType = typeMap[normalizeTypeName(column.type_name)] || "unknown";
+    const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name) ? column.name : `"${column.name}"`;
+    return `${column.comment ? `/** ${column.comment} */\n      ` : `/** @sqlType ${column.type_name} */\n      `}${name}: ${mappedType}`;
+  }).join(";\n      ")};
+    }>;
+  }`;
+}
+function extractParameterTypes(sql) {
+  const paramTypes = {};
+  const matches = sql.matchAll(/--\s*@param\s+(\w+)\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi);
+  for (const match of matches) {
+    const [, paramName, paramType] = match;
+    paramTypes[paramName] = paramType.toUpperCase();
+  }
+  return paramTypes;
+}
+/**
+ * Generate query schemas from a folder of SQL files
+ * It uses DESCRIBE QUERY to get the schema without executing the query
+ * @param queryFolder - the folder containing the SQL files
+ * @param warehouseId - the warehouse id to use for schema analysis
+ * @param options - options for the query generation
+ * @param options.noCache - if true, skip the cache and regenerate all types
+ * @returns an array of query schemas
+ */
+async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {}) {
+  const { noCache = false } = options;
+  const queryFiles = fs.readdirSync(queryFolder).filter((file) => file.endsWith(".sql"));
+  console.log(` Found ${queryFiles.length} SQL queries\n`);
+  const cache = noCache ? {
+    version: CACHE_VERSION,
+    queries: {}
+  } : loadCache(queryFolder);
+  const client = new WorkspaceClient({});
+  const querySchemas = [];
+  const failedQueries = [];
+  const spinner = new Spinner();
+  for (let i = 0; i < queryFiles.length; i++) {
+    const file = queryFiles[i];
+    const queryName = path.basename(file, ".sql");
+    const sql = fs.readFileSync(path.join(queryFolder, file), "utf8");
+    const sqlHash = hashSQL(sql);
+    const cached = cache.queries[queryName];
+    if (cached && cached.hash === sqlHash) {
+      querySchemas.push({
+        name: queryName,
+        type: cached.type
+      });
+      spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);
+      spinner.stop(`✓ ${queryName} (cached)`);
+      continue;
+    }
+    spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);
+    const cleanedSql = sql.replace(/:([a-zA-Z_]\w*)/g, "''").trim().replace(/;\s*$/, "");
+    try {
+      const result = await client.statementExecution.executeStatement({
+        statement: `DESCRIBE QUERY ${cleanedSql}`,
+        warehouse_id: warehouseId
+      });
+      if (result.status.state === "FAILED") {
+        spinner.stop(`✗ ${queryName} - failed`);
+        failedQueries.push({
+          name: queryName,
+          error: "Query execution failed"
+        });
+        continue;
+      }
+      const type = convertToQueryType(result, sql, queryName);
+      querySchemas.push({
+        name: queryName,
+        type
+      });
+      cache.queries[queryName] = {
+        hash: sqlHash,
+        type
+      };
+      spinner.stop(`✓ ${queryName}`);
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : "Unknown error";
+      spinner.stop(`✗ ${queryName} - ${errorMessage}`);
+      failedQueries.push({
+        name: queryName,
+        error: errorMessage
+      });
+    }
+  }
+  saveCache(queryFolder, cache);
+  if (failedQueries.length > 0) console.warn(` Warning: ${failedQueries.length} queries failed\n`);
+  return querySchemas;
+}
+/**
+ * Normalize SQL type name by removing parameters/generics
+ * Examples:
+ * DECIMAL(38,6) -> DECIMAL
+ * ARRAY<STRING> -> ARRAY
+ * MAP<STRING,INT> -> MAP
+ * STRUCT<name:STRING> -> STRUCT
+ * INTERVAL DAY TO SECOND -> INTERVAL
+ * GEOGRAPHY(4326) -> GEOGRAPHY
+ */
+function normalizeTypeName(typeName) {
+  return typeName.replace(/\(.*\)$/, "").replace(/<.*>$/, "").split(" ")[0];
+}
+/** Type Map for Databricks data types to JavaScript types */
+const typeMap = {
+  STRING: "string",
+  BINARY: "string",
+  BOOLEAN: "boolean",
+  TINYINT: "number",
+  SMALLINT: "number",
+  INT: "number",
+  BIGINT: "number",
+  FLOAT: "number",
+  DOUBLE: "number",
+  DECIMAL: "number",
+  DATE: "string",
+  TIMESTAMP: "string",
+  TIMESTAMP_NTZ: "string",
+  INTERVAL: "string",
+  ARRAY: "unknown[]",
+  MAP: "Record<string, unknown>",
+  STRUCT: "Record<string, unknown>",
+  OBJECT: "Record<string, unknown>",
+  VARIANT: "unknown",
+  GEOGRAPHY: "unknown",
+  GEOMETRY: "unknown",
+  VOID: "null"
+};
+
+//#endregion
+export { generateQueriesFromDescribe };
+//# sourceMappingURL=query-registry.js.map
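As a sketch of the conventions this module understands (the file, table, and column names here are illustrative), a query file such as config/queries/top_users.sql could read:

-- @param min_total NUMERIC
SELECT name, SUM(amount) AS total
FROM sales
GROUP BY name
HAVING SUM(amount) > :min_total;

extractParameters picks up :min_total, the -- @param hint types it as NUMERIC (SQLNumberMarker with a sql.number() JSDoc hint), and for schema analysis the placeholder is replaced with '' and the trailing semicolon stripped before the statement is submitted to the warehouse as DESCRIBE QUERY SELECT ..., so the query itself is never executed.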
package/dist/type-generator/query-registry.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"file":"query-registry.js","names":["paramTypes: Record<string, string>","querySchemas: QuerySchema[]","failedQueries: { name: string; error: string }[]","typeMap: Record<string, string>"],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(sql: string): string[] {\n const matches = sql.matchAll(/:([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n params.add(match[1]);\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): string {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n\n const paramTypes = extractParameterTypes(sql);\n\n // generate parameters types with JSDoc hints\n const paramsType =\n params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n // if no type annotation, use SQLTypeMarker (union type)\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? 
`/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: Array<{\n ${resultFields.join(\";\\n \")};\n }>;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false } = options;\n\n // read all query files in the folder\n const queryFiles = fs\n .readdirSync(queryFolder)\n .filter((file) => file.endsWith(\".sql\"));\n\n console.log(` Found ${queryFiles.length} SQL queries\\n`);\n\n // load cache\n const cache = noCache\n ? { version: CACHE_VERSION, queries: {} }\n : loadCache(queryFolder);\n\n const client = new WorkspaceClient({});\n const querySchemas: QuerySchema[] = [];\n const failedQueries: { name: string; error: string }[] = [];\n const spinner = new Spinner();\n\n // process each query file\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const queryName = path.basename(file, \".sql\");\n\n // read query file content\n const sql = fs.readFileSync(path.join(queryFolder, file), \"utf8\");\n const sqlHash = hashSQL(sql);\n\n // check cache\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash) {\n querySchemas.push({ name: queryName, type: cached.type });\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n spinner.stop(`✓ ${queryName} (cached)`);\n continue;\n }\n\n spinner.start(`Processing ${queryName} (${i + 1}/${queryFiles.length})`);\n\n const sqlWithDefaults = sql.replace(/:([a-zA-Z_]\\w*)/g, \"''\");\n\n // strip trailing semicolon for DESCRIBE QUERY\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n\n // execute DESCRIBE QUERY to get schema without running the actual query\n try {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n if (result.status.state === \"FAILED\") {\n spinner.stop(`✗ ${queryName} - failed`);\n failedQueries.push({\n name: queryName,\n error: \"Query execution failed\",\n });\n continue;\n }\n\n // convert result to query schema\n const type = convertToQueryType(result, sql, queryName);\n querySchemas.push({ name: queryName, type });\n\n // update cache\n cache.queries[queryName] = { hash: sqlHash, type };\n\n spinner.stop(`✓ ${queryName}`);\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : \"Unknown error\";\n spinner.stop(`✗ ${queryName} - ${errorMessage}`);\n failedQueries.push({ name: queryName, error: errorMessage });\n }\n }\n\n // save cache\n saveCache(queryFolder, cache);\n\n // log warning if there are failed queries\n if (failedQueries.length > 0) {\n console.warn(` Warning: ${failedQueries.length} queries failed\\n`);\n }\n\n return querySchemas;\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: \"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;;;;AAiBA,SAAgB,kBAAkB,KAAuB;CACvD,MAAM,UAAU,IAAI,SAAS,mBAAmB;CAChD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,QAAO,IAAI,MAAM,GAAG;AAEtB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;AAErD,SAAgB,mBACd,QACA,KACA,WACQ;CAER,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CAED,MAAM,aAAa,sBAAsB,IAAI;AAmC7C,QAAO;aACI,UAAU;kBAhCnB,OAAO,SAAS,IACZ,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAE3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB,wBAqBqB;;QAlBN,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAOpB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B,CAMiB,KAAK,YAAY,CAAC;;;;AAKvC,SAAgB,sBAAsB,KAAqC;CACzE,MAAMA,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAiC,EAAE,EACX;CACxB,MAAM,EAAE,UAAU,UAAU;CAG5B,MAAM,aAAa,GAChB,YAAY,YAAY,CACxB,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AAE1C,SAAQ,IAAI,WAAW,WAAW,OAAO,gBAAgB;CAGzD,MAAM,QAAQ,UACV;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GACvC,UAAU,YAAY;CAE1B,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAMC,eAA8B,EAAE;CACtC,MAAMC,gBAAmD,EAAE;CAC3D,MAAM,UAAU,IAAI,SAAS;AAG7B,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EACxB,MAAM,YAAY,KAAK,SAAS,MAAM,OAAO;EAG7C,MAAM,MAAM,GAAG,aAAa,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO;EACjE,MAAM,UAAU,QAAQ,IAAI;EAG5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,SAAS;AACrC,gBAAa,KAAK;IAAE,MAAM;IAAW,MAAM,OAAO;IAAM,
CAAC;AACzD,WAAQ,MAAM,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;AACxE,WAAQ,KAAK,KAAK,UAAU,WAAW;AACvC;;AAGF,UAAQ,MAAM,cAAc,UAAU,IAAI,IAAI,EAAE,GAAG,WAAW,OAAO,GAAG;EAKxE,MAAM,aAHkB,IAAI,QAAQ,oBAAoB,KAAK,CAG1B,MAAM,CAAC,QAAQ,SAAS,GAAG;AAG9D,MAAI;GACF,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF,OAAI,OAAO,OAAO,UAAU,UAAU;AACpC,YAAQ,KAAK,KAAK,UAAU,WAAW;AACvC,kBAAc,KAAK;KACjB,MAAM;KACN,OAAO;KACR,CAAC;AACF;;GAIF,MAAM,OAAO,mBAAmB,QAAQ,KAAK,UAAU;AACvD,gBAAa,KAAK;IAAE,MAAM;IAAW;IAAM,CAAC;AAG5C,SAAM,QAAQ,aAAa;IAAE,MAAM;IAAS;IAAM;AAElD,WAAQ,KAAK,KAAK,YAAY;WACvB,OAAO;GACd,MAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU;AAC3C,WAAQ,KAAK,KAAK,UAAU,KAAK,eAAe;AAChD,iBAAc,KAAK;IAAE,MAAM;IAAW,OAAO;IAAc,CAAC;;;AAKhE,WAAU,aAAa,MAAM;AAG7B,KAAI,cAAc,SAAS,EACzB,SAAQ,KAAK,cAAc,cAAc,OAAO,mBAAmB;AAGrE,QAAO;;;;;;;;;;;;AAaT,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAMC,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
package/dist/type-generator/spinner.js ADDED
@@ -0,0 +1,37 @@
+//#region src/type-generator/spinner.ts
+/**
+ * Simple loading spinner for CLI
+ */
+var Spinner = class {
+  constructor() {
+    this.frames = [
+      "   ",
+      ".  ",
+      ".. ",
+      "..."
+    ];
+    this.current = 0;
+    this.interval = null;
+    this.text = "";
+  }
+  start(text) {
+    this.text = text;
+    this.current = 0;
+    process.stdout.write(` ${this.text}${this.frames[0]}`);
+    this.interval = setInterval(() => {
+      this.current = (this.current + 1) % this.frames.length;
+      process.stdout.write(`\r ${this.text}${this.frames[this.current]}`);
+    }, 300);
+  }
+  stop(finalText) {
+    if (this.interval) {
+      clearInterval(this.interval);
+      this.interval = null;
+    }
+    process.stdout.write(`\x1b[2K\r ${finalText || this.text}\n`);
+  }
+};
+
+//#endregion
+export { Spinner };
+//# sourceMappingURL=spinner.js.map
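A minimal usage sketch of this class (the message text is illustrative):

const spinner = new Spinner();
spinner.start("Processing top_users (1/3)");
// ...await the DESCRIBE QUERY call...
spinner.stop("✓ top_users"); // clears the line and prints the final text

When stop() is called without an argument it falls back to the text passed to start().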
package/dist/type-generator/spinner.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"file":"spinner.js","names":[],"sources":["../../src/type-generator/spinner.ts"],"sourcesContent":["/**\n * Simple loading spinner for CLI\n */\nexport class Spinner {\n private frames = [\" \", \". \", \".. \", \"...\"];\n private current = 0;\n private interval: NodeJS.Timeout | null = null;\n private text = \"\";\n\n start(text: string) {\n this.text = text;\n this.current = 0;\n process.stdout.write(` ${this.text}${this.frames[0]}`);\n this.interval = setInterval(() => {\n this.current = (this.current + 1) % this.frames.length;\n process.stdout.write(`\\r ${this.text}${this.frames[this.current]}`);\n }, 300);\n }\n\n stop(finalText?: string) {\n if (this.interval) {\n clearInterval(this.interval);\n this.interval = null;\n }\n // clear the line and write the final text\n process.stdout.write(`\\x1b[2K\\r ${finalText || this.text}\\n`);\n }\n}\n"],"mappings":";;;;AAGA,IAAa,UAAb,MAAqB;;gBACF;GAAC;GAAO;GAAO;GAAO;GAAM;iBAC3B;kBACwB;cAC3B;;CAEf,MAAM,MAAc;AAClB,OAAK,OAAO;AACZ,OAAK,UAAU;AACf,UAAQ,OAAO,MAAM,KAAK,KAAK,OAAO,KAAK,OAAO,KAAK;AACvD,OAAK,WAAW,kBAAkB;AAChC,QAAK,WAAW,KAAK,UAAU,KAAK,KAAK,OAAO;AAChD,WAAQ,OAAO,MAAM,OAAO,KAAK,OAAO,KAAK,OAAO,KAAK,WAAW;KACnE,IAAI;;CAGT,KAAK,WAAoB;AACvB,MAAI,KAAK,UAAU;AACjB,iBAAc,KAAK,SAAS;AAC5B,QAAK,WAAW;;AAGlB,UAAQ,OAAO,MAAM,cAAc,aAAa,KAAK,KAAK,IAAI"}
package/dist/type-generator/types.js ADDED
@@ -0,0 +1,45 @@
+//#region src/type-generator/types.ts
+/**
+ * Map of SQL types to their corresponding marker types
+ * Used to convert SQL types to their corresponding marker types
+ */
+const sqlTypeToMarker = {
+  STRING: "SQLStringMarker",
+  BINARY: "SQLBinaryMarker",
+  BOOLEAN: "SQLBooleanMarker",
+  NUMERIC: "SQLNumberMarker",
+  INT: "SQLNumberMarker",
+  BIGINT: "SQLNumberMarker",
+  TINYINT: "SQLNumberMarker",
+  SMALLINT: "SQLNumberMarker",
+  FLOAT: "SQLNumberMarker",
+  DOUBLE: "SQLNumberMarker",
+  DECIMAL: "SQLNumberMarker",
+  DATE: "SQLDateMarker",
+  TIMESTAMP: "SQLTimestampMarker",
+  TIMESTAMP_NTZ: "SQLTimestampMarker"
+};
+/**
+ * Map of SQL types to their corresponding helper function names
+ * Used to generate JSDoc hints for parameters
+ */
+const sqlTypeToHelper = {
+  STRING: "sql.string()",
+  BINARY: "sql.binary()",
+  BOOLEAN: "sql.boolean()",
+  NUMERIC: "sql.number()",
+  INT: "sql.number()",
+  BIGINT: "sql.number()",
+  TINYINT: "sql.number()",
+  SMALLINT: "sql.number()",
+  FLOAT: "sql.number()",
+  DOUBLE: "sql.number()",
+  DECIMAL: "sql.number()",
+  DATE: "sql.date()",
+  TIMESTAMP: "sql.timestamp()",
+  TIMESTAMP_NTZ: "sql.timestamp()"
+};
+
+//#endregion
+export { sqlTypeToHelper, sqlTypeToMarker };
+//# sourceMappingURL=types.js.map
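Both tables are keyed by the uppercased SQL type; a few illustrative lookups:

sqlTypeToMarker["DATE"];    // "SQLDateMarker"
sqlTypeToHelper["DATE"];    // "sql.date()"
sqlTypeToMarker["DECIMAL"]; // "SQLNumberMarker"

Types without an entry (including parameters with no -- @param annotation) fall back to the SQLTypeMarker union and the generic "sql.*()" hint in query-registry.js.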
package/dist/type-generator/types.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"file":"types.js","names":["sqlTypeToMarker: Record<string, string>","sqlTypeToHelper: Record<string, string>"],"sources":["../../src/type-generator/types.ts"],"sourcesContent":["/**\n * Databricks statement execution response interface for DESCRIBE QUERY\n * @property statement_id - the id of the statement\n * @property status - the status of the statement\n * @property result - the result containing column schema as rows [col_name, data_type, comment]\n */\nexport interface DatabricksStatementExecutionResponse {\n statement_id: string;\n status: { state: string };\n result?: {\n data_array?: (string | null)[][];\n };\n}\n\n/**\n * Map of SQL types to their corresponding marker types\n * Used to convert SQL types to their corresponding marker types\n */\nexport const sqlTypeToMarker: Record<string, string> = {\n // string\n STRING: \"SQLStringMarker\",\n BINARY: \"SQLBinaryMarker\",\n // boolean\n BOOLEAN: \"SQLBooleanMarker\",\n // numeric\n NUMERIC: \"SQLNumberMarker\",\n INT: \"SQLNumberMarker\",\n BIGINT: \"SQLNumberMarker\",\n TINYINT: \"SQLNumberMarker\",\n SMALLINT: \"SQLNumberMarker\",\n FLOAT: \"SQLNumberMarker\",\n DOUBLE: \"SQLNumberMarker\",\n DECIMAL: \"SQLNumberMarker\",\n // date/time\n DATE: \"SQLDateMarker\",\n TIMESTAMP: \"SQLTimestampMarker\",\n TIMESTAMP_NTZ: \"SQLTimestampMarker\",\n};\n\n/**\n * Map of SQL types to their corresponding helper function names\n * Used to generate JSDoc hints for parameters\n */\nexport const sqlTypeToHelper: Record<string, string> = {\n // string\n STRING: \"sql.string()\",\n BINARY: \"sql.binary()\",\n // boolean\n BOOLEAN: \"sql.boolean()\",\n // numeric\n NUMERIC: \"sql.number()\",\n INT: \"sql.number()\",\n BIGINT: \"sql.number()\",\n TINYINT: \"sql.number()\",\n SMALLINT: \"sql.number()\",\n FLOAT: \"sql.number()\",\n DOUBLE: \"sql.number()\",\n DECIMAL: \"sql.number()\",\n // date/time\n DATE: \"sql.date()\",\n TIMESTAMP: \"sql.timestamp()\",\n TIMESTAMP_NTZ: \"sql.timestamp()\",\n};\n\n/**\n * Query schema interface\n * @property name - the name of the query\n * @property type - the type of the query (string, number, boolean, object, array, etc.)\n */\nexport interface QuerySchema {\n name: string;\n type: string;\n}\n"],"mappings":";;;;;AAkBA,MAAaA,kBAA0C;CAErD,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,KAAK;CACL,QAAQ;CACR,SAAS;CACT,UAAU;CACV,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CAChB;;;;;AAMD,MAAaC,kBAA0C;CAErD,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,KAAK;CACL,QAAQ;CACR,SAAS;CACT,UAAU;CACV,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CAChB"}
package/dist/type-generator/vite-plugin.d.ts CHANGED
@@ -12,7 +12,7 @@ interface AppKitTypesPluginOptions {
 }
 /**
  * Vite plugin to generate types for AppKit queries.
- * Calls
+ * Calls generateFromEntryPoint under the hood.
  * @param options - Options to override default values.
  * @returns Vite plugin to generate types for AppKit queries.
  */
package/dist/type-generator/vite-plugin.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;
+{"version":3,"file":"vite-plugin.d.ts","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":[],"mappings":";;;;;;AACmC;AAmBnC,UAbU,wBAAA,CAauB;EAAA,OAAA,CAAA,EAAA,MAAA;;cAAsC,CAAA,EAAA,MAAA,EAAA;;;;;;;;iBAAvD,iBAAA,WAA4B,2BAA2B"}
package/dist/type-generator/vite-plugin.js CHANGED
@@ -1,23 +1,29 @@
+import { generateFromEntryPoint } from "./index.js";
 import path from "node:path";
-import { execSync } from "node:child_process";
 
 //#region src/type-generator/vite-plugin.ts
 /**
  * Vite plugin to generate types for AppKit queries.
- * Calls
+ * Calls generateFromEntryPoint under the hood.
  * @param options - Options to override default values.
  * @returns Vite plugin to generate types for AppKit queries.
  */
 function appKitTypesPlugin(options) {
   let root;
-  let appRoot;
   let outFile;
   let watchFolders;
-  function generate() {
+  async function generate() {
     try {
-
-
-
+      const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || "";
+      if (!warehouseId) {
+        console.warn("[AppKit] Warehouse ID not found. Skipping type generation.");
+        return;
+      }
+      await generateFromEntryPoint({
+        outFile,
+        queryFolder: watchFolders[0],
+        warehouseId,
+        noCache: false
       });
     } catch (error) {
       if (process.env.NODE_ENV === "production") throw error;
@@ -28,7 +34,6 @@ function appKitTypesPlugin(options) {
     name: "appkit-types",
     configResolved(config) {
       root = config.root;
-      appRoot = path.resolve(root, "..");
       outFile = path.resolve(root, options?.outFile ?? "src/appKitTypes.d.ts");
       watchFolders = (options?.watchFolders ?? ["../config/queries"]).map((folder) => path.resolve(root, folder));
     },
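For context, a minimal vite.config.ts wiring this plugin up might look like the following (the import specifier is an assumption; the option values shown are simply the defaults from configResolved):

import { defineConfig } from "vite";
import { appKitTypesPlugin } from "@databricks/appkit"; // assumed export path

export default defineConfig({
  plugins: [
    appKitTypesPlugin({
      outFile: "src/appKitTypes.d.ts", // resolved against the Vite root
      watchFolders: ["../config/queries"], // .sql changes here re-run generation
    }),
  ],
});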
package/dist/type-generator/vite-plugin.js.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"vite-plugin.js","names":["root: string","
{"version":3,"file":"vite-plugin.js","names":["root: string","outFile: string","watchFolders: string[]"],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport { generateFromEntryPoint } from \"./index\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = (options?.watchFolders ?? [\"../config/queries\"]).map(\n (folder) => path.resolve(root, folder),\n );\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;AAoBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAIA;CACJ,IAAIC;CACJ,IAAIC;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,YAAQ,KACN,6DACD;AACD;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EAEN,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,mBAAgB,SAAS,gBAAgB,CAAC,oBAAoB,EAAE,KAC7D,WAAW,KAAK,QAAQ,MAAM,OAAO,CACvC;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@databricks/appkit",
   "type": "module",
-  "version": "0.1.1",
+  "version": "0.1.2",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
   "packageManager": "pnpm@10.21.0",
@@ -24,7 +24,7 @@
     "./package.json": "./package.json"
   },
   "bin": {
-    "appkit-generate-types": "./bin/generate-types.
+    "appkit-generate-types": "./bin/generate-types.js",
     "appkit-setup": "./bin/setup-claude.js"
   },
   "scripts": {