@databricks/appkit 0.22.0 → 0.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/CLAUDE.md +10 -0
  2. package/NOTICE.md +1 -0
  3. package/dist/appkit/package.js +1 -1
  4. package/dist/cli/commands/generate-types.js +15 -13
  5. package/dist/cli/commands/generate-types.js.map +1 -1
  6. package/dist/connectors/serving/client.js +47 -0
  7. package/dist/connectors/serving/client.js.map +1 -0
  8. package/dist/index.d.ts +6 -1
  9. package/dist/index.js +4 -1
  10. package/dist/index.js.map +1 -1
  11. package/dist/plugin/execution-result.d.ts +26 -0
  12. package/dist/plugin/execution-result.d.ts.map +1 -0
  13. package/dist/plugin/index.d.ts +1 -0
  14. package/dist/plugin/interceptors/retry.js +1 -1
  15. package/dist/plugin/interceptors/retry.js.map +1 -1
  16. package/dist/plugin/plugin.d.ts +7 -4
  17. package/dist/plugin/plugin.d.ts.map +1 -1
  18. package/dist/plugin/plugin.js +36 -5
  19. package/dist/plugin/plugin.js.map +1 -1
  20. package/dist/plugins/analytics/analytics.d.ts.map +1 -1
  21. package/dist/plugins/analytics/analytics.js +2 -3
  22. package/dist/plugins/analytics/analytics.js.map +1 -1
  23. package/dist/plugins/files/plugin.d.ts +1 -0
  24. package/dist/plugins/files/plugin.d.ts.map +1 -1
  25. package/dist/plugins/files/plugin.js +36 -59
  26. package/dist/plugins/files/plugin.js.map +1 -1
  27. package/dist/plugins/index.d.ts +4 -1
  28. package/dist/plugins/index.js +2 -0
  29. package/dist/plugins/server/index.d.ts +1 -1
  30. package/dist/plugins/server/vite-dev-server.js +6 -1
  31. package/dist/plugins/server/vite-dev-server.js.map +1 -1
  32. package/dist/plugins/serving/defaults.js +10 -0
  33. package/dist/plugins/serving/defaults.js.map +1 -0
  34. package/dist/plugins/serving/index.d.ts +2 -0
  35. package/dist/plugins/serving/index.js +3 -0
  36. package/dist/plugins/serving/manifest.js +53 -0
  37. package/dist/plugins/serving/manifest.js.map +1 -0
  38. package/dist/plugins/serving/schema-filter.js +52 -0
  39. package/dist/plugins/serving/schema-filter.js.map +1 -0
  40. package/dist/plugins/serving/serving.d.ts +38 -0
  41. package/dist/plugins/serving/serving.d.ts.map +1 -0
  42. package/dist/plugins/serving/serving.js +213 -0
  43. package/dist/plugins/serving/serving.js.map +1 -0
  44. package/dist/plugins/serving/types.d.ts +58 -0
  45. package/dist/plugins/serving/types.d.ts.map +1 -0
  46. package/dist/shared/src/execute.d.ts +1 -1
  47. package/dist/stream/stream-manager.js +1 -0
  48. package/dist/stream/stream-manager.js.map +1 -1
  49. package/dist/stream/types.js +2 -1
  50. package/dist/stream/types.js.map +1 -1
  51. package/dist/type-generator/cache.js +1 -1
  52. package/dist/type-generator/cache.js.map +1 -1
  53. package/dist/type-generator/index.js +3 -1
  54. package/dist/type-generator/index.js.map +1 -1
  55. package/dist/type-generator/query-registry.js +77 -4
  56. package/dist/type-generator/query-registry.js.map +1 -1
  57. package/dist/type-generator/serving/cache.js +38 -0
  58. package/dist/type-generator/serving/cache.js.map +1 -0
  59. package/dist/type-generator/serving/converter.js +108 -0
  60. package/dist/type-generator/serving/converter.js.map +1 -0
  61. package/dist/type-generator/serving/fetcher.js +54 -0
  62. package/dist/type-generator/serving/fetcher.js.map +1 -0
  63. package/dist/type-generator/serving/generator.js +185 -0
  64. package/dist/type-generator/serving/generator.js.map +1 -0
  65. package/dist/type-generator/serving/server-file-extractor.d.ts +22 -0
  66. package/dist/type-generator/serving/server-file-extractor.d.ts.map +1 -0
  67. package/dist/type-generator/serving/server-file-extractor.js +131 -0
  68. package/dist/type-generator/serving/server-file-extractor.js.map +1 -0
  69. package/dist/type-generator/serving/vite-plugin.d.ts +24 -0
  70. package/dist/type-generator/serving/vite-plugin.d.ts.map +1 -0
  71. package/dist/type-generator/serving/vite-plugin.js +60 -0
  72. package/dist/type-generator/serving/vite-plugin.js.map +1 -0
  73. package/docs/api/appkit/Class.Plugin.md +8 -3
  74. package/docs/api/appkit/Function.appKitServingTypesPlugin.md +24 -0
  75. package/docs/api/appkit/Function.extractServingEndpoints.md +22 -0
  76. package/docs/api/appkit/Function.findServerFile.md +20 -0
  77. package/docs/api/appkit/Interface.EndpointConfig.md +23 -0
  78. package/docs/api/appkit/Interface.ServingEndpointEntry.md +30 -0
  79. package/docs/api/appkit/Interface.ServingEndpointRegistry.md +3 -0
  80. package/docs/api/appkit/TypeAlias.ExecutionResult.md +36 -0
  81. package/docs/api/appkit/TypeAlias.ServingFactory.md +15 -0
  82. package/docs/api/appkit.md +39 -31
  83. package/docs/faq.md +66 -0
  84. package/docs/plugins/serving.md +223 -0
  85. package/llms.txt +10 -0
  86. package/package.json +2 -2
  87. package/sbom.cdx.json +1 -1
@@ -1,5 +1,6 @@
1
1
  import { createLogger } from "../logging/logger.js";
2
2
  import { generateQueriesFromDescribe } from "./query-registry.js";
3
+ import { generateServingTypes as generateServingTypes$1 } from "./serving/generator.js";
3
4
  import fs from "node:fs/promises";
4
5
  import dotenv from "dotenv";
5
6
 
@@ -52,7 +53,8 @@ async function generateFromEntryPoint(options) {
52
53
  await fs.writeFile(outFile, typeDeclarations, "utf-8");
53
54
  logger.debug("Type generation complete!");
54
55
  }
56
+ const generateServingTypes = generateServingTypes$1;
55
57
 
56
58
  //#endregion
57
- export { generateFromEntryPoint };
59
+ export { generateFromEntryPoint, generateServingTypes };
58
60
  //# sourceMappingURL=index.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":[],"sources":["../../src/type-generator/index.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport dotenv from \"dotenv\";\nimport { createLogger } from \"../logging/logger\";\nimport { generateQueriesFromDescribe } from \"./query-registry\";\nimport type { QuerySchema } from \"./types\";\n\ndotenv.config();\n\nconst logger = createLogger(\"type-generator\");\n\n/**\n * Generate type declarations for QueryRegistry\n * Create the d.ts file from the plugin routes and query schemas\n * @param querySchemas - the list of query schemas\n * @returns - the type declarations as a string\n */\nfunction generateTypeDeclarations(querySchemas: QuerySchema[] = []): string {\n const queryEntries = querySchemas\n .map(({ name, type }) => {\n const indentedType = type\n .split(\"\\n\")\n .map((line, i) => (i === 0 ? line : ` ${line}`))\n .join(\"\\n\");\n return ` ${name}: ${indentedType}`;\n })\n .join(\";\\n\");\n\n const querySection = queryEntries ? `\\n${queryEntries};\\n ` : \"\";\n\n return `// Auto-generated by AppKit - DO NOT EDIT\n// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build\nimport \"@databricks/appkit-ui/react\";\nimport type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from \"@databricks/appkit-ui/js\";\n\ndeclare module \"@databricks/appkit-ui/react\" {\n interface QueryRegistry {${querySection}}\n}\n`;\n}\n\n/**\n * Entry point for generating type declarations from all imported files\n * @param options - the options for the generation\n * @param options.entryPoint - the entry point file\n * @param options.outFile - the output file\n * @param options.querySchemaFile - optional path to query schema file (e.g. 
config/queries/schema.ts)\n */\nexport async function generateFromEntryPoint(options: {\n outFile: string;\n queryFolder?: string;\n warehouseId: string;\n noCache?: boolean;\n}) {\n const { outFile, queryFolder, warehouseId, noCache } = options;\n\n logger.debug(\"Starting type generation...\");\n\n let queryRegistry: QuerySchema[] = [];\n if (queryFolder)\n queryRegistry = await generateQueriesFromDescribe(\n queryFolder,\n warehouseId,\n {\n noCache,\n },\n );\n\n const failedQueries = queryRegistry.filter((q) =>\n q.type.includes(\"result: unknown\"),\n );\n if (failedQueries.length > 0) {\n const names = failedQueries.map((q) => q.name).join(\", \");\n throw new Error(\n [\n `Type generation failed: ${failedQueries.length} ${failedQueries.length === 1 ? \"query\" : \"queries\"} could not be described: ${names}.`,\n `DESCRIBE QUERY failed for these queries — see the error codes above for details.`,\n `Common causes: SQL syntax errors, missing tables/views, or warehouse format incompatibilities.`,\n `To debug: run the failing query directly in a SQL editor against warehouse ${warehouseId}.`,\n ].join(\"\\n\"),\n );\n }\n\n const typeDeclarations = generateTypeDeclarations(queryRegistry);\n\n await fs.writeFile(outFile, typeDeclarations, \"utf-8\");\n\n logger.debug(\"Type generation 
complete!\");\n}\n"],"mappings":";;;;;;AAMA,OAAO,QAAQ;AAEf,MAAM,SAAS,aAAa,iBAAiB;;;;;;;AAQ7C,SAAS,yBAAyB,eAA8B,EAAE,EAAU;CAC1E,MAAM,eAAe,aAClB,KAAK,EAAE,MAAM,WAAW;AAKvB,SAAO,OAAO,KAAK,IAJE,KAClB,MAAM,KAAK,CACX,KAAK,MAAM,MAAO,MAAM,IAAI,OAAO,OAAO,OAAQ,CAClD,KAAK,KAAK;GAEb,CACD,KAAK,MAAM;AAId,QAAO;;;;;;6BAFc,eAAe,KAAK,aAAa,SAAS,GAQvB;;;;;;;;;;;AAY1C,eAAsB,uBAAuB,SAK1C;CACD,MAAM,EAAE,SAAS,aAAa,aAAa,YAAY;AAEvD,QAAO,MAAM,8BAA8B;CAE3C,IAAI,gBAA+B,EAAE;AACrC,KAAI,YACF,iBAAgB,MAAM,4BACpB,aACA,aACA,EACE,SACD,CACF;CAEH,MAAM,gBAAgB,cAAc,QAAQ,MAC1C,EAAE,KAAK,SAAS,kBAAkB,CACnC;AACD,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,QAAQ,cAAc,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK;AACzD,QAAM,IAAI,MACR;GACE,2BAA2B,cAAc,OAAO,GAAG,cAAc,WAAW,IAAI,UAAU,UAAU,2BAA2B,MAAM;GACrI;GACA;GACA,8EAA8E,YAAY;GAC3F,CAAC,KAAK,KAAK,CACb;;CAGH,MAAM,mBAAmB,yBAAyB,cAAc;AAEhE,OAAM,GAAG,UAAU,SAAS,kBAAkB,QAAQ;AAEtD,QAAO,MAAM,4BAA4B"}
1
+ {"version":3,"file":"index.js","names":["generateServingTypesImpl"],"sources":["../../src/type-generator/index.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport dotenv from \"dotenv\";\nimport { createLogger } from \"../logging/logger\";\nimport { generateQueriesFromDescribe } from \"./query-registry\";\nimport { generateServingTypes as generateServingTypesImpl } from \"./serving/generator\";\nimport type { QuerySchema } from \"./types\";\n\ndotenv.config();\n\nconst logger = createLogger(\"type-generator\");\n\n/**\n * Generate type declarations for QueryRegistry\n * Create the d.ts file from the plugin routes and query schemas\n * @param querySchemas - the list of query schemas\n * @returns - the type declarations as a string\n */\nfunction generateTypeDeclarations(querySchemas: QuerySchema[] = []): string {\n const queryEntries = querySchemas\n .map(({ name, type }) => {\n const indentedType = type\n .split(\"\\n\")\n .map((line, i) => (i === 0 ? line : ` ${line}`))\n .join(\"\\n\");\n return ` ${name}: ${indentedType}`;\n })\n .join(\";\\n\");\n\n const querySection = queryEntries ? `\\n${queryEntries};\\n ` : \"\";\n\n return `// Auto-generated by AppKit - DO NOT EDIT\n// Generated by 'npx @databricks/appkit generate-types' or Vite plugin during build\nimport \"@databricks/appkit-ui/react\";\nimport type { SQLTypeMarker, SQLStringMarker, SQLNumberMarker, SQLBooleanMarker, SQLBinaryMarker, SQLDateMarker, SQLTimestampMarker } from \"@databricks/appkit-ui/js\";\n\ndeclare module \"@databricks/appkit-ui/react\" {\n interface QueryRegistry {${querySection}}\n}\n`;\n}\n\n/**\n * Entry point for generating type declarations from all imported files\n * @param options - the options for the generation\n * @param options.entryPoint - the entry point file\n * @param options.outFile - the output file\n * @param options.querySchemaFile - optional path to query schema file (e.g. 
config/queries/schema.ts)\n */\nexport async function generateFromEntryPoint(options: {\n outFile: string;\n queryFolder?: string;\n warehouseId: string;\n noCache?: boolean;\n}) {\n const { outFile, queryFolder, warehouseId, noCache } = options;\n\n logger.debug(\"Starting type generation...\");\n\n let queryRegistry: QuerySchema[] = [];\n if (queryFolder)\n queryRegistry = await generateQueriesFromDescribe(\n queryFolder,\n warehouseId,\n {\n noCache,\n },\n );\n\n const failedQueries = queryRegistry.filter((q) =>\n q.type.includes(\"result: unknown\"),\n );\n if (failedQueries.length > 0) {\n const names = failedQueries.map((q) => q.name).join(\", \");\n throw new Error(\n [\n `Type generation failed: ${failedQueries.length} ${failedQueries.length === 1 ? \"query\" : \"queries\"} could not be described: ${names}.`,\n `DESCRIBE QUERY failed for these queries — see the error codes above for details.`,\n `Common causes: SQL syntax errors, missing tables/views, or warehouse format incompatibilities.`,\n `To debug: run the failing query directly in a SQL editor against warehouse ${warehouseId}.`,\n ].join(\"\\n\"),\n );\n }\n\n const typeDeclarations = generateTypeDeclarations(queryRegistry);\n\n await fs.writeFile(outFile, typeDeclarations, \"utf-8\");\n\n logger.debug(\"Type generation complete!\");\n}\n\n// Rolldown tree-shaking only preserves \"own exports\" (locally defined) — not re-exports.\n// A local binding ensures the serving vite plugin's import keeps this in the dependency graph,\n// mirroring how generateFromEntryPoint (also defined here) is preserved via the analytics vite plugin.\nexport const generateServingTypes = 
generateServingTypesImpl;\n"],"mappings":";;;;;;;AAOA,OAAO,QAAQ;AAEf,MAAM,SAAS,aAAa,iBAAiB;;;;;;;AAQ7C,SAAS,yBAAyB,eAA8B,EAAE,EAAU;CAC1E,MAAM,eAAe,aAClB,KAAK,EAAE,MAAM,WAAW;AAKvB,SAAO,OAAO,KAAK,IAJE,KAClB,MAAM,KAAK,CACX,KAAK,MAAM,MAAO,MAAM,IAAI,OAAO,OAAO,OAAQ,CAClD,KAAK,KAAK;GAEb,CACD,KAAK,MAAM;AAId,QAAO;;;;;;6BAFc,eAAe,KAAK,aAAa,SAAS,GAQvB;;;;;;;;;;;AAY1C,eAAsB,uBAAuB,SAK1C;CACD,MAAM,EAAE,SAAS,aAAa,aAAa,YAAY;AAEvD,QAAO,MAAM,8BAA8B;CAE3C,IAAI,gBAA+B,EAAE;AACrC,KAAI,YACF,iBAAgB,MAAM,4BACpB,aACA,aACA,EACE,SACD,CACF;CAEH,MAAM,gBAAgB,cAAc,QAAQ,MAC1C,EAAE,KAAK,SAAS,kBAAkB,CACnC;AACD,KAAI,cAAc,SAAS,GAAG;EAC5B,MAAM,QAAQ,cAAc,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK;AACzD,QAAM,IAAI,MACR;GACE,2BAA2B,cAAc,OAAO,GAAG,cAAc,WAAW,IAAI,UAAU,UAAU,2BAA2B,MAAM;GACrI;GACA;GACA,8EAA8E,YAAY;GAC3F,CAAC,KAAK,KAAK,CACb;;CAGH,MAAM,mBAAmB,yBAAyB,cAAc;AAEhE,OAAM,GAAG,UAAU,SAAS,kBAAkB,QAAQ;AAEtD,QAAO,MAAM,4BAA4B;;AAM3C,MAAa,uBAAuBA"}
@@ -10,6 +10,39 @@ import pc from "picocolors";
10
10
  //#region src/type-generator/query-registry.ts
11
11
  const logger = createLogger("type-generator:query-registry");
12
12
  /**
13
+ * Regex breakdown:
14
+ * '(?:[^']|'')*' — matches a SQL string literal, including escaped '' pairs
15
+ * | — alternation: whichever branch matches first at a position wins
16
+ * --[^\n]* — matches a single-line SQL comment
17
+ *
18
+ * Because the regex engine scans left-to-right, a `'` is consumed as a string
19
+ * literal before any `--` inside it could match as a comment — giving us
20
+ * correct single-pass ordering without a manual state machine.
21
+ *
22
+ * V1: no block-comment support (deferred to next PR).
23
+ */
24
+ const PROTECTED_RANGE_RE = /'(?:[^']|'')*'|--[^\n]*/g;
25
+ /**
26
+ * Numeric-context patterns for positional type inference.
27
+ * Hoisted to module scope — safe because matchAll() clones the regex internally.
28
+ */
29
+ const NUMERIC_PATTERNS = [
30
+ /\bLIMIT\s+:([a-zA-Z_]\w*)/gi,
31
+ /\bOFFSET\s+:([a-zA-Z_]\w*)/gi,
32
+ /\bTOP\s+:([a-zA-Z_]\w*)/gi,
33
+ /\bFETCH\s+FIRST\s+:([a-zA-Z_]\w*)\s+ROWS/gi,
34
+ /[+\-*/]\s*:([a-zA-Z_]\w*)/g,
35
+ /:([a-zA-Z_]\w*)\s*[+\-*/]/g
36
+ ];
37
+ function getProtectedRanges(sql) {
38
+ const ranges = [];
39
+ for (const m of sql.matchAll(PROTECTED_RANGE_RE)) ranges.push([m.index, m.index + m[0].length]);
40
+ return ranges;
41
+ }
42
+ function isInsideProtectedRange(offset, ranges) {
43
+ return ranges.some(([start, end]) => offset >= start && offset < end);
44
+ }
45
+ /**
13
46
  * Parse a raw API/SDK error into a structured code + message.
14
47
  * Handles Databricks-style JSON bodies embedded in the message string,
15
48
  * e.g. `Response from server (Bad Request) {"error_code":"...","message":"..."}`.
@@ -30,10 +63,11 @@ function parseError(raw) {
30
63
  * @param sql - the SQL query to extract parameters from
31
64
  * @returns an array of parameter names
32
65
  */
33
- function extractParameters(sql) {
34
- const matches = sql.matchAll(/:([a-zA-Z_]\w*)/g);
66
+ function extractParameters(sql, ranges) {
67
+ const protectedRanges = ranges ?? getProtectedRanges(sql);
68
+ const matches = sql.matchAll(/(?<!:):([a-zA-Z_]\w*)/g);
35
69
  const params = /* @__PURE__ */ new Set();
36
- for (const match of matches) params.add(match[1]);
70
+ for (const match of matches) if (!isInsideProtectedRange(match.index, protectedRanges)) params.add(match[1]);
37
71
  return Array.from(params);
38
72
  }
39
73
  const SERVER_INJECTED_PARAMS = ["workspaceId"];
@@ -95,6 +129,29 @@ function extractParameterTypes(sql) {
95
129
  }
96
130
  return paramTypes;
97
131
  }
132
+ function defaultForType(sqlType) {
133
+ switch (sqlType?.toUpperCase()) {
134
+ case "NUMERIC": return "0";
135
+ case "STRING": return "''";
136
+ case "BOOLEAN": return "true";
137
+ case "DATE": return "'2000-01-01'";
138
+ case "TIMESTAMP": return "'2000-01-01T00:00:00Z'";
139
+ case "BINARY": return "X'00'";
140
+ default: return "''";
141
+ }
142
+ }
143
+ /**
144
+ * Infer parameter types from positional context in SQL.
145
+ * V1 only infers NUMERIC from patterns like LIMIT, OFFSET, TOP,
146
+ * FETCH FIRST ... ROWS, and arithmetic operators.
147
+ * Parameters inside string literals or SQL comments are ignored.
148
+ */
149
+ function inferParameterTypes(sql, ranges) {
150
+ const inferred = {};
151
+ const protectedRanges = ranges ?? getProtectedRanges(sql);
152
+ for (const pattern of NUMERIC_PATTERNS) for (const match of sql.matchAll(pattern)) if (!isInsideProtectedRange(match.index, protectedRanges)) inferred[match[1]] = "NUMERIC";
153
+ return inferred;
154
+ }
98
155
  /**
99
156
  * Generate query schemas from a folder of SQL files
100
157
  * It uses DESCRIBE QUERY to get the schema without executing the query
@@ -139,7 +196,23 @@ async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {
139
196
  status: "HIT"
140
197
  });
141
198
  } else {
142
- const cleanedSql = sql.replace(/:([a-zA-Z_]\w*)/g, "''").trim().replace(/;\s*$/, "");
199
+ const protectedRanges = getProtectedRanges(sql);
200
+ const annotatedTypes = extractParameterTypes(sql);
201
+ const parameterTypes = {
202
+ ...inferParameterTypes(sql, protectedRanges),
203
+ ...annotatedTypes
204
+ };
205
+ const sqlWithDefaults = sql.replace(/(?<!:):([a-zA-Z_]\w*)/g, (original, paramName, offset) => {
206
+ if (isInsideProtectedRange(offset, protectedRanges)) return original;
207
+ return defaultForType(parameterTypes[paramName]);
208
+ });
209
+ const allParams = extractParameters(sql, protectedRanges);
210
+ for (const param of allParams) {
211
+ if (SERVER_INJECTED_PARAMS.includes(param)) continue;
212
+ if (parameterTypes[param]) continue;
213
+ logger.warn("%s: parameter \":%s\" has no type annotation or inference. Add %s to the query file.", queryFiles[i], param, `-- @param ${param} <TYPE>`);
214
+ }
215
+ const cleanedSql = sqlWithDefaults.trim().replace(/;\s*$/, "");
143
216
  uncachedQueries.push({
144
217
  index: i,
145
218
  queryName,
@@ -1 +1 @@
1
- {"version":3,"file":"query-registry.js","names":[],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport pc from \"picocolors\";\nimport { createLogger } from \"../logging/logger\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\nconst logger = createLogger(\"type-generator:query-registry\");\n\n/**\n * Parse a raw API/SDK error into a structured code + message.\n * Handles Databricks-style JSON bodies embedded in the message string,\n * e.g. `Response from server (Bad Request) {\"error_code\":\"...\",\"message\":\"...\"}`.\n */\nfunction parseError(raw: string): { code?: string; message: string } {\n const jsonMatch = raw.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n try {\n const parsed = JSON.parse(jsonMatch[0]);\n if (parsed.error_code || parsed.message) {\n return {\n code: parsed.error_code,\n message: parsed.message || raw,\n };\n }\n } catch {\n // not valid JSON, fall through\n }\n }\n return { message: raw };\n}\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(sql: string): string[] {\n const matches = sql.matchAll(/:([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n params.add(match[1]);\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\n/**\n * Generates the TypeScript type literal for query parameters from SQL.\n * Shared by both the success and failure paths.\n */\nfunction formatParametersType(sql: string): string 
{\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n const paramTypes = extractParameterTypes(sql);\n\n return params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n}\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): { type: string; hasResults: boolean } {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const paramsType = formatParametersType(sql);\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? `/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n const hasResults = resultFields.length > 0;\n\n const type = `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: ${\n hasResults\n ? 
`Array<{\n ${resultFields.join(\";\\n \")};\n }>`\n : \"unknown\"\n };\n }`;\n\n return { type, hasResults };\n}\n\n/**\n * Used when DESCRIBE QUERY fails so the query still appears in QueryRegistry.\n * Generates a type with unknown result from SQL alone (no warehouse call).\n */\nfunction generateUnknownResultQuery(sql: string, queryName: string): string {\n const paramsType = formatParametersType(sql);\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: unknown;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean; concurrency?: number } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false, concurrency: rawConcurrency = 10 } = options;\n const concurrency =\n typeof rawConcurrency === \"number\" && Number.isFinite(rawConcurrency)\n ? Math.max(1, Math.floor(rawConcurrency))\n : 10;\n\n // read all query files and cache in parallel\n const [allFiles, cache] = await Promise.all([\n fs.readdir(queryFolder),\n noCache\n ? 
({ version: CACHE_VERSION, queries: {} } as Awaited<\n ReturnType<typeof loadCache>\n >)\n : loadCache(),\n ]);\n\n const queryFiles = allFiles.filter((file) => file.endsWith(\".sql\"));\n logger.debug(\"Found %d SQL queries\", queryFiles.length);\n\n const client = new WorkspaceClient({});\n const spinner = new Spinner();\n\n // Read all SQL files in parallel\n const sqlContents = await Promise.all(\n queryFiles.map((file) => fs.readFile(path.join(queryFolder, file), \"utf8\")),\n );\n\n const startTime = performance.now();\n\n // Phase 1: Check cache, separate cached vs uncached\n const cachedResults: Array<{ index: number; schema: QuerySchema }> = [];\n const uncachedQueries: Array<{\n index: number;\n queryName: string;\n sql: string;\n sqlHash: string;\n cleanedSql: string;\n }> = [];\n const logEntries: Array<{\n queryName: string;\n status: \"HIT\" | \"MISS\";\n failed?: boolean;\n error?: { code?: string; message: string };\n }> = [];\n\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const rawName = path.basename(file, \".sql\");\n const queryName = normalizeQueryName(rawName);\n\n const sql = sqlContents[i];\n const sqlHash = hashSQL(sql);\n\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash && !cached.retry) {\n cachedResults.push({\n index: i,\n schema: { name: queryName, type: cached.type },\n });\n logEntries.push({ queryName, status: \"HIT\" });\n } else {\n const sqlWithDefaults = sql.replace(/:([a-zA-Z_]\\w*)/g, \"''\");\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n uncachedQueries.push({ index: i, queryName, sql, sqlHash, cleanedSql });\n }\n }\n\n // Phase 2: Execute all uncached DESCRIBE calls in parallel\n type DescribeResult =\n | {\n status: \"ok\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n }\n | {\n status: \"fail\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: 
string; retry: boolean };\n error: { code?: string; message: string };\n };\n\n const freshResults: Array<{ index: number; schema: QuerySchema }> = [];\n\n if (uncachedQueries.length > 0) {\n let completed = 0;\n const total = uncachedQueries.length;\n spinner.start(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (0/${total})`,\n );\n\n const describeOne = async ({\n index,\n queryName,\n sql,\n sqlHash,\n cleanedSql,\n }: (typeof uncachedQueries)[number]): Promise<DescribeResult> => {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n completed++;\n spinner.update(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (${completed}/${total})`,\n );\n\n logger.debug(\n \"DESCRIBE result for %s: state=%s, rows=%d\",\n queryName,\n result.status.state,\n result.result?.data_array?.length ?? 0,\n );\n\n if (result.status.state === \"FAILED\") {\n const sqlError =\n result.status.error?.message || \"Query execution failed\";\n logger.warn(\"DESCRIBE failed for %s: %s\", queryName, sqlError);\n const type = generateUnknownResultQuery(sql, queryName);\n return {\n status: \"fail\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: true },\n error: parseError(sqlError),\n };\n }\n\n const { type, hasResults } = convertToQueryType(result, sql, queryName);\n return {\n status: \"ok\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: !hasResults },\n };\n };\n\n // Process in chunks, saving cache after each chunk\n const processBatchResults = (\n settled: PromiseSettledResult<DescribeResult>[],\n batchOffset: number,\n ) => {\n for (let i = 0; i < settled.length; i++) {\n const entry = settled[i];\n const { queryName } = uncachedQueries[batchOffset + i];\n\n if (entry.status === \"fulfilled\") {\n const res = entry.value;\n 
freshResults.push({ index: res.index, schema: res.schema });\n cache.queries[queryName] = res.cacheEntry;\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: res.status === \"fail\",\n error: res.status === \"fail\" ? res.error : undefined,\n });\n } else {\n const { sql, sqlHash, index } = uncachedQueries[batchOffset + i];\n const reason =\n entry.reason instanceof Error\n ? entry.reason.message\n : String(entry.reason);\n logger.warn(\"DESCRIBE rejected for %s: %s\", queryName, reason);\n const type = generateUnknownResultQuery(sql, queryName);\n freshResults.push({ index, schema: { name: queryName, type } });\n cache.queries[queryName] = { hash: sqlHash, type, retry: true };\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: true,\n error: parseError(reason),\n });\n }\n }\n };\n\n if (uncachedQueries.length > concurrency) {\n for (let b = 0; b < uncachedQueries.length; b += concurrency) {\n const batch = uncachedQueries.slice(b, b + concurrency);\n const batchResults = await Promise.allSettled(batch.map(describeOne));\n processBatchResults(batchResults, b);\n await saveCache(cache);\n }\n } else {\n const settled = await Promise.allSettled(\n uncachedQueries.map(describeOne),\n );\n processBatchResults(settled, 0);\n await saveCache(cache);\n }\n\n spinner.stop(\"\");\n }\n\n const elapsed = ((performance.now() - startTime) / 1000).toFixed(2);\n\n // Print formatted table\n if (logEntries.length > 0) {\n const maxNameLen = Math.max(...logEntries.map((e) => e.queryName.length));\n const separator = pc.dim(\"─\".repeat(50));\n console.log(\"\");\n console.log(\n ` ${pc.bold(\"Typegen Queries\")} ${pc.dim(`(${logEntries.length})`)}`,\n );\n console.log(` ${separator}`);\n for (const entry of logEntries) {\n const tag = entry.failed\n ? pc.bold(pc.red(\"ERROR\"))\n : entry.status === \"HIT\"\n ? 
`cache ${pc.bold(pc.green(\"HIT \"))}`\n : `cache ${pc.bold(pc.yellow(\"MISS \"))}`;\n const rawName = entry.queryName.padEnd(maxNameLen);\n const name = entry.failed ? pc.dim(pc.strikethrough(rawName)) : rawName;\n const errorCode = entry.error?.message.match(/\\[([^\\]]+)\\]/)?.[1];\n const reason = errorCode ? ` ${pc.dim(errorCode)}` : \"\";\n console.log(` ${tag} ${name}${reason}`);\n }\n const newCount = logEntries.filter(\n (e) => e.status === \"MISS\" && !e.failed,\n ).length;\n const cacheCount = logEntries.filter(\n (e) => e.status === \"HIT\" && !e.failed,\n ).length;\n const errorCount = logEntries.filter((e) => e.failed).length;\n console.log(` ${separator}`);\n const parts = [`${newCount} new`, `${cacheCount} from cache`];\n if (errorCount > 0)\n parts.push(`${errorCount} ${errorCount === 1 ? \"error\" : \"errors\"}`);\n console.log(` ${parts.join(\", \")}. ${pc.dim(`${elapsed}s`)}`);\n console.log(\"\");\n }\n\n // Merge and sort by original file index for deterministic output\n return [...cachedResults, ...freshResults]\n .sort((a, b) => a.index - b.index)\n .map((r) => r.schema);\n}\n\n/**\n * Normalize query name by removing the .obo extension\n * @param queryName - the query name to normalize\n * @returns the normalized query name\n */\nfunction normalizeQueryName(fileName: string): string {\n return fileName.replace(/\\.obo$/, \"\");\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types 
to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: \"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAM,SAAS,aAAa,gCAAgC;;;;;;AAO5D,SAAS,WAAW,KAAiD;CACnE,MAAM,YAAY,IAAI,MAAM,cAAc;AAC1C,KAAI,UACF,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG;AACvC,MAAI,OAAO,cAAc,OAAO,QAC9B,QAAO;GACL,MAAM,OAAO;GACb,SAAS,OAAO,WAAW;GAC5B;SAEG;AAIV,QAAO,EAAE,SAAS,KAAK;;;;;;;AAQzB,SAAgB,kBAAkB,KAAuB;CACvD,MAAM,UAAU,IAAI,SAAS,mBAAmB;CAChD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,QAAO,IAAI,MAAM,GAAG;AAEtB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;;;;;AAMrD,SAAS,qBAAqB,KAAqB;CACjD,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CACD,MAAM,aAAa,sBAAsB,IAAI;AAE7C,QAAO,OAAO,SAAS,IACnB,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAC3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB;;AAGN,SAAgB,mBACd,QACA,KACA,WACuC;CAEvC,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,aAAa,qBAAqB,IAAI;CAG5C,MAAM,eAAe,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAOpB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B;CAEF,MAAM,aAAa,aA
Aa,SAAS;AAczC,QAAO;EAAE,MAZI;aACF,UAAU;kBACL,WAAW;cAEvB,aACI;QACF,aAAa,KAAK,YAAY,CAAC;UAE7B,UACL;;EAGY;EAAY;;;;;;AAO7B,SAAS,2BAA2B,KAAa,WAA2B;AAG1E,QAAO;aACI,UAAU;kBAHF,qBAAqB,IAAI,CAIjB;;;;AAK7B,SAAgB,sBAAsB,KAAqC;CACzE,MAAM,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAuD,EAAE,EACjC;CACxB,MAAM,EAAE,UAAU,OAAO,aAAa,iBAAiB,OAAO;CAC9D,MAAM,cACJ,OAAO,mBAAmB,YAAY,OAAO,SAAS,eAAe,GACjE,KAAK,IAAI,GAAG,KAAK,MAAM,eAAe,CAAC,GACvC;CAGN,MAAM,CAAC,UAAU,SAAS,MAAM,QAAQ,IAAI,CAC1C,GAAG,QAAQ,YAAY,EACvB,UACK;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAGxC,WAAW,CAChB,CAAC;CAEF,MAAM,aAAa,SAAS,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AACnE,QAAO,MAAM,wBAAwB,WAAW,OAAO;CAEvD,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAM,UAAU,IAAI,SAAS;CAG7B,MAAM,cAAc,MAAM,QAAQ,IAChC,WAAW,KAAK,SAAS,GAAG,SAAS,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO,CAAC,CAC5E;CAED,MAAM,YAAY,YAAY,KAAK;CAGnC,MAAM,gBAA+D,EAAE;CACvE,MAAM,kBAMD,EAAE;CACP,MAAM,aAKD,EAAE;AAEP,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EAExB,MAAM,YAAY,mBADF,KAAK,SAAS,MAAM,OAAO,CACE;EAE7C,MAAM,MAAM,YAAY;EACxB,MAAM,UAAU,QAAQ,IAAI;EAE5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,WAAW,CAAC,OAAO,OAAO;AACtD,iBAAc,KAAK;IACjB,OAAO;IACP,QAAQ;KAAE,MAAM;KAAW,MAAM,OAAO;KAAM;IAC/C,CAAC;AACF,cAAW,KAAK;IAAE;IAAW,QAAQ;IAAO,CAAC;SACxC;GAEL,MAAM,aADkB,IAAI,QAAQ,oBAAoB,KAAK,CAC1B,MAAM,CAAC,QAAQ,SAAS,GAAG;AAC9D,mBAAgB,KAAK;IAAE,OAAO;IAAG;IAAW;IAAK;IAAS;IAAY,CAAC;;;CAoB3E,MAAM,eAA8D,EAAE;AAEtE,KAAI,gBAAgB,SAAS,GAAG;EAC9B,IAAI,YAAY;EAChB,MAAM,QAAQ,gBAAgB;AAC9B,UAAQ,MACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,MAAM,MAAM,GACtE;EAED,MAAM,cAAc,OAAO,EACzB,OACA,WACA,KACA,SACA,iBAC+D;GAC/D,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF;AACA,WAAQ,OACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,IAAI,UAAU,GAAG,MAAM,GACjF;AAED,UAAO,MACL,6CACA,WACA,OAAO,OAAO,OACd,OAAO,QAAQ,YAAY,UAAU,EACtC;AAED,OAAI,OAAO,OAAO,UAAU,UAAU;IACpC,MAAM,WACJ,OAAO,OAAO,OAAO,WAA
W;AAClC,WAAO,KAAK,8BAA8B,WAAW,SAAS;IAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,WAAO;KACL,QAAQ;KACR;KACA,QAAQ;MAAE,MAAM;MAAW;MAAM;KACjC,YAAY;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;KAChD,OAAO,WAAW,SAAS;KAC5B;;GAGH,MAAM,EAAE,MAAM,eAAe,mBAAmB,QAAQ,KAAK,UAAU;AACvE,UAAO;IACL,QAAQ;IACR;IACA,QAAQ;KAAE,MAAM;KAAW;KAAM;IACjC,YAAY;KAAE,MAAM;KAAS;KAAM,OAAO,CAAC;KAAY;IACxD;;EAIH,MAAM,uBACJ,SACA,gBACG;AACH,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;IACvC,MAAM,QAAQ,QAAQ;IACtB,MAAM,EAAE,cAAc,gBAAgB,cAAc;AAEpD,QAAI,MAAM,WAAW,aAAa;KAChC,MAAM,MAAM,MAAM;AAClB,kBAAa,KAAK;MAAE,OAAO,IAAI;MAAO,QAAQ,IAAI;MAAQ,CAAC;AAC3D,WAAM,QAAQ,aAAa,IAAI;AAC/B,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ,IAAI,WAAW;MACvB,OAAO,IAAI,WAAW,SAAS,IAAI,QAAQ;MAC5C,CAAC;WACG;KACL,MAAM,EAAE,KAAK,SAAS,UAAU,gBAAgB,cAAc;KAC9D,MAAM,SACJ,MAAM,kBAAkB,QACpB,MAAM,OAAO,UACb,OAAO,MAAM,OAAO;AAC1B,YAAO,KAAK,gCAAgC,WAAW,OAAO;KAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,kBAAa,KAAK;MAAE;MAAO,QAAQ;OAAE,MAAM;OAAW;OAAM;MAAE,CAAC;AAC/D,WAAM,QAAQ,aAAa;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;AAC/D,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ;MACR,OAAO,WAAW,OAAO;MAC1B,CAAC;;;;AAKR,MAAI,gBAAgB,SAAS,YAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,aAAa;GAC5D,MAAM,QAAQ,gBAAgB,MAAM,GAAG,IAAI,YAAY;AAEvD,uBADqB,MAAM,QAAQ,WAAW,MAAM,IAAI,YAAY,CAAC,EACnC,EAAE;AACpC,SAAM,UAAU,MAAM;;OAEnB;AAIL,uBAHgB,MAAM,QAAQ,WAC5B,gBAAgB,IAAI,YAAY,CACjC,EAC4B,EAAE;AAC/B,SAAM,UAAU,MAAM;;AAGxB,UAAQ,KAAK,GAAG;;CAGlB,MAAM,YAAY,YAAY,KAAK,GAAG,aAAa,KAAM,QAAQ,EAAE;AAGnE,KAAI,WAAW,SAAS,GAAG;EACzB,MAAM,aAAa,KAAK,IAAI,GAAG,WAAW,KAAK,MAAM,EAAE,UAAU,OAAO,CAAC;EACzE,MAAM,YAAY,GAAG,IAAI,IAAI,OAAO,GAAG,CAAC;AACxC,UAAQ,IAAI,GAAG;AACf,UAAQ,IACN,KAAK,GAAG,KAAK,kBAAkB,CAAC,GAAG,GAAG,IAAI,IAAI,WAAW,OAAO,GAAG,GACpE;AACD,UAAQ,IAAI,KAAK,YAAY;AAC7B,OAAK,MAAM,SAAS,YAAY;GAC9B,MAAM,MAAM,MAAM,SACd,GAAG,KAAK,GAAG,IAAI,QAAQ,CAAC,GACxB,MAAM,WAAW,QACf,SAAS,GAAG,KAAK,GAAG,MAAM,QAAQ,CAAC,KACnC,SAAS,GAAG,KAAK,GAAG,OAAO,QAAQ,CAAC;GAC1C,MAAM,UAAU,MAAM,UAAU,OAAO,WAAW;GAClD,MAAM,OAAO,MAAM,SAAS,GAAG,IAAI,GAAG,cAAc,QAAQ,CAAC,GAAG;GAChE,MAAM,YAAY,MAAM,OAAO
,QAAQ,MAAM,eAAe,GAAG;GAC/D,MAAM,SAAS,YAAY,KAAK,GAAG,IAAI,UAAU,KAAK;AACtD,WAAQ,IAAI,KAAK,IAAI,IAAI,OAAO,SAAS;;EAE3C,MAAM,WAAW,WAAW,QACzB,MAAM,EAAE,WAAW,UAAU,CAAC,EAAE,OAClC,CAAC;EACF,MAAM,aAAa,WAAW,QAC3B,MAAM,EAAE,WAAW,SAAS,CAAC,EAAE,OACjC,CAAC;EACF,MAAM,aAAa,WAAW,QAAQ,MAAM,EAAE,OAAO,CAAC;AACtD,UAAQ,IAAI,KAAK,YAAY;EAC7B,MAAM,QAAQ,CAAC,GAAG,SAAS,OAAO,GAAG,WAAW,aAAa;AAC7D,MAAI,aAAa,EACf,OAAM,KAAK,GAAG,WAAW,GAAG,eAAe,IAAI,UAAU,WAAW;AACtE,UAAQ,IAAI,KAAK,MAAM,KAAK,KAAK,CAAC,IAAI,GAAG,IAAI,GAAG,QAAQ,GAAG,GAAG;AAC9D,UAAQ,IAAI,GAAG;;AAIjB,QAAO,CAAC,GAAG,eAAe,GAAG,aAAa,CACvC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,KAAK,MAAM,EAAE,OAAO;;;;;;;AAQzB,SAAS,mBAAmB,UAA0B;AACpD,QAAO,SAAS,QAAQ,UAAU,GAAG;;;;;;;;;;;;AAavC,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAM,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
1
+ {"version":3,"file":"query-registry.js","names":[],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport pc from \"picocolors\";\nimport { createLogger } from \"../logging/logger\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\nconst logger = createLogger(\"type-generator:query-registry\");\n\n/**\n * Regex breakdown:\n * '(?:[^']|'')*' — matches a SQL string literal, including escaped '' pairs\n * | — alternation: whichever branch matches first at a position wins\n * --[^\\n]* — matches a single-line SQL comment\n *\n * Because the regex engine scans left-to-right, a `'` is consumed as a string\n * literal before any `--` inside it could match as a comment — giving us\n * correct single-pass ordering without a manual state machine.\n *\n * V1: no block-comment support (deferred to next PR).\n */\nconst PROTECTED_RANGE_RE = /'(?:[^']|'')*'|--[^\\n]*/g;\n\n/**\n * Numeric-context patterns for positional type inference.\n * Hoisted to module scope — safe because matchAll() clones the regex internally.\n */\nconst NUMERIC_PATTERNS: RegExp[] = [\n /\\bLIMIT\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bOFFSET\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bTOP\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bFETCH\\s+FIRST\\s+:([a-zA-Z_]\\w*)\\s+ROWS/gi,\n // V1 limitation: arithmetic operators may false-positive for date\n // expressions like `:start_date - INTERVAL '1 day'`. A smarter\n // heuristic (e.g. 
look-ahead for INTERVAL) is deferred to a future PR.\n /[+\\-*/]\\s*:([a-zA-Z_]\\w*)/g,\n /:([a-zA-Z_]\\w*)\\s*[+\\-*/]/g,\n];\n\nexport function getProtectedRanges(sql: string): Array<[number, number]> {\n const ranges: Array<[number, number]> = [];\n for (const m of sql.matchAll(PROTECTED_RANGE_RE)) {\n ranges.push([m.index, m.index + m[0].length]);\n }\n return ranges;\n}\n\nfunction isInsideProtectedRange(\n offset: number,\n ranges: Array<[number, number]>,\n): boolean {\n return ranges.some(([start, end]) => offset >= start && offset < end);\n}\n\n/**\n * Parse a raw API/SDK error into a structured code + message.\n * Handles Databricks-style JSON bodies embedded in the message string,\n * e.g. `Response from server (Bad Request) {\"error_code\":\"...\",\"message\":\"...\"}`.\n */\nfunction parseError(raw: string): { code?: string; message: string } {\n const jsonMatch = raw.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n try {\n const parsed = JSON.parse(jsonMatch[0]);\n if (parsed.error_code || parsed.message) {\n return {\n code: parsed.error_code,\n message: parsed.message || raw,\n };\n }\n } catch {\n // not valid JSON, fall through\n }\n }\n return { message: raw };\n}\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(\n sql: string,\n ranges?: Array<[number, number]>,\n): string[] {\n const protectedRanges = ranges ?? 
getProtectedRanges(sql);\n const matches = sql.matchAll(/(?<!:):([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n if (!isInsideProtectedRange(match.index, protectedRanges)) {\n params.add(match[1]);\n }\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\n/**\n * Generates the TypeScript type literal for query parameters from SQL.\n * Shared by both the success and failure paths.\n */\nfunction formatParametersType(sql: string): string {\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n const paramTypes = extractParameterTypes(sql);\n\n return params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n}\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): { type: string; hasResults: boolean } {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const paramsType = formatParametersType(sql);\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? 
`/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n const hasResults = resultFields.length > 0;\n\n const type = `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: ${\n hasResults\n ? `Array<{\n ${resultFields.join(\";\\n \")};\n }>`\n : \"unknown\"\n };\n }`;\n\n return { type, hasResults };\n}\n\n/**\n * Used when DESCRIBE QUERY fails so the query still appears in QueryRegistry.\n * Generates a type with unknown result from SQL alone (no warehouse call).\n */\nfunction generateUnknownResultQuery(sql: string, queryName: string): string {\n const paramsType = formatParametersType(sql);\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: unknown;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\nexport function defaultForType(sqlType: string | undefined): string {\n switch (sqlType?.toUpperCase()) {\n case \"NUMERIC\":\n return \"0\";\n case \"STRING\":\n return \"''\";\n case \"BOOLEAN\":\n return \"true\";\n case \"DATE\":\n return \"'2000-01-01'\";\n case \"TIMESTAMP\":\n return \"'2000-01-01T00:00:00Z'\";\n case \"BINARY\":\n return \"X'00'\";\n default:\n return \"''\";\n }\n}\n\n/**\n * Infer parameter types from positional context in SQL.\n * V1 only infers NUMERIC from patterns like LIMIT, OFFSET, TOP,\n * FETCH FIRST ... 
ROWS, and arithmetic operators.\n * Parameters inside string literals or SQL comments are ignored.\n */\nexport function inferParameterTypes(\n sql: string,\n ranges?: Array<[number, number]>,\n): Record<string, string> {\n const inferred: Record<string, string> = {};\n const protectedRanges = ranges ?? getProtectedRanges(sql);\n\n for (const pattern of NUMERIC_PATTERNS) {\n for (const match of sql.matchAll(pattern)) {\n if (!isInsideProtectedRange(match.index, protectedRanges)) {\n inferred[match[1]] = \"NUMERIC\";\n }\n }\n }\n\n return inferred;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean; concurrency?: number } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false, concurrency: rawConcurrency = 10 } = options;\n const concurrency =\n typeof rawConcurrency === \"number\" && Number.isFinite(rawConcurrency)\n ? Math.max(1, Math.floor(rawConcurrency))\n : 10;\n\n // read all query files and cache in parallel\n const [allFiles, cache] = await Promise.all([\n fs.readdir(queryFolder),\n noCache\n ? 
({ version: CACHE_VERSION, queries: {} } as Awaited<\n ReturnType<typeof loadCache>\n >)\n : loadCache(),\n ]);\n\n const queryFiles = allFiles.filter((file) => file.endsWith(\".sql\"));\n logger.debug(\"Found %d SQL queries\", queryFiles.length);\n\n const client = new WorkspaceClient({});\n const spinner = new Spinner();\n\n // Read all SQL files in parallel\n const sqlContents = await Promise.all(\n queryFiles.map((file) => fs.readFile(path.join(queryFolder, file), \"utf8\")),\n );\n\n const startTime = performance.now();\n\n // Phase 1: Check cache, separate cached vs uncached\n const cachedResults: Array<{ index: number; schema: QuerySchema }> = [];\n const uncachedQueries: Array<{\n index: number;\n queryName: string;\n sql: string;\n sqlHash: string;\n cleanedSql: string;\n }> = [];\n const logEntries: Array<{\n queryName: string;\n status: \"HIT\" | \"MISS\";\n failed?: boolean;\n error?: { code?: string; message: string };\n }> = [];\n\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const rawName = path.basename(file, \".sql\");\n const queryName = normalizeQueryName(rawName);\n\n const sql = sqlContents[i];\n const sqlHash = hashSQL(sql);\n\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash && !cached.retry) {\n cachedResults.push({\n index: i,\n schema: { name: queryName, type: cached.type },\n });\n logEntries.push({ queryName, status: \"HIT\" });\n } else {\n const protectedRanges = getProtectedRanges(sql);\n const annotatedTypes = extractParameterTypes(sql);\n const inferredTypes = inferParameterTypes(sql, protectedRanges);\n const parameterTypes = { ...inferredTypes, ...annotatedTypes };\n const sqlWithDefaults = sql.replace(\n /(?<!:):([a-zA-Z_]\\w*)/g,\n (original, paramName, offset) => {\n if (isInsideProtectedRange(offset, protectedRanges)) {\n return original;\n }\n return defaultForType(parameterTypes[paramName]);\n },\n );\n\n // Warn about unresolved parameters\n const allParams 
= extractParameters(sql, protectedRanges);\n for (const param of allParams) {\n if (SERVER_INJECTED_PARAMS.includes(param)) continue;\n if (parameterTypes[param]) continue;\n logger.warn(\n '%s: parameter \":%s\" has no type annotation or inference. Add %s to the query file.',\n queryFiles[i],\n param,\n `-- @param ${param} <TYPE>`,\n );\n }\n\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n uncachedQueries.push({ index: i, queryName, sql, sqlHash, cleanedSql });\n }\n }\n\n // Phase 2: Execute all uncached DESCRIBE calls in parallel\n type DescribeResult =\n | {\n status: \"ok\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n }\n | {\n status: \"fail\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n error: { code?: string; message: string };\n };\n\n const freshResults: Array<{ index: number; schema: QuerySchema }> = [];\n\n if (uncachedQueries.length > 0) {\n let completed = 0;\n const total = uncachedQueries.length;\n spinner.start(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (0/${total})`,\n );\n\n const describeOne = async ({\n index,\n queryName,\n sql,\n sqlHash,\n cleanedSql,\n }: (typeof uncachedQueries)[number]): Promise<DescribeResult> => {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n completed++;\n spinner.update(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (${completed}/${total})`,\n );\n\n logger.debug(\n \"DESCRIBE result for %s: state=%s, rows=%d\",\n queryName,\n result.status.state,\n result.result?.data_array?.length ?? 
0,\n );\n\n if (result.status.state === \"FAILED\") {\n const sqlError =\n result.status.error?.message || \"Query execution failed\";\n logger.warn(\"DESCRIBE failed for %s: %s\", queryName, sqlError);\n const type = generateUnknownResultQuery(sql, queryName);\n return {\n status: \"fail\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: true },\n error: parseError(sqlError),\n };\n }\n\n const { type, hasResults } = convertToQueryType(result, sql, queryName);\n return {\n status: \"ok\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: !hasResults },\n };\n };\n\n // Process in chunks, saving cache after each chunk\n const processBatchResults = (\n settled: PromiseSettledResult<DescribeResult>[],\n batchOffset: number,\n ) => {\n for (let i = 0; i < settled.length; i++) {\n const entry = settled[i];\n const { queryName } = uncachedQueries[batchOffset + i];\n\n if (entry.status === \"fulfilled\") {\n const res = entry.value;\n freshResults.push({ index: res.index, schema: res.schema });\n cache.queries[queryName] = res.cacheEntry;\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: res.status === \"fail\",\n error: res.status === \"fail\" ? res.error : undefined,\n });\n } else {\n const { sql, sqlHash, index } = uncachedQueries[batchOffset + i];\n const reason =\n entry.reason instanceof Error\n ? 
entry.reason.message\n : String(entry.reason);\n logger.warn(\"DESCRIBE rejected for %s: %s\", queryName, reason);\n const type = generateUnknownResultQuery(sql, queryName);\n freshResults.push({ index, schema: { name: queryName, type } });\n cache.queries[queryName] = { hash: sqlHash, type, retry: true };\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: true,\n error: parseError(reason),\n });\n }\n }\n };\n\n if (uncachedQueries.length > concurrency) {\n for (let b = 0; b < uncachedQueries.length; b += concurrency) {\n const batch = uncachedQueries.slice(b, b + concurrency);\n const batchResults = await Promise.allSettled(batch.map(describeOne));\n processBatchResults(batchResults, b);\n await saveCache(cache);\n }\n } else {\n const settled = await Promise.allSettled(\n uncachedQueries.map(describeOne),\n );\n processBatchResults(settled, 0);\n await saveCache(cache);\n }\n\n spinner.stop(\"\");\n }\n\n const elapsed = ((performance.now() - startTime) / 1000).toFixed(2);\n\n // Print formatted table\n if (logEntries.length > 0) {\n const maxNameLen = Math.max(...logEntries.map((e) => e.queryName.length));\n const separator = pc.dim(\"─\".repeat(50));\n console.log(\"\");\n console.log(\n ` ${pc.bold(\"Typegen Queries\")} ${pc.dim(`(${logEntries.length})`)}`,\n );\n console.log(` ${separator}`);\n for (const entry of logEntries) {\n const tag = entry.failed\n ? pc.bold(pc.red(\"ERROR\"))\n : entry.status === \"HIT\"\n ? `cache ${pc.bold(pc.green(\"HIT \"))}`\n : `cache ${pc.bold(pc.yellow(\"MISS \"))}`;\n const rawName = entry.queryName.padEnd(maxNameLen);\n const name = entry.failed ? pc.dim(pc.strikethrough(rawName)) : rawName;\n const errorCode = entry.error?.message.match(/\\[([^\\]]+)\\]/)?.[1];\n const reason = errorCode ? 
` ${pc.dim(errorCode)}` : \"\";\n console.log(` ${tag} ${name}${reason}`);\n }\n const newCount = logEntries.filter(\n (e) => e.status === \"MISS\" && !e.failed,\n ).length;\n const cacheCount = logEntries.filter(\n (e) => e.status === \"HIT\" && !e.failed,\n ).length;\n const errorCount = logEntries.filter((e) => e.failed).length;\n console.log(` ${separator}`);\n const parts = [`${newCount} new`, `${cacheCount} from cache`];\n if (errorCount > 0)\n parts.push(`${errorCount} ${errorCount === 1 ? \"error\" : \"errors\"}`);\n console.log(` ${parts.join(\", \")}. ${pc.dim(`${elapsed}s`)}`);\n console.log(\"\");\n }\n\n // Merge and sort by original file index for deterministic output\n return [...cachedResults, ...freshResults]\n .sort((a, b) => a.index - b.index)\n .map((r) => r.schema);\n}\n\n/**\n * Normalize query name by removing the .obo extension\n * @param queryName - the query name to normalize\n * @returns the normalized query name\n */\nfunction normalizeQueryName(fileName: string): string {\n return fileName.replace(/\\.obo$/, \"\");\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: 
\"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAM,SAAS,aAAa,gCAAgC;;;;;;;;;;;;;AAc5D,MAAM,qBAAqB;;;;;AAM3B,MAAM,mBAA6B;CACjC;CACA;CACA;CACA;CAIA;CACA;CACD;AAED,SAAgB,mBAAmB,KAAsC;CACvE,MAAM,SAAkC,EAAE;AAC1C,MAAK,MAAM,KAAK,IAAI,SAAS,mBAAmB,CAC9C,QAAO,KAAK,CAAC,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC;AAE/C,QAAO;;AAGT,SAAS,uBACP,QACA,QACS;AACT,QAAO,OAAO,MAAM,CAAC,OAAO,SAAS,UAAU,SAAS,SAAS,IAAI;;;;;;;AAQvE,SAAS,WAAW,KAAiD;CACnE,MAAM,YAAY,IAAI,MAAM,cAAc;AAC1C,KAAI,UACF,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG;AACvC,MAAI,OAAO,cAAc,OAAO,QAC9B,QAAO;GACL,MAAM,OAAO;GACb,SAAS,OAAO,WAAW;GAC5B;SAEG;AAIV,QAAO,EAAE,SAAS,KAAK;;;;;;;AAQzB,SAAgB,kBACd,KACA,QACU;CACV,MAAM,kBAAkB,UAAU,mBAAmB,IAAI;CACzD,MAAM,UAAU,IAAI,SAAS,yBAAyB;CACtD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,KAAI,CAAC,uBAAuB,MAAM,OAAO,gBAAgB,CACvD,QAAO,IAAI,MAAM,GAAG;AAGxB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;;;;;AAMrD,SAAS,qBAAqB,KAAqB;CACjD,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CACD,MAAM,aAAa,sBAAsB,IAAI;AAE7C,QAAO,OAAO,SAAS,IACnB,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAC3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB;;AAGN,SAAgB,mBACd,QACA,KACA,WACuC;CAEvC,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,aAAa,qBAAqB,IAAI;CAG5C,MAAM,eAAe,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAO
pB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B;CAEF,MAAM,aAAa,aAAa,SAAS;AAczC,QAAO;EAAE,MAZI;aACF,UAAU;kBACL,WAAW;cAEvB,aACI;QACF,aAAa,KAAK,YAAY,CAAC;UAE7B,UACL;;EAGY;EAAY;;;;;;AAO7B,SAAS,2BAA2B,KAAa,WAA2B;AAG1E,QAAO;aACI,UAAU;kBAHF,qBAAqB,IAAI,CAIjB;;;;AAK7B,SAAgB,sBAAsB,KAAqC;CACzE,MAAM,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;AAGT,SAAgB,eAAe,SAAqC;AAClE,SAAQ,SAAS,aAAa,EAA9B;EACE,KAAK,UACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,QACE,QAAO;;;;;;;;;AAUb,SAAgB,oBACd,KACA,QACwB;CACxB,MAAM,WAAmC,EAAE;CAC3C,MAAM,kBAAkB,UAAU,mBAAmB,IAAI;AAEzD,MAAK,MAAM,WAAW,iBACpB,MAAK,MAAM,SAAS,IAAI,SAAS,QAAQ,CACvC,KAAI,CAAC,uBAAuB,MAAM,OAAO,gBAAgB,CACvD,UAAS,MAAM,MAAM;AAK3B,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAuD,EAAE,EACjC;CACxB,MAAM,EAAE,UAAU,OAAO,aAAa,iBAAiB,OAAO;CAC9D,MAAM,cACJ,OAAO,mBAAmB,YAAY,OAAO,SAAS,eAAe,GACjE,KAAK,IAAI,GAAG,KAAK,MAAM,eAAe,CAAC,GACvC;CAGN,MAAM,CAAC,UAAU,SAAS,MAAM,QAAQ,IAAI,CAC1C,GAAG,QAAQ,YAAY,EACvB,UACK;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAGxC,WAAW,CAChB,CAAC;CAEF,MAAM,aAAa,SAAS,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AACnE,QAAO,MAAM,wBAAwB,WAAW,OAAO;CAEvD,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAM,UAAU,IAAI,SAAS;CAG7B,MAAM,cAAc,MAAM,QAAQ,IAChC,WAAW,KAAK,SAAS,GAAG,SAAS,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO,CAAC,CAC5E;CAED,MAAM,YAAY,YAAY,KAAK;CAGnC,MAAM,gBAA+D,EAAE;CACvE,MAAM,kBAMD,EAAE;CACP,MAAM,aAKD,EAAE;AAEP,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EAExB,MAAM,YAAY,mBADF,KAAK,SAAS,MAAM,OAAO,CACE;EAE7C,MAAM,MAAM,YAAY;EACxB,MAAM,UAAU,QAAQ,IAAI;EAE5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,WAAW,CAAC,OAAO,OAAO;AACtD,iBAAc,KAAK;IACjB,OAAO;IACP,QAAQ;KAAE,MAAM;KAAW,MAAM,OAAO;KAAM;IAC/C,CAAC;AACF,cAAW,KAAK;IAAE;IAAW,QAAQ;IAAO,CAAC;SACxC;GACL,MAAM,kBAAkB,mBAAmB,IAAI;GAC/C,MAAM,iBAAiB,sBAAsB,IAAI;GAEjD,MAAM,iBAAiB;IAAE,GADH,oBAAoB,KAAK,gBAAgB;IACpB,GAAG;IAAg
B;GAC9D,MAAM,kBAAkB,IAAI,QAC1B,2BACC,UAAU,WAAW,WAAW;AAC/B,QAAI,uBAAuB,QAAQ,gBAAgB,CACjD,QAAO;AAET,WAAO,eAAe,eAAe,WAAW;KAEnD;GAGD,MAAM,YAAY,kBAAkB,KAAK,gBAAgB;AACzD,QAAK,MAAM,SAAS,WAAW;AAC7B,QAAI,uBAAuB,SAAS,MAAM,CAAE;AAC5C,QAAI,eAAe,OAAQ;AAC3B,WAAO,KACL,wFACA,WAAW,IACX,OACA,aAAa,MAAM,SACpB;;GAGH,MAAM,aAAa,gBAAgB,MAAM,CAAC,QAAQ,SAAS,GAAG;AAC9D,mBAAgB,KAAK;IAAE,OAAO;IAAG;IAAW;IAAK;IAAS;IAAY,CAAC;;;CAoB3E,MAAM,eAA8D,EAAE;AAEtE,KAAI,gBAAgB,SAAS,GAAG;EAC9B,IAAI,YAAY;EAChB,MAAM,QAAQ,gBAAgB;AAC9B,UAAQ,MACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,MAAM,MAAM,GACtE;EAED,MAAM,cAAc,OAAO,EACzB,OACA,WACA,KACA,SACA,iBAC+D;GAC/D,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF;AACA,WAAQ,OACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,IAAI,UAAU,GAAG,MAAM,GACjF;AAED,UAAO,MACL,6CACA,WACA,OAAO,OAAO,OACd,OAAO,QAAQ,YAAY,UAAU,EACtC;AAED,OAAI,OAAO,OAAO,UAAU,UAAU;IACpC,MAAM,WACJ,OAAO,OAAO,OAAO,WAAW;AAClC,WAAO,KAAK,8BAA8B,WAAW,SAAS;IAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,WAAO;KACL,QAAQ;KACR;KACA,QAAQ;MAAE,MAAM;MAAW;MAAM;KACjC,YAAY;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;KAChD,OAAO,WAAW,SAAS;KAC5B;;GAGH,MAAM,EAAE,MAAM,eAAe,mBAAmB,QAAQ,KAAK,UAAU;AACvE,UAAO;IACL,QAAQ;IACR;IACA,QAAQ;KAAE,MAAM;KAAW;KAAM;IACjC,YAAY;KAAE,MAAM;KAAS;KAAM,OAAO,CAAC;KAAY;IACxD;;EAIH,MAAM,uBACJ,SACA,gBACG;AACH,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;IACvC,MAAM,QAAQ,QAAQ;IACtB,MAAM,EAAE,cAAc,gBAAgB,cAAc;AAEpD,QAAI,MAAM,WAAW,aAAa;KAChC,MAAM,MAAM,MAAM;AAClB,kBAAa,KAAK;MAAE,OAAO,IAAI;MAAO,QAAQ,IAAI;MAAQ,CAAC;AAC3D,WAAM,QAAQ,aAAa,IAAI;AAC/B,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ,IAAI,WAAW;MACvB,OAAO,IAAI,WAAW,SAAS,IAAI,QAAQ;MAC5C,CAAC;WACG;KACL,MAAM,EAAE,KAAK,SAAS,UAAU,gBAAgB,cAAc;KAC9D,MAAM,SACJ,MAAM,kBAAkB,QACpB,MAAM,OAAO,UACb,OAAO,MAAM,OAAO;AAC1B,YAAO,KAAK,gCAAgC,WAAW,OAAO;KAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,kBAAa,KAAK;MAAE;MAAO,QAAQ;OAAE,MAAM;OAAW;OAAM;MAAE,CAAC;AAC/D,WAAM,QAAQ,aAAa;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;AAC/D,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ;MACR,OAAO,WAAW,OAAO;MAC1B,CAAC;;;;AAKR,MAAI,gBAAgB,SAAS,YAC3B,MA
AK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,aAAa;GAC5D,MAAM,QAAQ,gBAAgB,MAAM,GAAG,IAAI,YAAY;AAEvD,uBADqB,MAAM,QAAQ,WAAW,MAAM,IAAI,YAAY,CAAC,EACnC,EAAE;AACpC,SAAM,UAAU,MAAM;;OAEnB;AAIL,uBAHgB,MAAM,QAAQ,WAC5B,gBAAgB,IAAI,YAAY,CACjC,EAC4B,EAAE;AAC/B,SAAM,UAAU,MAAM;;AAGxB,UAAQ,KAAK,GAAG;;CAGlB,MAAM,YAAY,YAAY,KAAK,GAAG,aAAa,KAAM,QAAQ,EAAE;AAGnE,KAAI,WAAW,SAAS,GAAG;EACzB,MAAM,aAAa,KAAK,IAAI,GAAG,WAAW,KAAK,MAAM,EAAE,UAAU,OAAO,CAAC;EACzE,MAAM,YAAY,GAAG,IAAI,IAAI,OAAO,GAAG,CAAC;AACxC,UAAQ,IAAI,GAAG;AACf,UAAQ,IACN,KAAK,GAAG,KAAK,kBAAkB,CAAC,GAAG,GAAG,IAAI,IAAI,WAAW,OAAO,GAAG,GACpE;AACD,UAAQ,IAAI,KAAK,YAAY;AAC7B,OAAK,MAAM,SAAS,YAAY;GAC9B,MAAM,MAAM,MAAM,SACd,GAAG,KAAK,GAAG,IAAI,QAAQ,CAAC,GACxB,MAAM,WAAW,QACf,SAAS,GAAG,KAAK,GAAG,MAAM,QAAQ,CAAC,KACnC,SAAS,GAAG,KAAK,GAAG,OAAO,QAAQ,CAAC;GAC1C,MAAM,UAAU,MAAM,UAAU,OAAO,WAAW;GAClD,MAAM,OAAO,MAAM,SAAS,GAAG,IAAI,GAAG,cAAc,QAAQ,CAAC,GAAG;GAChE,MAAM,YAAY,MAAM,OAAO,QAAQ,MAAM,eAAe,GAAG;GAC/D,MAAM,SAAS,YAAY,KAAK,GAAG,IAAI,UAAU,KAAK;AACtD,WAAQ,IAAI,KAAK,IAAI,IAAI,OAAO,SAAS;;EAE3C,MAAM,WAAW,WAAW,QACzB,MAAM,EAAE,WAAW,UAAU,CAAC,EAAE,OAClC,CAAC;EACF,MAAM,aAAa,WAAW,QAC3B,MAAM,EAAE,WAAW,SAAS,CAAC,EAAE,OACjC,CAAC;EACF,MAAM,aAAa,WAAW,QAAQ,MAAM,EAAE,OAAO,CAAC;AACtD,UAAQ,IAAI,KAAK,YAAY;EAC7B,MAAM,QAAQ,CAAC,GAAG,SAAS,OAAO,GAAG,WAAW,aAAa;AAC7D,MAAI,aAAa,EACf,OAAM,KAAK,GAAG,WAAW,GAAG,eAAe,IAAI,UAAU,WAAW;AACtE,UAAQ,IAAI,KAAK,MAAM,KAAK,KAAK,CAAC,IAAI,GAAG,IAAI,GAAG,QAAQ,GAAG,GAAG;AAC9D,UAAQ,IAAI,GAAG;;AAIjB,QAAO,CAAC,GAAG,eAAe,GAAG,aAAa,CACvC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,KAAK,MAAM,EAAE,OAAO;;;;;;;AAQzB,SAAS,mBAAmB,UAA0B;AACpD,QAAO,SAAS,QAAQ,UAAU,GAAG;;;;;;;;;;;;AAavC,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAM,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
@@ -0,0 +1,38 @@
1
+ import { createLogger } from "../../logging/logger.js";
2
+ import crypto from "node:crypto";
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+
6
+ //#region src/type-generator/serving/cache.ts
7
+ const logger = createLogger("type-generator:serving:cache");
8
+ const CACHE_VERSION = "1";
9
+ const CACHE_FILE = ".appkit-serving-types-cache.json";
10
+ const CACHE_DIR = path.join(process.cwd(), "node_modules", ".databricks", "appkit");
11
+ function hashSchema(schemaJson) {
12
+ return crypto.createHash("sha256").update(schemaJson).digest("hex");
13
+ }
14
+ async function loadServingCache() {
15
+ const cachePath = path.join(CACHE_DIR, CACHE_FILE);
16
+ try {
17
+ await fs.mkdir(CACHE_DIR, { recursive: true });
18
+ const raw = await fs.readFile(cachePath, "utf8");
19
+ const cache = JSON.parse(raw);
20
+ if (cache.version === CACHE_VERSION) return cache;
21
+ logger.debug("Cache version mismatch, starting fresh");
22
+ } catch (err) {
23
+ if (err.code !== "ENOENT") logger.warn("Cache file is corrupted, flushing cache completely.");
24
+ }
25
+ return {
26
+ version: CACHE_VERSION,
27
+ endpoints: {}
28
+ };
29
+ }
30
+ async function saveServingCache(cache) {
31
+ const cachePath = path.join(CACHE_DIR, CACHE_FILE);
32
+ await fs.mkdir(CACHE_DIR, { recursive: true });
33
+ await fs.writeFile(cachePath, JSON.stringify(cache, null, 2), "utf8");
34
+ }
35
+
36
+ //#endregion
37
+ export { CACHE_VERSION, hashSchema, loadServingCache, saveServingCache };
38
+ //# sourceMappingURL=cache.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cache.js","names":[],"sources":["../../../src/type-generator/serving/cache.ts"],"sourcesContent":["import crypto from \"node:crypto\";\nimport fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { createLogger } from \"../../logging/logger\";\n\nconst logger = createLogger(\"type-generator:serving:cache\");\n\nexport const CACHE_VERSION = \"1\";\nconst CACHE_FILE = \".appkit-serving-types-cache.json\";\nconst CACHE_DIR = path.join(\n process.cwd(),\n \"node_modules\",\n \".databricks\",\n \"appkit\",\n);\n\nexport interface ServingCacheEntry {\n hash: string;\n requestType: string;\n responseType: string;\n chunkType: string | null;\n requestKeys: string[];\n}\n\nexport interface ServingCache {\n version: string;\n endpoints: Record<string, ServingCacheEntry>;\n}\n\nexport function hashSchema(schemaJson: string): string {\n return crypto.createHash(\"sha256\").update(schemaJson).digest(\"hex\");\n}\n\nexport async function loadServingCache(): Promise<ServingCache> {\n const cachePath = path.join(CACHE_DIR, CACHE_FILE);\n try {\n await fs.mkdir(CACHE_DIR, { recursive: true });\n const raw = await fs.readFile(cachePath, \"utf8\");\n const cache = JSON.parse(raw) as ServingCache;\n if (cache.version === CACHE_VERSION) {\n return cache;\n }\n logger.debug(\"Cache version mismatch, starting fresh\");\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code !== \"ENOENT\") {\n logger.warn(\"Cache file is corrupted, flushing cache completely.\");\n }\n }\n return { version: CACHE_VERSION, endpoints: {} };\n}\n\nexport async function saveServingCache(cache: ServingCache): Promise<void> {\n const cachePath = path.join(CACHE_DIR, CACHE_FILE);\n await fs.mkdir(CACHE_DIR, { recursive: true });\n await fs.writeFile(cachePath, JSON.stringify(cache, null, 2), 
\"utf8\");\n}\n"],"mappings":";;;;;;AAKA,MAAM,SAAS,aAAa,+BAA+B;AAE3D,MAAa,gBAAgB;AAC7B,MAAM,aAAa;AACnB,MAAM,YAAY,KAAK,KACrB,QAAQ,KAAK,EACb,gBACA,eACA,SACD;AAeD,SAAgB,WAAW,YAA4B;AACrD,QAAO,OAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;AAGrE,eAAsB,mBAA0C;CAC9D,MAAM,YAAY,KAAK,KAAK,WAAW,WAAW;AAClD,KAAI;AACF,QAAM,GAAG,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;EAC9C,MAAM,MAAM,MAAM,GAAG,SAAS,WAAW,OAAO;EAChD,MAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,MAAI,MAAM,YAAY,cACpB,QAAO;AAET,SAAO,MAAM,yCAAyC;UAC/C,KAAK;AACZ,MAAK,IAA8B,SAAS,SAC1C,QAAO,KAAK,sDAAsD;;AAGtE,QAAO;EAAE,SAAS;EAAe,WAAW,EAAE;EAAE;;AAGlD,eAAsB,iBAAiB,OAAoC;CACzE,MAAM,YAAY,KAAK,KAAK,WAAW,WAAW;AAClD,OAAM,GAAG,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;AAC9C,OAAM,GAAG,UAAU,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,EAAE,OAAO"}
@@ -0,0 +1,108 @@
1
+ //#region src/type-generator/serving/converter.ts
2
+ /**
3
+ * Converts an OpenAPI schema to a TypeScript type string.
4
+ */
5
+ function schemaToTypeString(schema, indent = 0) {
6
+ const pad = " ".repeat(indent);
7
+ if (schema.oneOf) return schema.oneOf.map((s) => schemaToTypeString(s, indent)).join(" | ");
8
+ if (schema.enum) return schema.enum.map((v) => JSON.stringify(v)).join(" | ");
9
+ switch (schema.type) {
10
+ case "string": return "string";
11
+ case "integer":
12
+ case "number": return "number";
13
+ case "boolean": return "boolean";
14
+ case "array": {
15
+ if (!schema.items) return "unknown[]";
16
+ const itemType = schemaToTypeString(schema.items, indent);
17
+ if (itemType.includes(" | ") && !itemType.startsWith("{")) return `(${itemType})[]`;
18
+ return `${itemType}[]`;
19
+ }
20
+ case "object": {
21
+ if (!schema.properties) return "Record<string, unknown>";
22
+ const required = new Set(schema.required ?? []);
23
+ return `{\n${Object.entries(schema.properties).map(([key, prop]) => {
24
+ const optional = !required.has(key) ? "?" : "";
25
+ const nullable = prop.nullable ? " | null" : "";
26
+ const typeStr = schemaToTypeString(prop, indent + 1);
27
+ return `${pad} ${prop.format && (prop.type === "number" || prop.type === "integer") ? `/** @openapi ${prop.format}${prop.nullable ? ", nullable" : ""} */\n${pad} ` : prop.nullable && prop.type === "integer" ? `/** @openapi integer, nullable */\n${pad} ` : ""}${key}${optional}: ${typeStr}${nullable};`;
28
+ }).join("\n")}\n${pad}}`;
29
+ }
30
+ default: return "unknown";
31
+ }
32
+ }
33
+ /**
34
+ * Extracts the top-level property keys from the request schema.
35
+ * Strips the `stream` property (plugin-controlled).
36
+ */
37
+ function extractRequestKeys(operation) {
38
+ const schema = operation.requestBody?.content?.["application/json"]?.schema;
39
+ if (!schema?.properties) return [];
40
+ return Object.keys(schema.properties).filter((k) => k !== "stream");
41
+ }
42
+ /**
43
+ * Extracts and converts the request schema from an OpenAPI path operation.
44
+ * Strips the `stream` property from the request type.
45
+ */
46
+ function convertRequestSchema(operation) {
47
+ const schema = operation.requestBody?.content?.["application/json"]?.schema;
48
+ if (!schema || !schema.properties) return "Record<string, unknown>";
49
+ const { stream: _stream, ...filteredProps } = schema.properties;
50
+ const filteredRequired = (schema.required ?? []).filter((r) => r !== "stream");
51
+ return schemaToTypeString({
52
+ ...schema,
53
+ properties: filteredProps,
54
+ required: filteredRequired.length > 0 ? filteredRequired : void 0
55
+ });
56
+ }
57
+ /**
58
+ * Extracts and converts the response schema from an OpenAPI path operation.
59
+ */
60
+ function convertResponseSchema(operation) {
61
+ const schema = (operation.responses?.["200"])?.content?.["application/json"]?.schema;
62
+ if (!schema) return "unknown";
63
+ return schemaToTypeString(schema);
64
+ }
65
+ /**
66
+ * Derives a streaming chunk type from the response schema.
67
+ * Returns null if the response doesn't follow OpenAI-compatible format.
68
+ *
69
+ * OpenAI-compatible heuristic: response has `choices` array where items
70
+ * have a `message` object property.
71
+ */
72
+ function deriveChunkType(operation) {
73
+ const schema = (operation.responses?.["200"])?.content?.["application/json"]?.schema;
74
+ if (!schema?.properties) return null;
75
+ const choicesProp = schema.properties.choices;
76
+ if (!choicesProp || choicesProp.type !== "array" || !choicesProp.items) return null;
77
+ const choiceItemProps = choicesProp.items.properties;
78
+ if (!choiceItemProps?.message) return null;
79
+ const messageSchema = choiceItemProps.message;
80
+ const chunkProperties = {};
81
+ for (const [key, prop] of Object.entries(schema.properties)) {
82
+ if (key === "usage") continue;
83
+ if (key === "choices") {
84
+ const chunkChoiceProps = {};
85
+ for (const [ck, cp] of Object.entries(choiceItemProps)) if (ck === "message") chunkChoiceProps.delta = { ...messageSchema };
86
+ else if (ck === "finish_reason") chunkChoiceProps[ck] = {
87
+ ...cp,
88
+ nullable: true
89
+ };
90
+ else chunkChoiceProps[ck] = cp;
91
+ chunkProperties[key] = {
92
+ type: "array",
93
+ items: {
94
+ type: "object",
95
+ properties: chunkChoiceProps
96
+ }
97
+ };
98
+ } else chunkProperties[key] = prop;
99
+ }
100
+ return schemaToTypeString({
101
+ type: "object",
102
+ properties: chunkProperties
103
+ });
104
+ }
105
+
106
+ //#endregion
107
+ export { convertRequestSchema, convertResponseSchema, deriveChunkType, extractRequestKeys };
108
+ //# sourceMappingURL=converter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"converter.js","names":[],"sources":["../../../src/type-generator/serving/converter.ts"],"sourcesContent":["import type { OpenApiOperation, OpenApiSchema } from \"./fetcher\";\n\n/**\n * Converts an OpenAPI schema to a TypeScript type string.\n */\nfunction schemaToTypeString(schema: OpenApiSchema, indent = 0): string {\n const pad = \" \".repeat(indent);\n\n if (schema.oneOf) {\n return schema.oneOf.map((s) => schemaToTypeString(s, indent)).join(\" | \");\n }\n\n if (schema.enum) {\n return schema.enum.map((v) => JSON.stringify(v)).join(\" | \");\n }\n\n switch (schema.type) {\n case \"string\":\n return \"string\";\n case \"integer\":\n case \"number\":\n return \"number\";\n case \"boolean\":\n return \"boolean\";\n case \"array\": {\n if (!schema.items) return \"unknown[]\";\n const itemType = schemaToTypeString(schema.items, indent);\n // Wrap union types in parens for array\n if (itemType.includes(\" | \") && !itemType.startsWith(\"{\")) {\n return `(${itemType})[]`;\n }\n return `${itemType}[]`;\n }\n case \"object\": {\n if (!schema.properties) return \"Record<string, unknown>\";\n const required = new Set(schema.required ?? []);\n const entries = Object.entries(schema.properties).map(([key, prop]) => {\n const optional = !required.has(key) ? \"?\" : \"\";\n const nullable = prop.nullable ? \" | null\" : \"\";\n const typeStr = schemaToTypeString(prop, indent + 1);\n const formatComment =\n prop.format && (prop.type === \"number\" || prop.type === \"integer\")\n ? `/** @openapi ${prop.format}${prop.nullable ? \", nullable\" : \"\"} */\\n${pad} `\n : prop.nullable && prop.type === \"integer\"\n ? 
`/** @openapi integer, nullable */\\n${pad} `\n : \"\";\n return `${pad} ${formatComment}${key}${optional}: ${typeStr}${nullable};`;\n });\n return `{\\n${entries.join(\"\\n\")}\\n${pad}}`;\n }\n default:\n return \"unknown\";\n }\n}\n\n/**\n * Extracts the top-level property keys from the request schema.\n * Strips the `stream` property (plugin-controlled).\n */\nexport function extractRequestKeys(operation: OpenApiOperation): string[] {\n const schema = operation.requestBody?.content?.[\"application/json\"]?.schema;\n if (!schema?.properties) return [];\n return Object.keys(schema.properties).filter((k) => k !== \"stream\");\n}\n\n/**\n * Extracts and converts the request schema from an OpenAPI path operation.\n * Strips the `stream` property from the request type.\n */\nexport function convertRequestSchema(operation: OpenApiOperation): string {\n const schema = operation.requestBody?.content?.[\"application/json\"]?.schema;\n if (!schema || !schema.properties) return \"Record<string, unknown>\";\n\n // Strip `stream` property — the plugin controls this\n const { stream: _stream, ...filteredProps } = schema.properties;\n const filteredRequired = (schema.required ?? []).filter(\n (r) => r !== \"stream\",\n );\n\n const filteredSchema: OpenApiSchema = {\n ...schema,\n properties: filteredProps,\n required: filteredRequired.length > 0 ? 
filteredRequired : undefined,\n };\n\n return schemaToTypeString(filteredSchema);\n}\n\n/**\n * Extracts and converts the response schema from an OpenAPI path operation.\n */\nexport function convertResponseSchema(operation: OpenApiOperation): string {\n const response = operation.responses?.[\"200\"];\n const schema = response?.content?.[\"application/json\"]?.schema;\n if (!schema) return \"unknown\";\n return schemaToTypeString(schema);\n}\n\n/**\n * Derives a streaming chunk type from the response schema.\n * Returns null if the response doesn't follow OpenAI-compatible format.\n *\n * OpenAI-compatible heuristic: response has `choices` array where items\n * have a `message` object property.\n */\nexport function deriveChunkType(operation: OpenApiOperation): string | null {\n const response = operation.responses?.[\"200\"];\n const schema = response?.content?.[\"application/json\"]?.schema;\n if (!schema?.properties) return null;\n\n const choicesProp = schema.properties.choices;\n if (!choicesProp || choicesProp.type !== \"array\" || !choicesProp.items)\n return null;\n\n const choiceItemProps = choicesProp.items.properties;\n if (!choiceItemProps?.message) return null;\n\n // It's OpenAI-compatible. 
Build the chunk type by transforming.\n const messageSchema = choiceItemProps.message;\n\n // Build chunk schema: replace message with delta (Partial), make finish_reason nullable, drop usage\n const chunkProperties: Record<string, OpenApiSchema> = {};\n\n for (const [key, prop] of Object.entries(schema.properties)) {\n if (key === \"usage\") continue; // Drop usage from chunks\n if (key === \"choices\") {\n // Transform choices items\n const chunkChoiceProps: Record<string, OpenApiSchema> = {};\n for (const [ck, cp] of Object.entries(choiceItemProps)) {\n if (ck === \"message\") {\n // Replace message with delta: Partial<message>\n chunkChoiceProps.delta = { ...messageSchema };\n } else if (ck === \"finish_reason\") {\n chunkChoiceProps[ck] = { ...cp, nullable: true };\n } else {\n chunkChoiceProps[ck] = cp;\n }\n }\n chunkProperties[key] = {\n type: \"array\",\n items: {\n type: \"object\",\n properties: chunkChoiceProps,\n },\n };\n } else {\n chunkProperties[key] = prop;\n }\n }\n\n const chunkSchema: OpenApiSchema = {\n type: \"object\",\n properties: chunkProperties,\n };\n\n // Delta properties are already optional (no `required` array in the schema),\n // so schemaToTypeString renders them with `?:` — no Partial<> wrapper needed.\n return 
schemaToTypeString(chunkSchema);\n}\n"],"mappings":";;;;AAKA,SAAS,mBAAmB,QAAuB,SAAS,GAAW;CACrE,MAAM,MAAM,KAAK,OAAO,OAAO;AAE/B,KAAI,OAAO,MACT,QAAO,OAAO,MAAM,KAAK,MAAM,mBAAmB,GAAG,OAAO,CAAC,CAAC,KAAK,MAAM;AAG3E,KAAI,OAAO,KACT,QAAO,OAAO,KAAK,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,MAAM;AAG9D,SAAQ,OAAO,MAAf;EACE,KAAK,SACH,QAAO;EACT,KAAK;EACL,KAAK,SACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,SAAS;AACZ,OAAI,CAAC,OAAO,MAAO,QAAO;GAC1B,MAAM,WAAW,mBAAmB,OAAO,OAAO,OAAO;AAEzD,OAAI,SAAS,SAAS,MAAM,IAAI,CAAC,SAAS,WAAW,IAAI,CACvD,QAAO,IAAI,SAAS;AAEtB,UAAO,GAAG,SAAS;;EAErB,KAAK,UAAU;AACb,OAAI,CAAC,OAAO,WAAY,QAAO;GAC/B,MAAM,WAAW,IAAI,IAAI,OAAO,YAAY,EAAE,CAAC;AAa/C,UAAO,MAZS,OAAO,QAAQ,OAAO,WAAW,CAAC,KAAK,CAAC,KAAK,UAAU;IACrE,MAAM,WAAW,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM;IAC5C,MAAM,WAAW,KAAK,WAAW,YAAY;IAC7C,MAAM,UAAU,mBAAmB,MAAM,SAAS,EAAE;AAOpD,WAAO,GAAG,IAAI,IALZ,KAAK,WAAW,KAAK,SAAS,YAAY,KAAK,SAAS,aACpD,gBAAgB,KAAK,SAAS,KAAK,WAAW,eAAe,GAAG,OAAO,IAAI,MAC3E,KAAK,YAAY,KAAK,SAAS,YAC7B,sCAAsC,IAAI,MAC1C,KAC0B,MAAM,SAAS,IAAI,UAAU,SAAS;KACxE,CACmB,KAAK,KAAK,CAAC,IAAI,IAAI;;EAE1C,QACE,QAAO;;;;;;;AAQb,SAAgB,mBAAmB,WAAuC;CACxE,MAAM,SAAS,UAAU,aAAa,UAAU,qBAAqB;AACrE,KAAI,CAAC,QAAQ,WAAY,QAAO,EAAE;AAClC,QAAO,OAAO,KAAK,OAAO,WAAW,CAAC,QAAQ,MAAM,MAAM,SAAS;;;;;;AAOrE,SAAgB,qBAAqB,WAAqC;CACxE,MAAM,SAAS,UAAU,aAAa,UAAU,qBAAqB;AACrE,KAAI,CAAC,UAAU,CAAC,OAAO,WAAY,QAAO;CAG1C,MAAM,EAAE,QAAQ,SAAS,GAAG,kBAAkB,OAAO;CACrD,MAAM,oBAAoB,OAAO,YAAY,EAAE,EAAE,QAC9C,MAAM,MAAM,SACd;AAQD,QAAO,mBAN+B;EACpC,GAAG;EACH,YAAY;EACZ,UAAU,iBAAiB,SAAS,IAAI,mBAAmB;EAC5D,CAEwC;;;;;AAM3C,SAAgB,sBAAsB,WAAqC;CAEzE,MAAM,UADW,UAAU,YAAY,SACd,UAAU,qBAAqB;AACxD,KAAI,CAAC,OAAQ,QAAO;AACpB,QAAO,mBAAmB,OAAO;;;;;;;;;AAUnC,SAAgB,gBAAgB,WAA4C;CAE1E,MAAM,UADW,UAAU,YAAY,SACd,UAAU,qBAAqB;AACxD,KAAI,CAAC,QAAQ,WAAY,QAAO;CAEhC,MAAM,cAAc,OAAO,WAAW;AACtC,KAAI,CAAC,eAAe,YAAY,SAAS,WAAW,CAAC,YAAY,MAC/D,QAAO;CAET,MAAM,kBAAkB,YAAY,MAAM;AAC1C,KAAI,CAAC,iBAAiB,QAAS,QAAO;CAGtC,MAAM,gBAAgB,gBAAgB;CAGtC,MAAM,kBAAiD,EAAE;AAEzD,MAAK,MAAM,CAAC,KAAK,SAAS,OAAO,QAAQ,OAAO,WAAW,EAAE;A
AC3D,MAAI,QAAQ,QAAS;AACrB,MAAI,QAAQ,WAAW;GAErB,MAAM,mBAAkD,EAAE;AAC1D,QAAK,MAAM,CAAC,IAAI,OAAO,OAAO,QAAQ,gBAAgB,CACpD,KAAI,OAAO,UAET,kBAAiB,QAAQ,EAAE,GAAG,eAAe;YACpC,OAAO,gBAChB,kBAAiB,MAAM;IAAE,GAAG;IAAI,UAAU;IAAM;OAEhD,kBAAiB,MAAM;AAG3B,mBAAgB,OAAO;IACrB,MAAM;IACN,OAAO;KACL,MAAM;KACN,YAAY;KACb;IACF;QAED,iBAAgB,OAAO;;AAW3B,QAAO,mBAP4B;EACjC,MAAM;EACN,YAAY;EACb,CAIqC"}
@@ -0,0 +1,54 @@
1
+ import { createLogger } from "../../logging/logger.js";
2
+ import { ApiError } from "@databricks/sdk-experimental";
3
+
4
+ //#region src/type-generator/serving/fetcher.ts
5
+ const logger = createLogger("type-generator:serving:fetcher");
6
+ /**
7
+ * Fetches the OpenAPI schema for a serving endpoint using the SDK.
8
+ * Returns null if the endpoint is not found or access is denied.
9
+ */
10
+ async function fetchOpenApiSchema(client, endpointName, servedModel) {
11
+ try {
12
+ const response = await client.servingEndpoints.getOpenApi({ name: endpointName });
13
+ if (!response.contents) {
14
+ logger.warn("Empty OpenAPI response for '%s', skipping type generation", endpointName);
15
+ return null;
16
+ }
17
+ const text = await new Response(response.contents).text();
18
+ const rawSpec = JSON.parse(text);
19
+ if (typeof rawSpec !== "object" || rawSpec === null || !("paths" in rawSpec) || typeof rawSpec.paths !== "object") {
20
+ logger.warn("Invalid OpenAPI schema structure for '%s', skipping", endpointName);
21
+ return null;
22
+ }
23
+ const spec = rawSpec;
24
+ const pathKeys = Object.keys(spec.paths ?? {});
25
+ if (pathKeys.length === 0) {
26
+ logger.warn("No paths in OpenAPI schema for '%s'", endpointName);
27
+ return null;
28
+ }
29
+ let pathKey;
30
+ if (servedModel) {
31
+ const match = pathKeys.find((k) => k.includes(`/${servedModel}/`));
32
+ if (!match) {
33
+ logger.warn("Served model '%s' not found in schema for '%s', using first path", servedModel, endpointName);
34
+ pathKey = pathKeys[0];
35
+ } else pathKey = match;
36
+ } else pathKey = pathKeys[0];
37
+ return {
38
+ spec,
39
+ pathKey
40
+ };
41
+ } catch (err) {
42
+ if (err instanceof ApiError) {
43
+ const status = err.statusCode ?? 0;
44
+ if (status === 404) logger.warn("Endpoint '%s' not found, skipping type generation", endpointName);
45
+ else if (status === 403) logger.warn("Access denied to endpoint '%s' schema, skipping type generation", endpointName);
46
+ else logger.warn("Failed to fetch schema for '%s' (HTTP %d), skipping: %s", endpointName, status, err.message);
47
+ } else logger.warn("Error fetching schema for '%s': %s", endpointName, err.message);
48
+ return null;
49
+ }
50
+ }
51
+
52
+ //#endregion
53
+ export { fetchOpenApiSchema };
54
+ //# sourceMappingURL=fetcher.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"fetcher.js","names":[],"sources":["../../../src/type-generator/serving/fetcher.ts"],"sourcesContent":["import { ApiError, type WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { createLogger } from \"../../logging/logger\";\n\nconst logger = createLogger(\"type-generator:serving:fetcher\");\n\ninterface OpenApiSpec {\n openapi: string;\n info: { title: string; version: string };\n paths: Record<string, Record<string, OpenApiOperation>>;\n}\n\nexport interface OpenApiOperation {\n requestBody?: {\n content: {\n \"application/json\": {\n schema: OpenApiSchema;\n };\n };\n };\n responses?: Record<\n string,\n {\n content?: {\n \"application/json\": {\n schema: OpenApiSchema;\n };\n };\n }\n >;\n}\n\nexport interface OpenApiSchema {\n type?: string;\n properties?: Record<string, OpenApiSchema>;\n required?: string[];\n items?: OpenApiSchema;\n enum?: string[];\n nullable?: boolean;\n oneOf?: OpenApiSchema[];\n format?: string;\n}\n\n/**\n * Fetches the OpenAPI schema for a serving endpoint using the SDK.\n * Returns null if the endpoint is not found or access is denied.\n */\nexport async function fetchOpenApiSchema(\n client: WorkspaceClient,\n endpointName: string,\n servedModel?: string,\n): Promise<{ spec: OpenApiSpec; pathKey: string } | null> {\n try {\n const response = await client.servingEndpoints.getOpenApi({\n name: endpointName,\n });\n\n if (!response.contents) {\n logger.warn(\n \"Empty OpenAPI response for '%s', skipping type generation\",\n endpointName,\n );\n return null;\n }\n\n const text = await new Response(response.contents).text();\n const rawSpec: unknown = JSON.parse(text);\n\n if (\n typeof rawSpec !== \"object\" ||\n rawSpec === null ||\n !(\"paths\" in rawSpec) ||\n typeof (rawSpec as OpenApiSpec).paths !== \"object\"\n ) {\n logger.warn(\n \"Invalid OpenAPI schema structure for '%s', skipping\",\n endpointName,\n );\n return null;\n }\n const spec = rawSpec as OpenApiSpec;\n\n // Find the right path 
key\n const pathKeys = Object.keys(spec.paths ?? {});\n if (pathKeys.length === 0) {\n logger.warn(\"No paths in OpenAPI schema for '%s'\", endpointName);\n return null;\n }\n\n let pathKey: string;\n if (servedModel) {\n const match = pathKeys.find((k) => k.includes(`/${servedModel}/`));\n if (!match) {\n logger.warn(\n \"Served model '%s' not found in schema for '%s', using first path\",\n servedModel,\n endpointName,\n );\n pathKey = pathKeys[0];\n } else {\n pathKey = match;\n }\n } else {\n pathKey = pathKeys[0];\n }\n\n return { spec, pathKey };\n } catch (err) {\n if (err instanceof ApiError) {\n const status = err.statusCode ?? 0;\n if (status === 404) {\n logger.warn(\n \"Endpoint '%s' not found, skipping type generation\",\n endpointName,\n );\n } else if (status === 403) {\n logger.warn(\n \"Access denied to endpoint '%s' schema, skipping type generation\",\n endpointName,\n );\n } else {\n logger.warn(\n \"Failed to fetch schema for '%s' (HTTP %d), skipping: %s\",\n endpointName,\n status,\n err.message,\n );\n }\n } else {\n logger.warn(\n \"Error fetching schema for '%s': %s\",\n endpointName,\n (err as Error).message,\n );\n }\n return null;\n 
}\n}\n"],"mappings":";;;;AAGA,MAAM,SAAS,aAAa,iCAAiC;;;;;AA2C7D,eAAsB,mBACpB,QACA,cACA,aACwD;AACxD,KAAI;EACF,MAAM,WAAW,MAAM,OAAO,iBAAiB,WAAW,EACxD,MAAM,cACP,CAAC;AAEF,MAAI,CAAC,SAAS,UAAU;AACtB,UAAO,KACL,6DACA,aACD;AACD,UAAO;;EAGT,MAAM,OAAO,MAAM,IAAI,SAAS,SAAS,SAAS,CAAC,MAAM;EACzD,MAAM,UAAmB,KAAK,MAAM,KAAK;AAEzC,MACE,OAAO,YAAY,YACnB,YAAY,QACZ,EAAE,WAAW,YACb,OAAQ,QAAwB,UAAU,UAC1C;AACA,UAAO,KACL,uDACA,aACD;AACD,UAAO;;EAET,MAAM,OAAO;EAGb,MAAM,WAAW,OAAO,KAAK,KAAK,SAAS,EAAE,CAAC;AAC9C,MAAI,SAAS,WAAW,GAAG;AACzB,UAAO,KAAK,uCAAuC,aAAa;AAChE,UAAO;;EAGT,IAAI;AACJ,MAAI,aAAa;GACf,MAAM,QAAQ,SAAS,MAAM,MAAM,EAAE,SAAS,IAAI,YAAY,GAAG,CAAC;AAClE,OAAI,CAAC,OAAO;AACV,WAAO,KACL,oEACA,aACA,aACD;AACD,cAAU,SAAS;SAEnB,WAAU;QAGZ,WAAU,SAAS;AAGrB,SAAO;GAAE;GAAM;GAAS;UACjB,KAAK;AACZ,MAAI,eAAe,UAAU;GAC3B,MAAM,SAAS,IAAI,cAAc;AACjC,OAAI,WAAW,IACb,QAAO,KACL,qDACA,aACD;YACQ,WAAW,IACpB,QAAO,KACL,mEACA,aACD;OAED,QAAO,KACL,2DACA,cACA,QACA,IAAI,QACL;QAGH,QAAO,KACL,sCACA,cACC,IAAc,QAChB;AAEH,SAAO"}