@databricks/appkit 0.22.0 → 0.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. package/CLAUDE.md +11 -0
  2. package/NOTICE.md +1 -0
  3. package/dist/appkit/package.js +1 -1
  4. package/dist/cache/index.js.map +1 -1
  5. package/dist/cli/commands/docs.js +7 -1
  6. package/dist/cli/commands/docs.js.map +1 -1
  7. package/dist/cli/commands/generate-types.js +27 -15
  8. package/dist/cli/commands/generate-types.js.map +1 -1
  9. package/dist/cli/commands/lint.js +3 -1
  10. package/dist/cli/commands/lint.js.map +1 -1
  11. package/dist/cli/commands/plugin/add-resource/add-resource.js +73 -8
  12. package/dist/cli/commands/plugin/add-resource/add-resource.js.map +1 -1
  13. package/dist/cli/commands/plugin/create/create.js +164 -20
  14. package/dist/cli/commands/plugin/create/create.js.map +1 -1
  15. package/dist/cli/commands/plugin/create/resource-defaults.js +5 -1
  16. package/dist/cli/commands/plugin/create/resource-defaults.js.map +1 -1
  17. package/dist/cli/commands/plugin/index.js +7 -1
  18. package/dist/cli/commands/plugin/index.js.map +1 -1
  19. package/dist/cli/commands/plugin/list/list.js +7 -1
  20. package/dist/cli/commands/plugin/list/list.js.map +1 -1
  21. package/dist/cli/commands/plugin/sync/sync.js +27 -14
  22. package/dist/cli/commands/plugin/sync/sync.js.map +1 -1
  23. package/dist/cli/commands/plugin/validate/validate.js +39 -9
  24. package/dist/cli/commands/plugin/validate/validate.js.map +1 -1
  25. package/dist/cli/commands/setup.js +6 -5
  26. package/dist/cli/commands/setup.js.map +1 -1
  27. package/dist/connectors/index.js +1 -0
  28. package/dist/connectors/lakebase/index.js.map +1 -1
  29. package/dist/connectors/lakebase-v1/client.js.map +1 -1
  30. package/dist/connectors/serving/client.js +47 -0
  31. package/dist/connectors/serving/client.js.map +1 -0
  32. package/dist/connectors/vector-search/client.js +9 -0
  33. package/dist/connectors/vector-search/client.js.map +1 -0
  34. package/dist/connectors/vector-search/index.js +3 -0
  35. package/dist/index.d.ts +6 -1
  36. package/dist/index.js +4 -1
  37. package/dist/index.js.map +1 -1
  38. package/dist/plugin/dev-reader.js.map +1 -1
  39. package/dist/plugin/execution-result.d.ts +26 -0
  40. package/dist/plugin/execution-result.d.ts.map +1 -0
  41. package/dist/plugin/index.d.ts +1 -0
  42. package/dist/plugin/interceptors/retry.js +1 -1
  43. package/dist/plugin/interceptors/retry.js.map +1 -1
  44. package/dist/plugin/plugin.d.ts +7 -4
  45. package/dist/plugin/plugin.d.ts.map +1 -1
  46. package/dist/plugin/plugin.js +36 -5
  47. package/dist/plugin/plugin.js.map +1 -1
  48. package/dist/plugins/analytics/analytics.d.ts.map +1 -1
  49. package/dist/plugins/analytics/analytics.js +2 -3
  50. package/dist/plugins/analytics/analytics.js.map +1 -1
  51. package/dist/plugins/files/plugin.d.ts +1 -0
  52. package/dist/plugins/files/plugin.d.ts.map +1 -1
  53. package/dist/plugins/files/plugin.js +36 -59
  54. package/dist/plugins/files/plugin.js.map +1 -1
  55. package/dist/plugins/index.d.ts +4 -1
  56. package/dist/plugins/index.js +2 -0
  57. package/dist/plugins/server/index.d.ts +1 -1
  58. package/dist/plugins/server/vite-dev-server.js +6 -1
  59. package/dist/plugins/server/vite-dev-server.js.map +1 -1
  60. package/dist/plugins/serving/defaults.js +10 -0
  61. package/dist/plugins/serving/defaults.js.map +1 -0
  62. package/dist/plugins/serving/index.d.ts +2 -0
  63. package/dist/plugins/serving/index.js +3 -0
  64. package/dist/plugins/serving/manifest.js +53 -0
  65. package/dist/plugins/serving/manifest.js.map +1 -0
  66. package/dist/plugins/serving/schema-filter.js +52 -0
  67. package/dist/plugins/serving/schema-filter.js.map +1 -0
  68. package/dist/plugins/serving/serving.d.ts +38 -0
  69. package/dist/plugins/serving/serving.d.ts.map +1 -0
  70. package/dist/plugins/serving/serving.js +227 -0
  71. package/dist/plugins/serving/serving.js.map +1 -0
  72. package/dist/plugins/serving/types.d.ts +59 -0
  73. package/dist/plugins/serving/types.d.ts.map +1 -0
  74. package/dist/shared/src/execute.d.ts +1 -1
  75. package/dist/stream/stream-manager.js +1 -0
  76. package/dist/stream/stream-manager.js.map +1 -1
  77. package/dist/stream/types.js +2 -1
  78. package/dist/stream/types.js.map +1 -1
  79. package/dist/type-generator/cache.js +1 -1
  80. package/dist/type-generator/cache.js.map +1 -1
  81. package/dist/type-generator/index.js +15 -1
  82. package/dist/type-generator/index.js.map +1 -1
  83. package/dist/type-generator/migration.js +155 -0
  84. package/dist/type-generator/migration.js.map +1 -0
  85. package/dist/type-generator/query-registry.js +77 -4
  86. package/dist/type-generator/query-registry.js.map +1 -1
  87. package/dist/type-generator/serving/cache.js +38 -0
  88. package/dist/type-generator/serving/cache.js.map +1 -0
  89. package/dist/type-generator/serving/converter.js +108 -0
  90. package/dist/type-generator/serving/converter.js.map +1 -0
  91. package/dist/type-generator/serving/fetcher.js +54 -0
  92. package/dist/type-generator/serving/fetcher.js.map +1 -0
  93. package/dist/type-generator/serving/generator.js +206 -0
  94. package/dist/type-generator/serving/generator.js.map +1 -0
  95. package/dist/type-generator/serving/server-file-extractor.d.ts +22 -0
  96. package/dist/type-generator/serving/server-file-extractor.d.ts.map +1 -0
  97. package/dist/type-generator/serving/server-file-extractor.js +131 -0
  98. package/dist/type-generator/serving/server-file-extractor.js.map +1 -0
  99. package/dist/type-generator/serving/vite-plugin.d.ts +24 -0
  100. package/dist/type-generator/serving/vite-plugin.d.ts.map +1 -0
  101. package/dist/type-generator/serving/vite-plugin.js +60 -0
  102. package/dist/type-generator/serving/vite-plugin.js.map +1 -0
  103. package/dist/type-generator/vite-plugin.d.ts.map +1 -1
  104. package/dist/type-generator/vite-plugin.js +3 -4
  105. package/dist/type-generator/vite-plugin.js.map +1 -1
  106. package/docs/api/appkit/Class.Plugin.md +8 -3
  107. package/docs/api/appkit/Function.appKitServingTypesPlugin.md +24 -0
  108. package/docs/api/appkit/Function.extractServingEndpoints.md +22 -0
  109. package/docs/api/appkit/Function.findServerFile.md +20 -0
  110. package/docs/api/appkit/Interface.EndpointConfig.md +23 -0
  111. package/docs/api/appkit/Interface.ServingEndpointEntry.md +30 -0
  112. package/docs/api/appkit/Interface.ServingEndpointRegistry.md +3 -0
  113. package/docs/api/appkit/TypeAlias.ExecutionResult.md +36 -0
  114. package/docs/api/appkit/TypeAlias.ServingFactory.md +19 -0
  115. package/docs/api/appkit.md +39 -31
  116. package/docs/development/type-generation.md +6 -5
  117. package/docs/faq.md +66 -0
  118. package/docs/plugins/analytics.md +1 -1
  119. package/docs/plugins/custom-plugins.md +4 -0
  120. package/docs/plugins/plugin-management.md +22 -6
  121. package/docs/plugins/serving.md +223 -0
  122. package/docs/plugins/vector-search.md +247 -0
  123. package/llms.txt +11 -0
  124. package/package.json +2 -2
  125. package/sbom.cdx.json +1 -1
@@ -0,0 +1,155 @@
1
+ import { createLogger } from "../logging/logger.js";
2
+ import fs from "node:fs/promises";
3
+ import path from "node:path";
4
+ import fs$1 from "node:fs";
5
+ import pc from "picocolors";
6
+
7
+ //#region src/type-generator/migration.ts
8
+ const logger = createLogger("type-generator:migration");
9
+ /**
10
+ * Derive project root from an outFile path.
11
+ * outFile is always `<projectRoot>/shared/appkit-types/<file>` — both the Vite plugins
12
+ * and the CLI construct it this way, so going up two levels is safe.
13
+ *
14
+ * Validates that the resolved root contains a package.json — if not, logs a warning
15
+ * so custom outFile paths don't silently operate on the wrong directory.
16
+ */
17
+ function resolveProjectRoot(outFile) {
18
+ const root = path.resolve(path.dirname(outFile), "..", "..");
19
+ if (!fs$1.existsSync(path.join(root, "package.json"))) logger.warn("Resolved project root %s has no package.json — migration may target the wrong directory. Check your outFile path: %s", root, outFile);
20
+ return root;
21
+ }
22
+ /**
23
+ * Remove old generated types from client/src/ (pre-shared/ location).
24
+ * Best-effort: silently ignores missing files.
25
+ */
26
+ async function removeOldGeneratedTypes(projectRoot, filename) {
27
+ const oldFile = path.join(projectRoot, "client", "src", filename);
28
+ try {
29
+ await fs.unlink(oldFile);
30
+ logger.debug("Removed old types at %s", oldFile);
31
+ } catch {}
32
+ }
33
+ const migratedProjects = /* @__PURE__ */ new Set();
34
+ /**
35
+ * One-time config migration: update tsconfig and package.json for shared/ types output.
36
+ * Idempotent — each sub-migration checks current file state and skips if already migrated.
37
+ * Deduplicates per project root so monorepo builds migrate each app independently.
38
+ * Opt-out: set `"appkit": { "autoMigrate": false }` in package.json.
39
+ */
40
+ async function migrateProjectConfig(projectRoot) {
41
+ const resolved = path.resolve(projectRoot);
42
+ if (migratedProjects.has(resolved)) return;
43
+ migratedProjects.add(resolved);
44
+ if (await isAutoMigrateDisabled(projectRoot)) {
45
+ logger.debug("Auto-migration disabled via package.json appkit.autoMigrate");
46
+ return;
47
+ }
48
+ const results = [];
49
+ results.push(...await migrateTsconfigClient(projectRoot));
50
+ results.push(...await migrateTsconfigServer(projectRoot));
51
+ results.push(...await migratePackageJsonScripts(projectRoot));
52
+ if (results.length > 0) printMigrationSummary(results);
53
+ }
54
+ async function isAutoMigrateDisabled(projectRoot) {
55
+ try {
56
+ const raw = await fs.readFile(path.join(projectRoot, "package.json"), "utf-8");
57
+ return JSON.parse(raw).appkit?.autoMigrate === false;
58
+ } catch {
59
+ return false;
60
+ }
61
+ }
62
+ /** Strip JSONC comments (block and line) so JSON.parse can handle tsconfig files. */
63
+ function stripJsonComments(text) {
64
+ return text.replace(/"(?:[^"\\]|\\.)*"|\/\*[\s\S]*?\*\/|\/\/.*/g, (match) => match.startsWith("\"") ? match : "");
65
+ }
66
+ async function migrateTsconfigClient(projectRoot) {
67
+ const results = [];
68
+ const filePath = path.join(projectRoot, "tsconfig.client.json");
69
+ try {
70
+ const raw = await fs.readFile(filePath, "utf-8");
71
+ const parsed = JSON.parse(stripJsonComments(raw));
72
+ if (!Array.isArray(parsed.include)) return results;
73
+ if (parsed.include.includes("shared/appkit-types")) return results;
74
+ parsed.include.push("shared/appkit-types");
75
+ await fs.writeFile(filePath, `${JSON.stringify(parsed, null, 2)}\n`, "utf-8");
76
+ results.push({
77
+ file: "tsconfig.client.json",
78
+ action: "added \"shared/appkit-types\" to include"
79
+ });
80
+ } catch (err) {
81
+ logger.warn("Failed to migrate tsconfig.client.json: %s", err.message);
82
+ }
83
+ return results;
84
+ }
85
+ async function migrateTsconfigServer(projectRoot) {
86
+ const results = [];
87
+ const filePath = path.join(projectRoot, "tsconfig.server.json");
88
+ try {
89
+ const raw = await fs.readFile(filePath, "utf-8");
90
+ const parsed = JSON.parse(stripJsonComments(raw));
91
+ const opts = parsed.compilerOptions;
92
+ if (!opts || !opts.outDir) return results;
93
+ delete opts.outDir;
94
+ delete opts.declaration;
95
+ delete opts.declarationMap;
96
+ delete opts.sourceMap;
97
+ opts.noEmit = true;
98
+ await fs.writeFile(filePath, `${JSON.stringify(parsed, null, 2)}\n`, "utf-8");
99
+ results.push({
100
+ file: "tsconfig.server.json",
101
+ action: "switched to noEmit mode"
102
+ });
103
+ } catch (err) {
104
+ logger.warn("Failed to migrate tsconfig.server.json: %s", err.message);
105
+ }
106
+ return results;
107
+ }
108
+ const SCRIPT_MIGRATIONS = {
109
+ "build:server": {
110
+ old: "tsdown -c tsdown.server.config.ts",
111
+ new: "tsc -b tsconfig.server.json && tsdown -c tsdown.server.config.ts"
112
+ },
113
+ typecheck: {
114
+ old: "tsc -p ./tsconfig.server.json --noEmit && tsc -p ./tsconfig.client.json --noEmit",
115
+ new: "tsc -b tsconfig.server.json && tsc -b tsconfig.client.json"
116
+ }
117
+ };
118
+ async function migratePackageJsonScripts(projectRoot) {
119
+ const results = [];
120
+ const filePath = path.join(projectRoot, "package.json");
121
+ try {
122
+ const raw = await fs.readFile(filePath, "utf-8");
123
+ const parsed = JSON.parse(raw);
124
+ const scripts = parsed.scripts;
125
+ if (!scripts) return results;
126
+ const updated = [];
127
+ for (const [name, { old, new: replacement }] of Object.entries(SCRIPT_MIGRATIONS)) if (scripts[name] === old) {
128
+ scripts[name] = replacement;
129
+ updated.push(name);
130
+ }
131
+ if (updated.length === 0) return results;
132
+ const indent = raw.match(/^\s+/m)?.[0]?.length === 4 ? 4 : 2;
133
+ await fs.writeFile(filePath, `${JSON.stringify(parsed, null, indent)}\n`, "utf-8");
134
+ results.push({
135
+ file: "package.json",
136
+ action: `updated ${updated.join(" and ")} scripts`
137
+ });
138
+ } catch (err) {
139
+ logger.warn("Failed to migrate package.json scripts: %s", err.message);
140
+ }
141
+ return results;
142
+ }
143
+ function printMigrationSummary(results) {
144
+ const separator = pc.dim("─".repeat(50));
145
+ console.log("");
146
+ console.log(` ${pc.bold("Typegen Migration")}`);
147
+ console.log(` ${separator}`);
148
+ for (const { file, action } of results) console.log(` ${pc.green("✓")} ${file.padEnd(24)} ${pc.dim(action)}`);
149
+ console.log(` ${separator}`);
150
+ console.log("");
151
+ }
152
+
153
+ //#endregion
154
+ export { migrateProjectConfig, removeOldGeneratedTypes, resolveProjectRoot };
155
+ //# sourceMappingURL=migration.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"migration.js","names":["fsSync"],"sources":["../../src/type-generator/migration.ts"],"sourcesContent":["import fsSync from \"node:fs\";\nimport fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport pc from \"picocolors\";\nimport { createLogger } from \"../logging/logger\";\n\nconst logger = createLogger(\"type-generator:migration\");\n\n/**\n * Derive project root from an outFile path.\n * outFile is always `<projectRoot>/shared/appkit-types/<file>` — both the Vite plugins\n * and the CLI construct it this way, so going up two levels is safe.\n *\n * Validates that the resolved root contains a package.json — if not, logs a warning\n * so custom outFile paths don't silently operate on the wrong directory.\n */\nexport function resolveProjectRoot(outFile: string): string {\n const root = path.resolve(path.dirname(outFile), \"..\", \"..\");\n if (!fsSync.existsSync(path.join(root, \"package.json\"))) {\n logger.warn(\n \"Resolved project root %s has no package.json — migration may target the wrong directory. 
\" +\n \"Check your outFile path: %s\",\n root,\n outFile,\n );\n }\n return root;\n}\n\n/**\n * Remove old generated types from client/src/ (pre-shared/ location).\n * Best-effort: silently ignores missing files.\n */\nexport async function removeOldGeneratedTypes(\n projectRoot: string,\n filename: string,\n): Promise<void> {\n const oldFile = path.join(projectRoot, \"client\", \"src\", filename);\n try {\n await fs.unlink(oldFile);\n logger.debug(\"Removed old types at %s\", oldFile);\n } catch {\n // File doesn't exist — nothing to clean up\n }\n}\n\n// ── Project config migration ────────────────────────────────────────────\n\nconst migratedProjects = new Set<string>();\n\n/**\n * One-time config migration: update tsconfig and package.json for shared/ types output.\n * Idempotent — each sub-migration checks current file state and skips if already migrated.\n * Deduplicates per project root so monorepo builds migrate each app independently.\n * Opt-out: set `\"appkit\": { \"autoMigrate\": false }` in package.json.\n */\nexport async function migrateProjectConfig(projectRoot: string): Promise<void> {\n const resolved = path.resolve(projectRoot);\n if (migratedProjects.has(resolved)) return;\n migratedProjects.add(resolved);\n\n if (await isAutoMigrateDisabled(projectRoot)) {\n logger.debug(\"Auto-migration disabled via package.json appkit.autoMigrate\");\n return;\n }\n\n const results: Array<{ file: string; action: string }> = [];\n\n results.push(...(await migrateTsconfigClient(projectRoot)));\n results.push(...(await migrateTsconfigServer(projectRoot)));\n results.push(...(await migratePackageJsonScripts(projectRoot)));\n\n if (results.length > 0) {\n printMigrationSummary(results);\n }\n}\n\n/** Exported for testing only. 
*/\nexport function _resetMigrationState(): void {\n migratedProjects.clear();\n}\n\n// ── Helpers ─────────────────────────────────────────────────────────────\n\nasync function isAutoMigrateDisabled(projectRoot: string): Promise<boolean> {\n try {\n const raw = await fs.readFile(\n path.join(projectRoot, \"package.json\"),\n \"utf-8\",\n );\n const parsed = JSON.parse(raw);\n return parsed.appkit?.autoMigrate === false;\n } catch {\n return false;\n }\n}\n\n/** Strip JSONC comments (block and line) so JSON.parse can handle tsconfig files. */\nfunction stripJsonComments(text: string): string {\n // Match strings (to skip them) or comments (to remove them).\n // Strings must be matched first to avoid stripping comment-like patterns inside string values\n // (e.g. \"server/**/*\" contains /* which looks like a block comment start).\n return text.replace(/\"(?:[^\"\\\\]|\\\\.)*\"|\\/\\*[\\s\\S]*?\\*\\/|\\/\\/.*/g, (match) =>\n match.startsWith('\"') ? match : \"\",\n );\n}\n\ntype MigrationResult = Array<{ file: string; action: string }>;\n\n// ── tsconfig.client.json ────────────────────────────────────────────────\n\nasync function migrateTsconfigClient(\n projectRoot: string,\n): Promise<MigrationResult> {\n const results: MigrationResult = [];\n const filePath = path.join(projectRoot, \"tsconfig.client.json\");\n\n try {\n const raw = await fs.readFile(filePath, \"utf-8\");\n const parsed = JSON.parse(stripJsonComments(raw));\n\n if (!Array.isArray(parsed.include)) return results;\n if (parsed.include.includes(\"shared/appkit-types\")) return results;\n\n parsed.include.push(\"shared/appkit-types\");\n await fs.writeFile(\n filePath,\n `${JSON.stringify(parsed, null, 2)}\\n`,\n \"utf-8\",\n );\n results.push({\n file: \"tsconfig.client.json\",\n action: 'added \"shared/appkit-types\" to include',\n });\n } catch (err) {\n logger.warn(\n \"Failed to migrate tsconfig.client.json: %s\",\n (err as Error).message,\n );\n }\n\n return results;\n}\n\n// ── 
tsconfig.server.json ────────────────────────────────────────────────\n\nasync function migrateTsconfigServer(\n projectRoot: string,\n): Promise<MigrationResult> {\n const results: MigrationResult = [];\n const filePath = path.join(projectRoot, \"tsconfig.server.json\");\n\n try {\n const raw = await fs.readFile(filePath, \"utf-8\");\n const parsed = JSON.parse(stripJsonComments(raw));\n const opts = parsed.compilerOptions;\n\n if (!opts || !opts.outDir) return results; // already migrated or non-standard\n\n delete opts.outDir;\n delete opts.declaration;\n delete opts.declarationMap;\n delete opts.sourceMap;\n opts.noEmit = true;\n\n await fs.writeFile(\n filePath,\n `${JSON.stringify(parsed, null, 2)}\\n`,\n \"utf-8\",\n );\n results.push({\n file: \"tsconfig.server.json\",\n action: \"switched to noEmit mode\",\n });\n } catch (err) {\n logger.warn(\n \"Failed to migrate tsconfig.server.json: %s\",\n (err as Error).message,\n );\n }\n\n return results;\n}\n\n// ── package.json ────────────────────────────────────────────────────────\n\nconst SCRIPT_MIGRATIONS: Record<string, { old: string; new: string }> = {\n \"build:server\": {\n old: \"tsdown -c tsdown.server.config.ts\",\n new: \"tsc -b tsconfig.server.json && tsdown -c tsdown.server.config.ts\",\n },\n typecheck: {\n old: \"tsc -p ./tsconfig.server.json --noEmit && tsc -p ./tsconfig.client.json --noEmit\",\n new: \"tsc -b tsconfig.server.json && tsc -b tsconfig.client.json\",\n },\n};\n\nasync function migratePackageJsonScripts(\n projectRoot: string,\n): Promise<MigrationResult> {\n const results: MigrationResult = [];\n const filePath = path.join(projectRoot, \"package.json\");\n\n try {\n const raw = await fs.readFile(filePath, \"utf-8\");\n const parsed = JSON.parse(raw);\n const scripts = parsed.scripts;\n if (!scripts) return results;\n\n const updated: string[] = [];\n\n for (const [name, { old, new: replacement }] of Object.entries(\n SCRIPT_MIGRATIONS,\n )) {\n if (scripts[name] === old) {\n 
scripts[name] = replacement;\n updated.push(name);\n }\n }\n\n if (updated.length === 0) return results;\n\n const indent = raw.match(/^\\s+/m)?.[0]?.length === 4 ? 4 : 2;\n await fs.writeFile(\n filePath,\n `${JSON.stringify(parsed, null, indent)}\\n`,\n \"utf-8\",\n );\n results.push({\n file: \"package.json\",\n action: `updated ${updated.join(\" and \")} scripts`,\n });\n } catch (err) {\n logger.warn(\n \"Failed to migrate package.json scripts: %s\",\n (err as Error).message,\n );\n }\n\n return results;\n}\n\n// ── Summary ─────────────────────────────────────────────────────────────\n\nfunction printMigrationSummary(\n results: Array<{ file: string; action: string }>,\n) {\n const separator = pc.dim(\"─\".repeat(50));\n console.log(\"\");\n console.log(` ${pc.bold(\"Typegen Migration\")}`);\n console.log(` ${separator}`);\n for (const { file, action } of results) {\n console.log(` ${pc.green(\"✓\")} ${file.padEnd(24)} ${pc.dim(action)}`);\n }\n console.log(` ${separator}`);\n console.log(\"\");\n}\n"],"mappings":";;;;;;;AAMA,MAAM,SAAS,aAAa,2BAA2B;;;;;;;;;AAUvD,SAAgB,mBAAmB,SAAyB;CAC1D,MAAM,OAAO,KAAK,QAAQ,KAAK,QAAQ,QAAQ,EAAE,MAAM,KAAK;AAC5D,KAAI,CAACA,KAAO,WAAW,KAAK,KAAK,MAAM,eAAe,CAAC,CACrD,QAAO,KACL,wHAEA,MACA,QACD;AAEH,QAAO;;;;;;AAOT,eAAsB,wBACpB,aACA,UACe;CACf,MAAM,UAAU,KAAK,KAAK,aAAa,UAAU,OAAO,SAAS;AACjE,KAAI;AACF,QAAM,GAAG,OAAO,QAAQ;AACxB,SAAO,MAAM,2BAA2B,QAAQ;SAC1C;;AAOV,MAAM,mCAAmB,IAAI,KAAa;;;;;;;AAQ1C,eAAsB,qBAAqB,aAAoC;CAC7E,MAAM,WAAW,KAAK,QAAQ,YAAY;AAC1C,KAAI,iBAAiB,IAAI,SAAS,CAAE;AACpC,kBAAiB,IAAI,SAAS;AAE9B,KAAI,MAAM,sBAAsB,YAAY,EAAE;AAC5C,SAAO,MAAM,8DAA8D;AAC3E;;CAGF,MAAM,UAAmD,EAAE;AAE3D,SAAQ,KAAK,GAAI,MAAM,sBAAsB,YAAY,CAAE;AAC3D,SAAQ,KAAK,GAAI,MAAM,sBAAsB,YAAY,CAAE;AAC3D,SAAQ,KAAK,GAAI,MAAM,0BAA0B,YAAY,CAAE;AAE/D,KAAI,QAAQ,SAAS,EACnB,uBAAsB,QAAQ;;AAWlC,eAAe,sBAAsB,aAAuC;AAC1E,KAAI;EACF,MAAM,MAAM,MAAM,GAAG,SACnB,KAAK,KAAK,aAAa,eAAe,EACtC,QACD;AAED,SADe,KAAK,MAAM,IAAI,CAChB,QAAQ,gBAAgB;SAChC;AACN,SAAO;;;;AAKX,SAAS,kBAAkB,MAAsB;AAI/C,QAAO,KAAK,QA
AQ,+CAA+C,UACjE,MAAM,WAAW,KAAI,GAAG,QAAQ,GACjC;;AAOH,eAAe,sBACb,aAC0B;CAC1B,MAAM,UAA2B,EAAE;CACnC,MAAM,WAAW,KAAK,KAAK,aAAa,uBAAuB;AAE/D,KAAI;EACF,MAAM,MAAM,MAAM,GAAG,SAAS,UAAU,QAAQ;EAChD,MAAM,SAAS,KAAK,MAAM,kBAAkB,IAAI,CAAC;AAEjD,MAAI,CAAC,MAAM,QAAQ,OAAO,QAAQ,CAAE,QAAO;AAC3C,MAAI,OAAO,QAAQ,SAAS,sBAAsB,CAAE,QAAO;AAE3D,SAAO,QAAQ,KAAK,sBAAsB;AAC1C,QAAM,GAAG,UACP,UACA,GAAG,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC,KACnC,QACD;AACD,UAAQ,KAAK;GACX,MAAM;GACN,QAAQ;GACT,CAAC;UACK,KAAK;AACZ,SAAO,KACL,8CACC,IAAc,QAChB;;AAGH,QAAO;;AAKT,eAAe,sBACb,aAC0B;CAC1B,MAAM,UAA2B,EAAE;CACnC,MAAM,WAAW,KAAK,KAAK,aAAa,uBAAuB;AAE/D,KAAI;EACF,MAAM,MAAM,MAAM,GAAG,SAAS,UAAU,QAAQ;EAChD,MAAM,SAAS,KAAK,MAAM,kBAAkB,IAAI,CAAC;EACjD,MAAM,OAAO,OAAO;AAEpB,MAAI,CAAC,QAAQ,CAAC,KAAK,OAAQ,QAAO;AAElC,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,SAAO,KAAK;AACZ,OAAK,SAAS;AAEd,QAAM,GAAG,UACP,UACA,GAAG,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC,KACnC,QACD;AACD,UAAQ,KAAK;GACX,MAAM;GACN,QAAQ;GACT,CAAC;UACK,KAAK;AACZ,SAAO,KACL,8CACC,IAAc,QAChB;;AAGH,QAAO;;AAKT,MAAM,oBAAkE;CACtE,gBAAgB;EACd,KAAK;EACL,KAAK;EACN;CACD,WAAW;EACT,KAAK;EACL,KAAK;EACN;CACF;AAED,eAAe,0BACb,aAC0B;CAC1B,MAAM,UAA2B,EAAE;CACnC,MAAM,WAAW,KAAK,KAAK,aAAa,eAAe;AAEvD,KAAI;EACF,MAAM,MAAM,MAAM,GAAG,SAAS,UAAU,QAAQ;EAChD,MAAM,SAAS,KAAK,MAAM,IAAI;EAC9B,MAAM,UAAU,OAAO;AACvB,MAAI,CAAC,QAAS,QAAO;EAErB,MAAM,UAAoB,EAAE;AAE5B,OAAK,MAAM,CAAC,MAAM,EAAE,KAAK,KAAK,kBAAkB,OAAO,QACrD,kBACD,CACC,KAAI,QAAQ,UAAU,KAAK;AACzB,WAAQ,QAAQ;AAChB,WAAQ,KAAK,KAAK;;AAItB,MAAI,QAAQ,WAAW,EAAG,QAAO;EAEjC,MAAM,SAAS,IAAI,MAAM,QAAQ,GAAG,IAAI,WAAW,IAAI,IAAI;AAC3D,QAAM,GAAG,UACP,UACA,GAAG,KAAK,UAAU,QAAQ,MAAM,OAAO,CAAC,KACxC,QACD;AACD,UAAQ,KAAK;GACX,MAAM;GACN,QAAQ,WAAW,QAAQ,KAAK,QAAQ,CAAC;GAC1C,CAAC;UACK,KAAK;AACZ,SAAO,KACL,8CACC,IAAc,QAChB;;AAGH,QAAO;;AAKT,SAAS,sBACP,SACA;CACA,MAAM,YAAY,GAAG,IAAI,IAAI,OAAO,GAAG,CAAC;AACxC,SAAQ,IAAI,GAAG;AACf,SAAQ,IAAI,KAAK,GAAG,KAAK,oBAAoB,GAAG;AAChD,SAAQ,IAAI,KAAK,YAAY;AAC7B,MAAK,MAAM,EAAE,MAAM,YAAY,QAC7B,SAAQ,IAAI,KAAK,GAAG,MAAM,IAAI,CAAC,GAAG,KAAK,OAAO,GAAG,CAAC,GAAG,GAAG,IAAI
,OAAO,GAAG;AAExE,SAAQ,IAAI,KAAK,YAAY;AAC7B,SAAQ,IAAI,GAAG"}
@@ -10,6 +10,39 @@ import pc from "picocolors";
10
10
  //#region src/type-generator/query-registry.ts
11
11
  const logger = createLogger("type-generator:query-registry");
12
12
  /**
13
+ * Regex breakdown:
14
+ * '(?:[^']|'')*' — matches a SQL string literal, including escaped '' pairs
15
+ * | — alternation: whichever branch matches first at a position wins
16
+ * --[^\n]* — matches a single-line SQL comment
17
+ *
18
+ * Because the regex engine scans left-to-right, a `'` is consumed as a string
19
+ * literal before any `--` inside it could match as a comment — giving us
20
+ * correct single-pass ordering without a manual state machine.
21
+ *
22
+ * V1: no block-comment support (deferred to next PR).
23
+ */
24
+ const PROTECTED_RANGE_RE = /'(?:[^']|'')*'|--[^\n]*/g;
25
+ /**
26
+ * Numeric-context patterns for positional type inference.
27
+ * Hoisted to module scope — safe because matchAll() clones the regex internally.
28
+ */
29
+ const NUMERIC_PATTERNS = [
30
+ /\bLIMIT\s+:([a-zA-Z_]\w*)/gi,
31
+ /\bOFFSET\s+:([a-zA-Z_]\w*)/gi,
32
+ /\bTOP\s+:([a-zA-Z_]\w*)/gi,
33
+ /\bFETCH\s+FIRST\s+:([a-zA-Z_]\w*)\s+ROWS/gi,
34
+ /[+\-*/]\s*:([a-zA-Z_]\w*)/g,
35
+ /:([a-zA-Z_]\w*)\s*[+\-*/]/g
36
+ ];
37
+ function getProtectedRanges(sql) {
38
+ const ranges = [];
39
+ for (const m of sql.matchAll(PROTECTED_RANGE_RE)) ranges.push([m.index, m.index + m[0].length]);
40
+ return ranges;
41
+ }
42
+ function isInsideProtectedRange(offset, ranges) {
43
+ return ranges.some(([start, end]) => offset >= start && offset < end);
44
+ }
45
+ /**
13
46
  * Parse a raw API/SDK error into a structured code + message.
14
47
  * Handles Databricks-style JSON bodies embedded in the message string,
15
48
  * e.g. `Response from server (Bad Request) {"error_code":"...","message":"..."}`.
@@ -30,10 +63,11 @@ function parseError(raw) {
30
63
  * @param sql - the SQL query to extract parameters from
31
64
  * @returns an array of parameter names
32
65
  */
33
- function extractParameters(sql) {
34
- const matches = sql.matchAll(/:([a-zA-Z_]\w*)/g);
66
+ function extractParameters(sql, ranges) {
67
+ const protectedRanges = ranges ?? getProtectedRanges(sql);
68
+ const matches = sql.matchAll(/(?<!:):([a-zA-Z_]\w*)/g);
35
69
  const params = /* @__PURE__ */ new Set();
36
- for (const match of matches) params.add(match[1]);
70
+ for (const match of matches) if (!isInsideProtectedRange(match.index, protectedRanges)) params.add(match[1]);
37
71
  return Array.from(params);
38
72
  }
39
73
  const SERVER_INJECTED_PARAMS = ["workspaceId"];
@@ -95,6 +129,29 @@ function extractParameterTypes(sql) {
95
129
  }
96
130
  return paramTypes;
97
131
  }
132
+ function defaultForType(sqlType) {
133
+ switch (sqlType?.toUpperCase()) {
134
+ case "NUMERIC": return "0";
135
+ case "STRING": return "''";
136
+ case "BOOLEAN": return "true";
137
+ case "DATE": return "'2000-01-01'";
138
+ case "TIMESTAMP": return "'2000-01-01T00:00:00Z'";
139
+ case "BINARY": return "X'00'";
140
+ default: return "''";
141
+ }
142
+ }
143
+ /**
144
+ * Infer parameter types from positional context in SQL.
145
+ * V1 only infers NUMERIC from patterns like LIMIT, OFFSET, TOP,
146
+ * FETCH FIRST ... ROWS, and arithmetic operators.
147
+ * Parameters inside string literals or SQL comments are ignored.
148
+ */
149
+ function inferParameterTypes(sql, ranges) {
150
+ const inferred = {};
151
+ const protectedRanges = ranges ?? getProtectedRanges(sql);
152
+ for (const pattern of NUMERIC_PATTERNS) for (const match of sql.matchAll(pattern)) if (!isInsideProtectedRange(match.index, protectedRanges)) inferred[match[1]] = "NUMERIC";
153
+ return inferred;
154
+ }
98
155
  /**
99
156
  * Generate query schemas from a folder of SQL files
100
157
  * It uses DESCRIBE QUERY to get the schema without executing the query
@@ -139,7 +196,23 @@ async function generateQueriesFromDescribe(queryFolder, warehouseId, options = {
139
196
  status: "HIT"
140
197
  });
141
198
  } else {
142
- const cleanedSql = sql.replace(/:([a-zA-Z_]\w*)/g, "''").trim().replace(/;\s*$/, "");
199
+ const protectedRanges = getProtectedRanges(sql);
200
+ const annotatedTypes = extractParameterTypes(sql);
201
+ const parameterTypes = {
202
+ ...inferParameterTypes(sql, protectedRanges),
203
+ ...annotatedTypes
204
+ };
205
+ const sqlWithDefaults = sql.replace(/(?<!:):([a-zA-Z_]\w*)/g, (original, paramName, offset) => {
206
+ if (isInsideProtectedRange(offset, protectedRanges)) return original;
207
+ return defaultForType(parameterTypes[paramName]);
208
+ });
209
+ const allParams = extractParameters(sql, protectedRanges);
210
+ for (const param of allParams) {
211
+ if (SERVER_INJECTED_PARAMS.includes(param)) continue;
212
+ if (parameterTypes[param]) continue;
213
+ logger.warn("%s: parameter \":%s\" has no type annotation or inference. Add %s to the query file.", queryFiles[i], param, `-- @param ${param} <TYPE>`);
214
+ }
215
+ const cleanedSql = sqlWithDefaults.trim().replace(/;\s*$/, "");
143
216
  uncachedQueries.push({
144
217
  index: i,
145
218
  queryName,
@@ -1 +1 @@
1
- {"version":3,"file":"query-registry.js","names":[],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport pc from \"picocolors\";\nimport { createLogger } from \"../logging/logger\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\nconst logger = createLogger(\"type-generator:query-registry\");\n\n/**\n * Parse a raw API/SDK error into a structured code + message.\n * Handles Databricks-style JSON bodies embedded in the message string,\n * e.g. `Response from server (Bad Request) {\"error_code\":\"...\",\"message\":\"...\"}`.\n */\nfunction parseError(raw: string): { code?: string; message: string } {\n const jsonMatch = raw.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n try {\n const parsed = JSON.parse(jsonMatch[0]);\n if (parsed.error_code || parsed.message) {\n return {\n code: parsed.error_code,\n message: parsed.message || raw,\n };\n }\n } catch {\n // not valid JSON, fall through\n }\n }\n return { message: raw };\n}\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(sql: string): string[] {\n const matches = sql.matchAll(/:([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n params.add(match[1]);\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\n/**\n * Generates the TypeScript type literal for query parameters from SQL.\n * Shared by both the success and failure paths.\n */\nfunction formatParametersType(sql: string): string 
{\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n const paramTypes = extractParameterTypes(sql);\n\n return params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n}\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): { type: string; hasResults: boolean } {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const paramsType = formatParametersType(sql);\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? `/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n const hasResults = resultFields.length > 0;\n\n const type = `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: ${\n hasResults\n ? 
`Array<{\n ${resultFields.join(\";\\n \")};\n }>`\n : \"unknown\"\n };\n }`;\n\n return { type, hasResults };\n}\n\n/**\n * Used when DESCRIBE QUERY fails so the query still appears in QueryRegistry.\n * Generates a type with unknown result from SQL alone (no warehouse call).\n */\nfunction generateUnknownResultQuery(sql: string, queryName: string): string {\n const paramsType = formatParametersType(sql);\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: unknown;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean; concurrency?: number } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false, concurrency: rawConcurrency = 10 } = options;\n const concurrency =\n typeof rawConcurrency === \"number\" && Number.isFinite(rawConcurrency)\n ? Math.max(1, Math.floor(rawConcurrency))\n : 10;\n\n // read all query files and cache in parallel\n const [allFiles, cache] = await Promise.all([\n fs.readdir(queryFolder),\n noCache\n ? 
({ version: CACHE_VERSION, queries: {} } as Awaited<\n ReturnType<typeof loadCache>\n >)\n : loadCache(),\n ]);\n\n const queryFiles = allFiles.filter((file) => file.endsWith(\".sql\"));\n logger.debug(\"Found %d SQL queries\", queryFiles.length);\n\n const client = new WorkspaceClient({});\n const spinner = new Spinner();\n\n // Read all SQL files in parallel\n const sqlContents = await Promise.all(\n queryFiles.map((file) => fs.readFile(path.join(queryFolder, file), \"utf8\")),\n );\n\n const startTime = performance.now();\n\n // Phase 1: Check cache, separate cached vs uncached\n const cachedResults: Array<{ index: number; schema: QuerySchema }> = [];\n const uncachedQueries: Array<{\n index: number;\n queryName: string;\n sql: string;\n sqlHash: string;\n cleanedSql: string;\n }> = [];\n const logEntries: Array<{\n queryName: string;\n status: \"HIT\" | \"MISS\";\n failed?: boolean;\n error?: { code?: string; message: string };\n }> = [];\n\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const rawName = path.basename(file, \".sql\");\n const queryName = normalizeQueryName(rawName);\n\n const sql = sqlContents[i];\n const sqlHash = hashSQL(sql);\n\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash && !cached.retry) {\n cachedResults.push({\n index: i,\n schema: { name: queryName, type: cached.type },\n });\n logEntries.push({ queryName, status: \"HIT\" });\n } else {\n const sqlWithDefaults = sql.replace(/:([a-zA-Z_]\\w*)/g, \"''\");\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n uncachedQueries.push({ index: i, queryName, sql, sqlHash, cleanedSql });\n }\n }\n\n // Phase 2: Execute all uncached DESCRIBE calls in parallel\n type DescribeResult =\n | {\n status: \"ok\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n }\n | {\n status: \"fail\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: 
string; retry: boolean };\n error: { code?: string; message: string };\n };\n\n const freshResults: Array<{ index: number; schema: QuerySchema }> = [];\n\n if (uncachedQueries.length > 0) {\n let completed = 0;\n const total = uncachedQueries.length;\n spinner.start(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (0/${total})`,\n );\n\n const describeOne = async ({\n index,\n queryName,\n sql,\n sqlHash,\n cleanedSql,\n }: (typeof uncachedQueries)[number]): Promise<DescribeResult> => {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n completed++;\n spinner.update(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (${completed}/${total})`,\n );\n\n logger.debug(\n \"DESCRIBE result for %s: state=%s, rows=%d\",\n queryName,\n result.status.state,\n result.result?.data_array?.length ?? 0,\n );\n\n if (result.status.state === \"FAILED\") {\n const sqlError =\n result.status.error?.message || \"Query execution failed\";\n logger.warn(\"DESCRIBE failed for %s: %s\", queryName, sqlError);\n const type = generateUnknownResultQuery(sql, queryName);\n return {\n status: \"fail\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: true },\n error: parseError(sqlError),\n };\n }\n\n const { type, hasResults } = convertToQueryType(result, sql, queryName);\n return {\n status: \"ok\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: !hasResults },\n };\n };\n\n // Process in chunks, saving cache after each chunk\n const processBatchResults = (\n settled: PromiseSettledResult<DescribeResult>[],\n batchOffset: number,\n ) => {\n for (let i = 0; i < settled.length; i++) {\n const entry = settled[i];\n const { queryName } = uncachedQueries[batchOffset + i];\n\n if (entry.status === \"fulfilled\") {\n const res = entry.value;\n 
freshResults.push({ index: res.index, schema: res.schema });\n cache.queries[queryName] = res.cacheEntry;\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: res.status === \"fail\",\n error: res.status === \"fail\" ? res.error : undefined,\n });\n } else {\n const { sql, sqlHash, index } = uncachedQueries[batchOffset + i];\n const reason =\n entry.reason instanceof Error\n ? entry.reason.message\n : String(entry.reason);\n logger.warn(\"DESCRIBE rejected for %s: %s\", queryName, reason);\n const type = generateUnknownResultQuery(sql, queryName);\n freshResults.push({ index, schema: { name: queryName, type } });\n cache.queries[queryName] = { hash: sqlHash, type, retry: true };\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: true,\n error: parseError(reason),\n });\n }\n }\n };\n\n if (uncachedQueries.length > concurrency) {\n for (let b = 0; b < uncachedQueries.length; b += concurrency) {\n const batch = uncachedQueries.slice(b, b + concurrency);\n const batchResults = await Promise.allSettled(batch.map(describeOne));\n processBatchResults(batchResults, b);\n await saveCache(cache);\n }\n } else {\n const settled = await Promise.allSettled(\n uncachedQueries.map(describeOne),\n );\n processBatchResults(settled, 0);\n await saveCache(cache);\n }\n\n spinner.stop(\"\");\n }\n\n const elapsed = ((performance.now() - startTime) / 1000).toFixed(2);\n\n // Print formatted table\n if (logEntries.length > 0) {\n const maxNameLen = Math.max(...logEntries.map((e) => e.queryName.length));\n const separator = pc.dim(\"─\".repeat(50));\n console.log(\"\");\n console.log(\n ` ${pc.bold(\"Typegen Queries\")} ${pc.dim(`(${logEntries.length})`)}`,\n );\n console.log(` ${separator}`);\n for (const entry of logEntries) {\n const tag = entry.failed\n ? pc.bold(pc.red(\"ERROR\"))\n : entry.status === \"HIT\"\n ? 
`cache ${pc.bold(pc.green(\"HIT \"))}`\n : `cache ${pc.bold(pc.yellow(\"MISS \"))}`;\n const rawName = entry.queryName.padEnd(maxNameLen);\n const name = entry.failed ? pc.dim(pc.strikethrough(rawName)) : rawName;\n const errorCode = entry.error?.message.match(/\\[([^\\]]+)\\]/)?.[1];\n const reason = errorCode ? ` ${pc.dim(errorCode)}` : \"\";\n console.log(` ${tag} ${name}${reason}`);\n }\n const newCount = logEntries.filter(\n (e) => e.status === \"MISS\" && !e.failed,\n ).length;\n const cacheCount = logEntries.filter(\n (e) => e.status === \"HIT\" && !e.failed,\n ).length;\n const errorCount = logEntries.filter((e) => e.failed).length;\n console.log(` ${separator}`);\n const parts = [`${newCount} new`, `${cacheCount} from cache`];\n if (errorCount > 0)\n parts.push(`${errorCount} ${errorCount === 1 ? \"error\" : \"errors\"}`);\n console.log(` ${parts.join(\", \")}. ${pc.dim(`${elapsed}s`)}`);\n console.log(\"\");\n }\n\n // Merge and sort by original file index for deterministic output\n return [...cachedResults, ...freshResults]\n .sort((a, b) => a.index - b.index)\n .map((r) => r.schema);\n}\n\n/**\n * Normalize query name by removing the .obo extension\n * @param queryName - the query name to normalize\n * @returns the normalized query name\n */\nfunction normalizeQueryName(fileName: string): string {\n return fileName.replace(/\\.obo$/, \"\");\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types 
to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: \"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAM,SAAS,aAAa,gCAAgC;;;;;;AAO5D,SAAS,WAAW,KAAiD;CACnE,MAAM,YAAY,IAAI,MAAM,cAAc;AAC1C,KAAI,UACF,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG;AACvC,MAAI,OAAO,cAAc,OAAO,QAC9B,QAAO;GACL,MAAM,OAAO;GACb,SAAS,OAAO,WAAW;GAC5B;SAEG;AAIV,QAAO,EAAE,SAAS,KAAK;;;;;;;AAQzB,SAAgB,kBAAkB,KAAuB;CACvD,MAAM,UAAU,IAAI,SAAS,mBAAmB;CAChD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,QAAO,IAAI,MAAM,GAAG;AAEtB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;;;;;AAMrD,SAAS,qBAAqB,KAAqB;CACjD,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CACD,MAAM,aAAa,sBAAsB,IAAI;AAE7C,QAAO,OAAO,SAAS,IACnB,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAC3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB;;AAGN,SAAgB,mBACd,QACA,KACA,WACuC;CAEvC,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,aAAa,qBAAqB,IAAI;CAG5C,MAAM,eAAe,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAOpB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B;CAEF,MAAM,aAAa,aA
Aa,SAAS;AAczC,QAAO;EAAE,MAZI;aACF,UAAU;kBACL,WAAW;cAEvB,aACI;QACF,aAAa,KAAK,YAAY,CAAC;UAE7B,UACL;;EAGY;EAAY;;;;;;AAO7B,SAAS,2BAA2B,KAAa,WAA2B;AAG1E,QAAO;aACI,UAAU;kBAHF,qBAAqB,IAAI,CAIjB;;;;AAK7B,SAAgB,sBAAsB,KAAqC;CACzE,MAAM,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAuD,EAAE,EACjC;CACxB,MAAM,EAAE,UAAU,OAAO,aAAa,iBAAiB,OAAO;CAC9D,MAAM,cACJ,OAAO,mBAAmB,YAAY,OAAO,SAAS,eAAe,GACjE,KAAK,IAAI,GAAG,KAAK,MAAM,eAAe,CAAC,GACvC;CAGN,MAAM,CAAC,UAAU,SAAS,MAAM,QAAQ,IAAI,CAC1C,GAAG,QAAQ,YAAY,EACvB,UACK;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAGxC,WAAW,CAChB,CAAC;CAEF,MAAM,aAAa,SAAS,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AACnE,QAAO,MAAM,wBAAwB,WAAW,OAAO;CAEvD,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAM,UAAU,IAAI,SAAS;CAG7B,MAAM,cAAc,MAAM,QAAQ,IAChC,WAAW,KAAK,SAAS,GAAG,SAAS,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO,CAAC,CAC5E;CAED,MAAM,YAAY,YAAY,KAAK;CAGnC,MAAM,gBAA+D,EAAE;CACvE,MAAM,kBAMD,EAAE;CACP,MAAM,aAKD,EAAE;AAEP,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EAExB,MAAM,YAAY,mBADF,KAAK,SAAS,MAAM,OAAO,CACE;EAE7C,MAAM,MAAM,YAAY;EACxB,MAAM,UAAU,QAAQ,IAAI;EAE5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,WAAW,CAAC,OAAO,OAAO;AACtD,iBAAc,KAAK;IACjB,OAAO;IACP,QAAQ;KAAE,MAAM;KAAW,MAAM,OAAO;KAAM;IAC/C,CAAC;AACF,cAAW,KAAK;IAAE;IAAW,QAAQ;IAAO,CAAC;SACxC;GAEL,MAAM,aADkB,IAAI,QAAQ,oBAAoB,KAAK,CAC1B,MAAM,CAAC,QAAQ,SAAS,GAAG;AAC9D,mBAAgB,KAAK;IAAE,OAAO;IAAG;IAAW;IAAK;IAAS;IAAY,CAAC;;;CAoB3E,MAAM,eAA8D,EAAE;AAEtE,KAAI,gBAAgB,SAAS,GAAG;EAC9B,IAAI,YAAY;EAChB,MAAM,QAAQ,gBAAgB;AAC9B,UAAQ,MACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,MAAM,MAAM,GACtE;EAED,MAAM,cAAc,OAAO,EACzB,OACA,WACA,KACA,SACA,iBAC+D;GAC/D,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF;AACA,WAAQ,OACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,IAAI,UAAU,GAAG,MAAM,GACjF;AAED,UAAO,MACL,6CACA,WACA,OAAO,OAAO,OACd,OAAO,QAAQ,YAAY,UAAU,EACtC;AAED,OAAI,OAAO,OAAO,UAAU,UAAU;IACpC,MAAM,WACJ,OAAO,OAAO,OAAO,WAA
W;AAClC,WAAO,KAAK,8BAA8B,WAAW,SAAS;IAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,WAAO;KACL,QAAQ;KACR;KACA,QAAQ;MAAE,MAAM;MAAW;MAAM;KACjC,YAAY;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;KAChD,OAAO,WAAW,SAAS;KAC5B;;GAGH,MAAM,EAAE,MAAM,eAAe,mBAAmB,QAAQ,KAAK,UAAU;AACvE,UAAO;IACL,QAAQ;IACR;IACA,QAAQ;KAAE,MAAM;KAAW;KAAM;IACjC,YAAY;KAAE,MAAM;KAAS;KAAM,OAAO,CAAC;KAAY;IACxD;;EAIH,MAAM,uBACJ,SACA,gBACG;AACH,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;IACvC,MAAM,QAAQ,QAAQ;IACtB,MAAM,EAAE,cAAc,gBAAgB,cAAc;AAEpD,QAAI,MAAM,WAAW,aAAa;KAChC,MAAM,MAAM,MAAM;AAClB,kBAAa,KAAK;MAAE,OAAO,IAAI;MAAO,QAAQ,IAAI;MAAQ,CAAC;AAC3D,WAAM,QAAQ,aAAa,IAAI;AAC/B,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ,IAAI,WAAW;MACvB,OAAO,IAAI,WAAW,SAAS,IAAI,QAAQ;MAC5C,CAAC;WACG;KACL,MAAM,EAAE,KAAK,SAAS,UAAU,gBAAgB,cAAc;KAC9D,MAAM,SACJ,MAAM,kBAAkB,QACpB,MAAM,OAAO,UACb,OAAO,MAAM,OAAO;AAC1B,YAAO,KAAK,gCAAgC,WAAW,OAAO;KAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,kBAAa,KAAK;MAAE;MAAO,QAAQ;OAAE,MAAM;OAAW;OAAM;MAAE,CAAC;AAC/D,WAAM,QAAQ,aAAa;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;AAC/D,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ;MACR,OAAO,WAAW,OAAO;MAC1B,CAAC;;;;AAKR,MAAI,gBAAgB,SAAS,YAC3B,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,aAAa;GAC5D,MAAM,QAAQ,gBAAgB,MAAM,GAAG,IAAI,YAAY;AAEvD,uBADqB,MAAM,QAAQ,WAAW,MAAM,IAAI,YAAY,CAAC,EACnC,EAAE;AACpC,SAAM,UAAU,MAAM;;OAEnB;AAIL,uBAHgB,MAAM,QAAQ,WAC5B,gBAAgB,IAAI,YAAY,CACjC,EAC4B,EAAE;AAC/B,SAAM,UAAU,MAAM;;AAGxB,UAAQ,KAAK,GAAG;;CAGlB,MAAM,YAAY,YAAY,KAAK,GAAG,aAAa,KAAM,QAAQ,EAAE;AAGnE,KAAI,WAAW,SAAS,GAAG;EACzB,MAAM,aAAa,KAAK,IAAI,GAAG,WAAW,KAAK,MAAM,EAAE,UAAU,OAAO,CAAC;EACzE,MAAM,YAAY,GAAG,IAAI,IAAI,OAAO,GAAG,CAAC;AACxC,UAAQ,IAAI,GAAG;AACf,UAAQ,IACN,KAAK,GAAG,KAAK,kBAAkB,CAAC,GAAG,GAAG,IAAI,IAAI,WAAW,OAAO,GAAG,GACpE;AACD,UAAQ,IAAI,KAAK,YAAY;AAC7B,OAAK,MAAM,SAAS,YAAY;GAC9B,MAAM,MAAM,MAAM,SACd,GAAG,KAAK,GAAG,IAAI,QAAQ,CAAC,GACxB,MAAM,WAAW,QACf,SAAS,GAAG,KAAK,GAAG,MAAM,QAAQ,CAAC,KACnC,SAAS,GAAG,KAAK,GAAG,OAAO,QAAQ,CAAC;GAC1C,MAAM,UAAU,MAAM,UAAU,OAAO,WAAW;GAClD,MAAM,OAAO,MAAM,SAAS,GAAG,IAAI,GAAG,cAAc,QAAQ,CAAC,GAAG;GAChE,MAAM,YAAY,MAAM,OAAO
,QAAQ,MAAM,eAAe,GAAG;GAC/D,MAAM,SAAS,YAAY,KAAK,GAAG,IAAI,UAAU,KAAK;AACtD,WAAQ,IAAI,KAAK,IAAI,IAAI,OAAO,SAAS;;EAE3C,MAAM,WAAW,WAAW,QACzB,MAAM,EAAE,WAAW,UAAU,CAAC,EAAE,OAClC,CAAC;EACF,MAAM,aAAa,WAAW,QAC3B,MAAM,EAAE,WAAW,SAAS,CAAC,EAAE,OACjC,CAAC;EACF,MAAM,aAAa,WAAW,QAAQ,MAAM,EAAE,OAAO,CAAC;AACtD,UAAQ,IAAI,KAAK,YAAY;EAC7B,MAAM,QAAQ,CAAC,GAAG,SAAS,OAAO,GAAG,WAAW,aAAa;AAC7D,MAAI,aAAa,EACf,OAAM,KAAK,GAAG,WAAW,GAAG,eAAe,IAAI,UAAU,WAAW;AACtE,UAAQ,IAAI,KAAK,MAAM,KAAK,KAAK,CAAC,IAAI,GAAG,IAAI,GAAG,QAAQ,GAAG,GAAG;AAC9D,UAAQ,IAAI,GAAG;;AAIjB,QAAO,CAAC,GAAG,eAAe,GAAG,aAAa,CACvC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,KAAK,MAAM,EAAE,OAAO;;;;;;;AAQzB,SAAS,mBAAmB,UAA0B;AACpD,QAAO,SAAS,QAAQ,UAAU,GAAG;;;;;;;;;;;;AAavC,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAM,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
1
+ {"version":3,"file":"query-registry.js","names":[],"sources":["../../src/type-generator/query-registry.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport pc from \"picocolors\";\nimport { createLogger } from \"../logging/logger\";\nimport { CACHE_VERSION, hashSQL, loadCache, saveCache } from \"./cache\";\nimport { Spinner } from \"./spinner\";\nimport {\n type DatabricksStatementExecutionResponse,\n type QuerySchema,\n sqlTypeToHelper,\n sqlTypeToMarker,\n} from \"./types\";\n\nconst logger = createLogger(\"type-generator:query-registry\");\n\n/**\n * Regex breakdown:\n * '(?:[^']|'')*' — matches a SQL string literal, including escaped '' pairs\n * | — alternation: whichever branch matches first at a position wins\n * --[^\\n]* — matches a single-line SQL comment\n *\n * Because the regex engine scans left-to-right, a `'` is consumed as a string\n * literal before any `--` inside it could match as a comment — giving us\n * correct single-pass ordering without a manual state machine.\n *\n * V1: no block-comment support (deferred to next PR).\n */\nconst PROTECTED_RANGE_RE = /'(?:[^']|'')*'|--[^\\n]*/g;\n\n/**\n * Numeric-context patterns for positional type inference.\n * Hoisted to module scope — safe because matchAll() clones the regex internally.\n */\nconst NUMERIC_PATTERNS: RegExp[] = [\n /\\bLIMIT\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bOFFSET\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bTOP\\s+:([a-zA-Z_]\\w*)/gi,\n /\\bFETCH\\s+FIRST\\s+:([a-zA-Z_]\\w*)\\s+ROWS/gi,\n // V1 limitation: arithmetic operators may false-positive for date\n // expressions like `:start_date - INTERVAL '1 day'`. A smarter\n // heuristic (e.g. 
look-ahead for INTERVAL) is deferred to a future PR.\n /[+\\-*/]\\s*:([a-zA-Z_]\\w*)/g,\n /:([a-zA-Z_]\\w*)\\s*[+\\-*/]/g,\n];\n\nexport function getProtectedRanges(sql: string): Array<[number, number]> {\n const ranges: Array<[number, number]> = [];\n for (const m of sql.matchAll(PROTECTED_RANGE_RE)) {\n ranges.push([m.index, m.index + m[0].length]);\n }\n return ranges;\n}\n\nfunction isInsideProtectedRange(\n offset: number,\n ranges: Array<[number, number]>,\n): boolean {\n return ranges.some(([start, end]) => offset >= start && offset < end);\n}\n\n/**\n * Parse a raw API/SDK error into a structured code + message.\n * Handles Databricks-style JSON bodies embedded in the message string,\n * e.g. `Response from server (Bad Request) {\"error_code\":\"...\",\"message\":\"...\"}`.\n */\nfunction parseError(raw: string): { code?: string; message: string } {\n const jsonMatch = raw.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n try {\n const parsed = JSON.parse(jsonMatch[0]);\n if (parsed.error_code || parsed.message) {\n return {\n code: parsed.error_code,\n message: parsed.message || raw,\n };\n }\n } catch {\n // not valid JSON, fall through\n }\n }\n return { message: raw };\n}\n\n/**\n * Extract parameters from a SQL query\n * @param sql - the SQL query to extract parameters from\n * @returns an array of parameter names\n */\nexport function extractParameters(\n sql: string,\n ranges?: Array<[number, number]>,\n): string[] {\n const protectedRanges = ranges ?? 
getProtectedRanges(sql);\n const matches = sql.matchAll(/(?<!:):([a-zA-Z_]\\w*)/g);\n const params = new Set<string>();\n for (const match of matches) {\n if (!isInsideProtectedRange(match.index, protectedRanges)) {\n params.add(match[1]);\n }\n }\n return Array.from(params);\n}\n\n// parameters that are injected by the server\nexport const SERVER_INJECTED_PARAMS = [\"workspaceId\"];\n\n/**\n * Generates the TypeScript type literal for query parameters from SQL.\n * Shared by both the success and failure paths.\n */\nfunction formatParametersType(sql: string): string {\n const params = extractParameters(sql).filter(\n (p) => !SERVER_INJECTED_PARAMS.includes(p),\n );\n const paramTypes = extractParameterTypes(sql);\n\n return params.length > 0\n ? `{\\n ${params\n .map((p) => {\n const sqlType = paramTypes[p];\n const markerType = sqlType\n ? sqlTypeToMarker[sqlType]\n : \"SQLTypeMarker\";\n const helper = sqlType ? sqlTypeToHelper[sqlType] : \"sql.*()\";\n return `/** ${sqlType || \"any\"} - use ${helper} */\\n ${p}: ${markerType}`;\n })\n .join(\";\\n \")};\\n }`\n : \"Record<string, never>\";\n}\n\nexport function convertToQueryType(\n result: DatabricksStatementExecutionResponse,\n sql: string,\n queryName: string,\n): { type: string; hasResults: boolean } {\n const dataRows = result.result?.data_array || [];\n const columns = dataRows.map((row) => ({\n name: row[0] || \"\",\n type_name: row[1]?.toUpperCase() || \"STRING\",\n comment: row[2] || undefined,\n }));\n\n const paramsType = formatParametersType(sql);\n\n // generate result fields with JSDoc\n const resultFields = columns.map((column) => {\n const normalizedType = normalizeTypeName(column.type_name);\n const mappedType = typeMap[normalizedType] || \"unknown\";\n // validate column name is a valid identifier\n const name = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(column.name)\n ? column.name\n : `\"${column.name}\"`;\n\n // generate comment for column\n const comment = column.comment\n ? 
`/** ${column.comment} */\\n `\n : `/** @sqlType ${column.type_name} */\\n `;\n\n return `${comment}${name}: ${mappedType}`;\n });\n\n const hasResults = resultFields.length > 0;\n\n const type = `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: ${\n hasResults\n ? `Array<{\n ${resultFields.join(\";\\n \")};\n }>`\n : \"unknown\"\n };\n }`;\n\n return { type, hasResults };\n}\n\n/**\n * Used when DESCRIBE QUERY fails so the query still appears in QueryRegistry.\n * Generates a type with unknown result from SQL alone (no warehouse call).\n */\nfunction generateUnknownResultQuery(sql: string, queryName: string): string {\n const paramsType = formatParametersType(sql);\n\n return `{\n name: \"${queryName}\";\n parameters: ${paramsType};\n result: unknown;\n }`;\n}\n\nexport function extractParameterTypes(sql: string): Record<string, string> {\n const paramTypes: Record<string, string> = {};\n const regex =\n /--\\s*@param\\s+(\\w+)\\s+(STRING|NUMERIC|BOOLEAN|DATE|TIMESTAMP|BINARY)/gi;\n const matches = sql.matchAll(regex);\n for (const match of matches) {\n const [, paramName, paramType] = match;\n paramTypes[paramName] = paramType.toUpperCase();\n }\n\n return paramTypes;\n}\n\nexport function defaultForType(sqlType: string | undefined): string {\n switch (sqlType?.toUpperCase()) {\n case \"NUMERIC\":\n return \"0\";\n case \"STRING\":\n return \"''\";\n case \"BOOLEAN\":\n return \"true\";\n case \"DATE\":\n return \"'2000-01-01'\";\n case \"TIMESTAMP\":\n return \"'2000-01-01T00:00:00Z'\";\n case \"BINARY\":\n return \"X'00'\";\n default:\n return \"''\";\n }\n}\n\n/**\n * Infer parameter types from positional context in SQL.\n * V1 only infers NUMERIC from patterns like LIMIT, OFFSET, TOP,\n * FETCH FIRST ... 
ROWS, and arithmetic operators.\n * Parameters inside string literals or SQL comments are ignored.\n */\nexport function inferParameterTypes(\n sql: string,\n ranges?: Array<[number, number]>,\n): Record<string, string> {\n const inferred: Record<string, string> = {};\n const protectedRanges = ranges ?? getProtectedRanges(sql);\n\n for (const pattern of NUMERIC_PATTERNS) {\n for (const match of sql.matchAll(pattern)) {\n if (!isInsideProtectedRange(match.index, protectedRanges)) {\n inferred[match[1]] = \"NUMERIC\";\n }\n }\n }\n\n return inferred;\n}\n\n/**\n * Generate query schemas from a folder of SQL files\n * It uses DESCRIBE QUERY to get the schema without executing the query\n * @param queryFolder - the folder containing the SQL files\n * @param warehouseId - the warehouse id to use for schema analysis\n * @param options - options for the query generation\n * @param options.noCache - if true, skip the cache and regenerate all types\n * @returns an array of query schemas\n */\nexport async function generateQueriesFromDescribe(\n queryFolder: string,\n warehouseId: string,\n options: { noCache?: boolean; concurrency?: number } = {},\n): Promise<QuerySchema[]> {\n const { noCache = false, concurrency: rawConcurrency = 10 } = options;\n const concurrency =\n typeof rawConcurrency === \"number\" && Number.isFinite(rawConcurrency)\n ? Math.max(1, Math.floor(rawConcurrency))\n : 10;\n\n // read all query files and cache in parallel\n const [allFiles, cache] = await Promise.all([\n fs.readdir(queryFolder),\n noCache\n ? 
({ version: CACHE_VERSION, queries: {} } as Awaited<\n ReturnType<typeof loadCache>\n >)\n : loadCache(),\n ]);\n\n const queryFiles = allFiles.filter((file) => file.endsWith(\".sql\"));\n logger.debug(\"Found %d SQL queries\", queryFiles.length);\n\n const client = new WorkspaceClient({});\n const spinner = new Spinner();\n\n // Read all SQL files in parallel\n const sqlContents = await Promise.all(\n queryFiles.map((file) => fs.readFile(path.join(queryFolder, file), \"utf8\")),\n );\n\n const startTime = performance.now();\n\n // Phase 1: Check cache, separate cached vs uncached\n const cachedResults: Array<{ index: number; schema: QuerySchema }> = [];\n const uncachedQueries: Array<{\n index: number;\n queryName: string;\n sql: string;\n sqlHash: string;\n cleanedSql: string;\n }> = [];\n const logEntries: Array<{\n queryName: string;\n status: \"HIT\" | \"MISS\";\n failed?: boolean;\n error?: { code?: string; message: string };\n }> = [];\n\n for (let i = 0; i < queryFiles.length; i++) {\n const file = queryFiles[i];\n const rawName = path.basename(file, \".sql\");\n const queryName = normalizeQueryName(rawName);\n\n const sql = sqlContents[i];\n const sqlHash = hashSQL(sql);\n\n const cached = cache.queries[queryName];\n if (cached && cached.hash === sqlHash && !cached.retry) {\n cachedResults.push({\n index: i,\n schema: { name: queryName, type: cached.type },\n });\n logEntries.push({ queryName, status: \"HIT\" });\n } else {\n const protectedRanges = getProtectedRanges(sql);\n const annotatedTypes = extractParameterTypes(sql);\n const inferredTypes = inferParameterTypes(sql, protectedRanges);\n const parameterTypes = { ...inferredTypes, ...annotatedTypes };\n const sqlWithDefaults = sql.replace(\n /(?<!:):([a-zA-Z_]\\w*)/g,\n (original, paramName, offset) => {\n if (isInsideProtectedRange(offset, protectedRanges)) {\n return original;\n }\n return defaultForType(parameterTypes[paramName]);\n },\n );\n\n // Warn about unresolved parameters\n const allParams 
= extractParameters(sql, protectedRanges);\n for (const param of allParams) {\n if (SERVER_INJECTED_PARAMS.includes(param)) continue;\n if (parameterTypes[param]) continue;\n logger.warn(\n '%s: parameter \":%s\" has no type annotation or inference. Add %s to the query file.',\n queryFiles[i],\n param,\n `-- @param ${param} <TYPE>`,\n );\n }\n\n const cleanedSql = sqlWithDefaults.trim().replace(/;\\s*$/, \"\");\n uncachedQueries.push({ index: i, queryName, sql, sqlHash, cleanedSql });\n }\n }\n\n // Phase 2: Execute all uncached DESCRIBE calls in parallel\n type DescribeResult =\n | {\n status: \"ok\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n }\n | {\n status: \"fail\";\n index: number;\n schema: QuerySchema;\n cacheEntry: { hash: string; type: string; retry: boolean };\n error: { code?: string; message: string };\n };\n\n const freshResults: Array<{ index: number; schema: QuerySchema }> = [];\n\n if (uncachedQueries.length > 0) {\n let completed = 0;\n const total = uncachedQueries.length;\n spinner.start(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (0/${total})`,\n );\n\n const describeOne = async ({\n index,\n queryName,\n sql,\n sqlHash,\n cleanedSql,\n }: (typeof uncachedQueries)[number]): Promise<DescribeResult> => {\n const result = (await client.statementExecution.executeStatement({\n statement: `DESCRIBE QUERY ${cleanedSql}`,\n warehouse_id: warehouseId,\n })) as DatabricksStatementExecutionResponse;\n\n completed++;\n spinner.update(\n `Describing ${total} ${total === 1 ? \"query\" : \"queries\"} (${completed}/${total})`,\n );\n\n logger.debug(\n \"DESCRIBE result for %s: state=%s, rows=%d\",\n queryName,\n result.status.state,\n result.result?.data_array?.length ?? 
0,\n );\n\n if (result.status.state === \"FAILED\") {\n const sqlError =\n result.status.error?.message || \"Query execution failed\";\n logger.warn(\"DESCRIBE failed for %s: %s\", queryName, sqlError);\n const type = generateUnknownResultQuery(sql, queryName);\n return {\n status: \"fail\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: true },\n error: parseError(sqlError),\n };\n }\n\n const { type, hasResults } = convertToQueryType(result, sql, queryName);\n return {\n status: \"ok\",\n index,\n schema: { name: queryName, type },\n cacheEntry: { hash: sqlHash, type, retry: !hasResults },\n };\n };\n\n // Process in chunks, saving cache after each chunk\n const processBatchResults = (\n settled: PromiseSettledResult<DescribeResult>[],\n batchOffset: number,\n ) => {\n for (let i = 0; i < settled.length; i++) {\n const entry = settled[i];\n const { queryName } = uncachedQueries[batchOffset + i];\n\n if (entry.status === \"fulfilled\") {\n const res = entry.value;\n freshResults.push({ index: res.index, schema: res.schema });\n cache.queries[queryName] = res.cacheEntry;\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: res.status === \"fail\",\n error: res.status === \"fail\" ? res.error : undefined,\n });\n } else {\n const { sql, sqlHash, index } = uncachedQueries[batchOffset + i];\n const reason =\n entry.reason instanceof Error\n ? 
entry.reason.message\n : String(entry.reason);\n logger.warn(\"DESCRIBE rejected for %s: %s\", queryName, reason);\n const type = generateUnknownResultQuery(sql, queryName);\n freshResults.push({ index, schema: { name: queryName, type } });\n cache.queries[queryName] = { hash: sqlHash, type, retry: true };\n logEntries.push({\n queryName,\n status: \"MISS\",\n failed: true,\n error: parseError(reason),\n });\n }\n }\n };\n\n if (uncachedQueries.length > concurrency) {\n for (let b = 0; b < uncachedQueries.length; b += concurrency) {\n const batch = uncachedQueries.slice(b, b + concurrency);\n const batchResults = await Promise.allSettled(batch.map(describeOne));\n processBatchResults(batchResults, b);\n await saveCache(cache);\n }\n } else {\n const settled = await Promise.allSettled(\n uncachedQueries.map(describeOne),\n );\n processBatchResults(settled, 0);\n await saveCache(cache);\n }\n\n spinner.stop(\"\");\n }\n\n const elapsed = ((performance.now() - startTime) / 1000).toFixed(2);\n\n // Print formatted table\n if (logEntries.length > 0) {\n const maxNameLen = Math.max(...logEntries.map((e) => e.queryName.length));\n const separator = pc.dim(\"─\".repeat(50));\n console.log(\"\");\n console.log(\n ` ${pc.bold(\"Typegen Queries\")} ${pc.dim(`(${logEntries.length})`)}`,\n );\n console.log(` ${separator}`);\n for (const entry of logEntries) {\n const tag = entry.failed\n ? pc.bold(pc.red(\"ERROR\"))\n : entry.status === \"HIT\"\n ? `cache ${pc.bold(pc.green(\"HIT \"))}`\n : `cache ${pc.bold(pc.yellow(\"MISS \"))}`;\n const rawName = entry.queryName.padEnd(maxNameLen);\n const name = entry.failed ? pc.dim(pc.strikethrough(rawName)) : rawName;\n const errorCode = entry.error?.message.match(/\\[([^\\]]+)\\]/)?.[1];\n const reason = errorCode ? 
` ${pc.dim(errorCode)}` : \"\";\n console.log(` ${tag} ${name}${reason}`);\n }\n const newCount = logEntries.filter(\n (e) => e.status === \"MISS\" && !e.failed,\n ).length;\n const cacheCount = logEntries.filter(\n (e) => e.status === \"HIT\" && !e.failed,\n ).length;\n const errorCount = logEntries.filter((e) => e.failed).length;\n console.log(` ${separator}`);\n const parts = [`${newCount} new`, `${cacheCount} from cache`];\n if (errorCount > 0)\n parts.push(`${errorCount} ${errorCount === 1 ? \"error\" : \"errors\"}`);\n console.log(` ${parts.join(\", \")}. ${pc.dim(`${elapsed}s`)}`);\n console.log(\"\");\n }\n\n // Merge and sort by original file index for deterministic output\n return [...cachedResults, ...freshResults]\n .sort((a, b) => a.index - b.index)\n .map((r) => r.schema);\n}\n\n/**\n * Normalize query name by removing the .obo extension\n * @param queryName - the query name to normalize\n * @returns the normalized query name\n */\nfunction normalizeQueryName(fileName: string): string {\n return fileName.replace(/\\.obo$/, \"\");\n}\n\n/**\n * Normalize SQL type name by removing parameters/generics\n * Examples:\n * DECIMAL(38,6) -> DECIMAL\n * ARRAY<STRING> -> ARRAY\n * MAP<STRING,INT> -> MAP\n * STRUCT<name:STRING> -> STRUCT\n * INTERVAL DAY TO SECOND -> INTERVAL\n * GEOGRAPHY(4326) -> GEOGRAPHY\n */\nexport function normalizeTypeName(typeName: string): string {\n return typeName\n .replace(/\\(.*\\)$/, \"\") // remove (p, s) eg: DECIMAL(38,6) -> DECIMAL\n .replace(/<.*>$/, \"\") // remove <T> eg: ARRAY<STRING> -> ARRAY\n .split(\" \")[0]; // take first word eg: INTERVAL DAY TO SECOND -> INTERVAL\n}\n\n/** Type Map for Databricks data types to JavaScript types */\nconst typeMap: Record<string, string> = {\n // string types\n STRING: \"string\",\n BINARY: \"string\",\n // boolean\n BOOLEAN: \"boolean\",\n // numeric types\n TINYINT: \"number\",\n SMALLINT: \"number\",\n INT: \"number\",\n BIGINT: \"number\",\n FLOAT: \"number\",\n DOUBLE: 
\"number\",\n DECIMAL: \"number\",\n // date/time types\n DATE: \"string\",\n TIMESTAMP: \"string\",\n TIMESTAMP_NTZ: \"string\",\n INTERVAL: \"string\",\n // complex types\n ARRAY: \"unknown[]\",\n MAP: \"Record<string, unknown>\",\n STRUCT: \"Record<string, unknown>\",\n OBJECT: \"Record<string, unknown>\",\n VARIANT: \"unknown\",\n // spatial types\n GEOGRAPHY: \"unknown\",\n GEOMETRY: \"unknown\",\n // null type\n VOID: \"null\",\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAM,SAAS,aAAa,gCAAgC;;;;;;;;;;;;;AAc5D,MAAM,qBAAqB;;;;;AAM3B,MAAM,mBAA6B;CACjC;CACA;CACA;CACA;CAIA;CACA;CACD;AAED,SAAgB,mBAAmB,KAAsC;CACvE,MAAM,SAAkC,EAAE;AAC1C,MAAK,MAAM,KAAK,IAAI,SAAS,mBAAmB,CAC9C,QAAO,KAAK,CAAC,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC;AAE/C,QAAO;;AAGT,SAAS,uBACP,QACA,QACS;AACT,QAAO,OAAO,MAAM,CAAC,OAAO,SAAS,UAAU,SAAS,SAAS,IAAI;;;;;;;AAQvE,SAAS,WAAW,KAAiD;CACnE,MAAM,YAAY,IAAI,MAAM,cAAc;AAC1C,KAAI,UACF,KAAI;EACF,MAAM,SAAS,KAAK,MAAM,UAAU,GAAG;AACvC,MAAI,OAAO,cAAc,OAAO,QAC9B,QAAO;GACL,MAAM,OAAO;GACb,SAAS,OAAO,WAAW;GAC5B;SAEG;AAIV,QAAO,EAAE,SAAS,KAAK;;;;;;;AAQzB,SAAgB,kBACd,KACA,QACU;CACV,MAAM,kBAAkB,UAAU,mBAAmB,IAAI;CACzD,MAAM,UAAU,IAAI,SAAS,yBAAyB;CACtD,MAAM,yBAAS,IAAI,KAAa;AAChC,MAAK,MAAM,SAAS,QAClB,KAAI,CAAC,uBAAuB,MAAM,OAAO,gBAAgB,CACvD,QAAO,IAAI,MAAM,GAAG;AAGxB,QAAO,MAAM,KAAK,OAAO;;AAI3B,MAAa,yBAAyB,CAAC,cAAc;;;;;AAMrD,SAAS,qBAAqB,KAAqB;CACjD,MAAM,SAAS,kBAAkB,IAAI,CAAC,QACnC,MAAM,CAAC,uBAAuB,SAAS,EAAE,CAC3C;CACD,MAAM,aAAa,sBAAsB,IAAI;AAE7C,QAAO,OAAO,SAAS,IACnB,YAAY,OACT,KAAK,MAAM;EACV,MAAM,UAAU,WAAW;EAC3B,MAAM,aAAa,UACf,gBAAgB,WAChB;EACJ,MAAM,SAAS,UAAU,gBAAgB,WAAW;AACpD,SAAO,OAAO,WAAW,MAAM,SAAS,OAAO,aAAa,EAAE,IAAI;GAClE,CACD,KAAK,YAAY,CAAC,YACrB;;AAGN,SAAgB,mBACd,QACA,KACA,WACuC;CAEvC,MAAM,WADW,OAAO,QAAQ,cAAc,EAAE,EACvB,KAAK,SAAS;EACrC,MAAM,IAAI,MAAM;EAChB,WAAW,IAAI,IAAI,aAAa,IAAI;EACpC,SAAS,IAAI,MAAM;EACpB,EAAE;CAEH,MAAM,aAAa,qBAAqB,IAAI;CAG5C,MAAM,eAAe,QAAQ,KAAK,WAAW;EAE3C,MAAM,aAAa,QADI,kBAAkB,OAAO,UAAU,KACZ;EAE9C,MAAM,OAAO,6BAA6B,KAAK,OAAO,KAAK,GACvD,OAAO,OACP,IAAI,OAAO,KAAK;AAO
pB,SAAO,GAJS,OAAO,UACnB,OAAO,OAAO,QAAQ,eACtB,gBAAgB,OAAO,UAAU,eAEjB,KAAK,IAAI;GAC7B;CAEF,MAAM,aAAa,aAAa,SAAS;AAczC,QAAO;EAAE,MAZI;aACF,UAAU;kBACL,WAAW;cAEvB,aACI;QACF,aAAa,KAAK,YAAY,CAAC;UAE7B,UACL;;EAGY;EAAY;;;;;;AAO7B,SAAS,2BAA2B,KAAa,WAA2B;AAG1E,QAAO;aACI,UAAU;kBAHF,qBAAqB,IAAI,CAIjB;;;;AAK7B,SAAgB,sBAAsB,KAAqC;CACzE,MAAM,aAAqC,EAAE;CAG7C,MAAM,UAAU,IAAI,SADlB,yEACiC;AACnC,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,GAAG,WAAW,aAAa;AACjC,aAAW,aAAa,UAAU,aAAa;;AAGjD,QAAO;;AAGT,SAAgB,eAAe,SAAqC;AAClE,SAAQ,SAAS,aAAa,EAA9B;EACE,KAAK,UACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,QACE,QAAO;;;;;;;;;AAUb,SAAgB,oBACd,KACA,QACwB;CACxB,MAAM,WAAmC,EAAE;CAC3C,MAAM,kBAAkB,UAAU,mBAAmB,IAAI;AAEzD,MAAK,MAAM,WAAW,iBACpB,MAAK,MAAM,SAAS,IAAI,SAAS,QAAQ,CACvC,KAAI,CAAC,uBAAuB,MAAM,OAAO,gBAAgB,CACvD,UAAS,MAAM,MAAM;AAK3B,QAAO;;;;;;;;;;;AAYT,eAAsB,4BACpB,aACA,aACA,UAAuD,EAAE,EACjC;CACxB,MAAM,EAAE,UAAU,OAAO,aAAa,iBAAiB,OAAO;CAC9D,MAAM,cACJ,OAAO,mBAAmB,YAAY,OAAO,SAAS,eAAe,GACjE,KAAK,IAAI,GAAG,KAAK,MAAM,eAAe,CAAC,GACvC;CAGN,MAAM,CAAC,UAAU,SAAS,MAAM,QAAQ,IAAI,CAC1C,GAAG,QAAQ,YAAY,EACvB,UACK;EAAE,SAAS;EAAe,SAAS,EAAE;EAAE,GAGxC,WAAW,CAChB,CAAC;CAEF,MAAM,aAAa,SAAS,QAAQ,SAAS,KAAK,SAAS,OAAO,CAAC;AACnE,QAAO,MAAM,wBAAwB,WAAW,OAAO;CAEvD,MAAM,SAAS,IAAI,gBAAgB,EAAE,CAAC;CACtC,MAAM,UAAU,IAAI,SAAS;CAG7B,MAAM,cAAc,MAAM,QAAQ,IAChC,WAAW,KAAK,SAAS,GAAG,SAAS,KAAK,KAAK,aAAa,KAAK,EAAE,OAAO,CAAC,CAC5E;CAED,MAAM,YAAY,YAAY,KAAK;CAGnC,MAAM,gBAA+D,EAAE;CACvE,MAAM,kBAMD,EAAE;CACP,MAAM,aAKD,EAAE;AAEP,MAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,OAAO,WAAW;EAExB,MAAM,YAAY,mBADF,KAAK,SAAS,MAAM,OAAO,CACE;EAE7C,MAAM,MAAM,YAAY;EACxB,MAAM,UAAU,QAAQ,IAAI;EAE5B,MAAM,SAAS,MAAM,QAAQ;AAC7B,MAAI,UAAU,OAAO,SAAS,WAAW,CAAC,OAAO,OAAO;AACtD,iBAAc,KAAK;IACjB,OAAO;IACP,QAAQ;KAAE,MAAM;KAAW,MAAM,OAAO;KAAM;IAC/C,CAAC;AACF,cAAW,KAAK;IAAE;IAAW,QAAQ;IAAO,CAAC;SACxC;GACL,MAAM,kBAAkB,mBAAmB,IAAI;GAC/C,MAAM,iBAAiB,sBAAsB,IAAI;GAEjD,MAAM,iBAAiB;IAAE,GADH,oBAAoB,KAAK,gBAAgB;IACpB,GAAG;IAAg
B;GAC9D,MAAM,kBAAkB,IAAI,QAC1B,2BACC,UAAU,WAAW,WAAW;AAC/B,QAAI,uBAAuB,QAAQ,gBAAgB,CACjD,QAAO;AAET,WAAO,eAAe,eAAe,WAAW;KAEnD;GAGD,MAAM,YAAY,kBAAkB,KAAK,gBAAgB;AACzD,QAAK,MAAM,SAAS,WAAW;AAC7B,QAAI,uBAAuB,SAAS,MAAM,CAAE;AAC5C,QAAI,eAAe,OAAQ;AAC3B,WAAO,KACL,wFACA,WAAW,IACX,OACA,aAAa,MAAM,SACpB;;GAGH,MAAM,aAAa,gBAAgB,MAAM,CAAC,QAAQ,SAAS,GAAG;AAC9D,mBAAgB,KAAK;IAAE,OAAO;IAAG;IAAW;IAAK;IAAS;IAAY,CAAC;;;CAoB3E,MAAM,eAA8D,EAAE;AAEtE,KAAI,gBAAgB,SAAS,GAAG;EAC9B,IAAI,YAAY;EAChB,MAAM,QAAQ,gBAAgB;AAC9B,UAAQ,MACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,MAAM,MAAM,GACtE;EAED,MAAM,cAAc,OAAO,EACzB,OACA,WACA,KACA,SACA,iBAC+D;GAC/D,MAAM,SAAU,MAAM,OAAO,mBAAmB,iBAAiB;IAC/D,WAAW,kBAAkB;IAC7B,cAAc;IACf,CAAC;AAEF;AACA,WAAQ,OACN,cAAc,MAAM,GAAG,UAAU,IAAI,UAAU,UAAU,IAAI,UAAU,GAAG,MAAM,GACjF;AAED,UAAO,MACL,6CACA,WACA,OAAO,OAAO,OACd,OAAO,QAAQ,YAAY,UAAU,EACtC;AAED,OAAI,OAAO,OAAO,UAAU,UAAU;IACpC,MAAM,WACJ,OAAO,OAAO,OAAO,WAAW;AAClC,WAAO,KAAK,8BAA8B,WAAW,SAAS;IAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,WAAO;KACL,QAAQ;KACR;KACA,QAAQ;MAAE,MAAM;MAAW;MAAM;KACjC,YAAY;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;KAChD,OAAO,WAAW,SAAS;KAC5B;;GAGH,MAAM,EAAE,MAAM,eAAe,mBAAmB,QAAQ,KAAK,UAAU;AACvE,UAAO;IACL,QAAQ;IACR;IACA,QAAQ;KAAE,MAAM;KAAW;KAAM;IACjC,YAAY;KAAE,MAAM;KAAS;KAAM,OAAO,CAAC;KAAY;IACxD;;EAIH,MAAM,uBACJ,SACA,gBACG;AACH,QAAK,IAAI,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;IACvC,MAAM,QAAQ,QAAQ;IACtB,MAAM,EAAE,cAAc,gBAAgB,cAAc;AAEpD,QAAI,MAAM,WAAW,aAAa;KAChC,MAAM,MAAM,MAAM;AAClB,kBAAa,KAAK;MAAE,OAAO,IAAI;MAAO,QAAQ,IAAI;MAAQ,CAAC;AAC3D,WAAM,QAAQ,aAAa,IAAI;AAC/B,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ,IAAI,WAAW;MACvB,OAAO,IAAI,WAAW,SAAS,IAAI,QAAQ;MAC5C,CAAC;WACG;KACL,MAAM,EAAE,KAAK,SAAS,UAAU,gBAAgB,cAAc;KAC9D,MAAM,SACJ,MAAM,kBAAkB,QACpB,MAAM,OAAO,UACb,OAAO,MAAM,OAAO;AAC1B,YAAO,KAAK,gCAAgC,WAAW,OAAO;KAC9D,MAAM,OAAO,2BAA2B,KAAK,UAAU;AACvD,kBAAa,KAAK;MAAE;MAAO,QAAQ;OAAE,MAAM;OAAW;OAAM;MAAE,CAAC;AAC/D,WAAM,QAAQ,aAAa;MAAE,MAAM;MAAS;MAAM,OAAO;MAAM;AAC/D,gBAAW,KAAK;MACd;MACA,QAAQ;MACR,QAAQ;MACR,OAAO,WAAW,OAAO;MAC1B,CAAC;;;;AAKR,MAAI,gBAAgB,SAAS,YAC3B,MA
AK,IAAI,IAAI,GAAG,IAAI,gBAAgB,QAAQ,KAAK,aAAa;GAC5D,MAAM,QAAQ,gBAAgB,MAAM,GAAG,IAAI,YAAY;AAEvD,uBADqB,MAAM,QAAQ,WAAW,MAAM,IAAI,YAAY,CAAC,EACnC,EAAE;AACpC,SAAM,UAAU,MAAM;;OAEnB;AAIL,uBAHgB,MAAM,QAAQ,WAC5B,gBAAgB,IAAI,YAAY,CACjC,EAC4B,EAAE;AAC/B,SAAM,UAAU,MAAM;;AAGxB,UAAQ,KAAK,GAAG;;CAGlB,MAAM,YAAY,YAAY,KAAK,GAAG,aAAa,KAAM,QAAQ,EAAE;AAGnE,KAAI,WAAW,SAAS,GAAG;EACzB,MAAM,aAAa,KAAK,IAAI,GAAG,WAAW,KAAK,MAAM,EAAE,UAAU,OAAO,CAAC;EACzE,MAAM,YAAY,GAAG,IAAI,IAAI,OAAO,GAAG,CAAC;AACxC,UAAQ,IAAI,GAAG;AACf,UAAQ,IACN,KAAK,GAAG,KAAK,kBAAkB,CAAC,GAAG,GAAG,IAAI,IAAI,WAAW,OAAO,GAAG,GACpE;AACD,UAAQ,IAAI,KAAK,YAAY;AAC7B,OAAK,MAAM,SAAS,YAAY;GAC9B,MAAM,MAAM,MAAM,SACd,GAAG,KAAK,GAAG,IAAI,QAAQ,CAAC,GACxB,MAAM,WAAW,QACf,SAAS,GAAG,KAAK,GAAG,MAAM,QAAQ,CAAC,KACnC,SAAS,GAAG,KAAK,GAAG,OAAO,QAAQ,CAAC;GAC1C,MAAM,UAAU,MAAM,UAAU,OAAO,WAAW;GAClD,MAAM,OAAO,MAAM,SAAS,GAAG,IAAI,GAAG,cAAc,QAAQ,CAAC,GAAG;GAChE,MAAM,YAAY,MAAM,OAAO,QAAQ,MAAM,eAAe,GAAG;GAC/D,MAAM,SAAS,YAAY,KAAK,GAAG,IAAI,UAAU,KAAK;AACtD,WAAQ,IAAI,KAAK,IAAI,IAAI,OAAO,SAAS;;EAE3C,MAAM,WAAW,WAAW,QACzB,MAAM,EAAE,WAAW,UAAU,CAAC,EAAE,OAClC,CAAC;EACF,MAAM,aAAa,WAAW,QAC3B,MAAM,EAAE,WAAW,SAAS,CAAC,EAAE,OACjC,CAAC;EACF,MAAM,aAAa,WAAW,QAAQ,MAAM,EAAE,OAAO,CAAC;AACtD,UAAQ,IAAI,KAAK,YAAY;EAC7B,MAAM,QAAQ,CAAC,GAAG,SAAS,OAAO,GAAG,WAAW,aAAa;AAC7D,MAAI,aAAa,EACf,OAAM,KAAK,GAAG,WAAW,GAAG,eAAe,IAAI,UAAU,WAAW;AACtE,UAAQ,IAAI,KAAK,MAAM,KAAK,KAAK,CAAC,IAAI,GAAG,IAAI,GAAG,QAAQ,GAAG,GAAG;AAC9D,UAAQ,IAAI,GAAG;;AAIjB,QAAO,CAAC,GAAG,eAAe,GAAG,aAAa,CACvC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM,CACjC,KAAK,MAAM,EAAE,OAAO;;;;;;;AAQzB,SAAS,mBAAmB,UAA0B;AACpD,QAAO,SAAS,QAAQ,UAAU,GAAG;;;;;;;;;;;;AAavC,SAAgB,kBAAkB,UAA0B;AAC1D,QAAO,SACJ,QAAQ,WAAW,GAAG,CACtB,QAAQ,SAAS,GAAG,CACpB,MAAM,IAAI,CAAC;;;AAIhB,MAAM,UAAkC;CAEtC,QAAQ;CACR,QAAQ;CAER,SAAS;CAET,SAAS;CACT,UAAU;CACV,KAAK;CACL,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,SAAS;CAET,MAAM;CACN,WAAW;CACX,eAAe;CACf,UAAU;CAEV,OAAO;CACP,KAAK;CACL,QAAQ;CACR,QAAQ;CACR,SAAS;CAET,WAAW;CACX,UAAU;CAEV,MAAM;CACP"}
@@ -0,0 +1,38 @@
1
+ import { createLogger } from "../../logging/logger.js";
2
+ import crypto from "node:crypto";
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+
6
+ //#region src/type-generator/serving/cache.ts
7
// Scoped logger for the serving type-generator cache layer.
const logger = createLogger("type-generator:serving:cache");
// Bump to invalidate previously persisted caches — loadServingCache starts
// fresh on any version mismatch.
const CACHE_VERSION = "1";
const CACHE_FILE = ".appkit-serving-types-cache.json";
// Cache lives under node_modules so it stays out of VCS and is wiped by clean installs.
const CACHE_DIR = path.join(process.cwd(), "node_modules", ".databricks", "appkit");
11
/**
 * Computes a hex-encoded SHA-256 fingerprint of a serialized schema.
 * @param {string} schemaJson - Serialized schema to fingerprint.
 * @returns {string} Lowercase hex digest.
 */
function hashSchema(schemaJson) {
  const hasher = crypto.createHash("sha256");
  hasher.update(schemaJson);
  return hasher.digest("hex");
}
14
/**
 * Loads the serving type cache from disk.
 * Returns a fresh empty cache when the file is missing, unparsable,
 * or was written by a different CACHE_VERSION.
 * @returns {Promise<{version: string, endpoints: Record<string, object>}>}
 */
async function loadServingCache() {
  try {
    await fs.mkdir(CACHE_DIR, { recursive: true });
    const parsed = JSON.parse(
      await fs.readFile(path.join(CACHE_DIR, CACHE_FILE), "utf8"),
    );
    if (parsed.version === CACHE_VERSION) return parsed;
    logger.debug("Cache version mismatch, starting fresh");
  } catch (err) {
    // A missing file is the normal first-run case; anything else
    // (e.g. a JSON parse failure) means the cache is unusable.
    if (err.code !== "ENOENT") logger.warn("Cache file is corrupted, flushing cache completely.");
  }
  return {
    version: CACHE_VERSION,
    endpoints: {}
  };
}
30
/**
 * Persists the serving type cache to disk as pretty-printed JSON,
 * creating the cache directory if necessary.
 * @param {{version: string, endpoints: Record<string, object>}} cache
 * @returns {Promise<void>}
 */
async function saveServingCache(cache) {
  await fs.mkdir(CACHE_DIR, { recursive: true });
  const serialized = JSON.stringify(cache, null, 2);
  await fs.writeFile(path.join(CACHE_DIR, CACHE_FILE), serialized, "utf8");
}
35
+
36
+ //#endregion
37
+ export { CACHE_VERSION, hashSchema, loadServingCache, saveServingCache };
38
+ //# sourceMappingURL=cache.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cache.js","names":[],"sources":["../../../src/type-generator/serving/cache.ts"],"sourcesContent":["import crypto from \"node:crypto\";\nimport fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { createLogger } from \"../../logging/logger\";\n\nconst logger = createLogger(\"type-generator:serving:cache\");\n\nexport const CACHE_VERSION = \"1\";\nconst CACHE_FILE = \".appkit-serving-types-cache.json\";\nconst CACHE_DIR = path.join(\n process.cwd(),\n \"node_modules\",\n \".databricks\",\n \"appkit\",\n);\n\nexport interface ServingCacheEntry {\n hash: string;\n requestType: string;\n responseType: string;\n chunkType: string | null;\n requestKeys: string[];\n}\n\nexport interface ServingCache {\n version: string;\n endpoints: Record<string, ServingCacheEntry>;\n}\n\nexport function hashSchema(schemaJson: string): string {\n return crypto.createHash(\"sha256\").update(schemaJson).digest(\"hex\");\n}\n\nexport async function loadServingCache(): Promise<ServingCache> {\n const cachePath = path.join(CACHE_DIR, CACHE_FILE);\n try {\n await fs.mkdir(CACHE_DIR, { recursive: true });\n const raw = await fs.readFile(cachePath, \"utf8\");\n const cache = JSON.parse(raw) as ServingCache;\n if (cache.version === CACHE_VERSION) {\n return cache;\n }\n logger.debug(\"Cache version mismatch, starting fresh\");\n } catch (err) {\n if ((err as NodeJS.ErrnoException).code !== \"ENOENT\") {\n logger.warn(\"Cache file is corrupted, flushing cache completely.\");\n }\n }\n return { version: CACHE_VERSION, endpoints: {} };\n}\n\nexport async function saveServingCache(cache: ServingCache): Promise<void> {\n const cachePath = path.join(CACHE_DIR, CACHE_FILE);\n await fs.mkdir(CACHE_DIR, { recursive: true });\n await fs.writeFile(cachePath, JSON.stringify(cache, null, 2), 
\"utf8\");\n}\n"],"mappings":";;;;;;AAKA,MAAM,SAAS,aAAa,+BAA+B;AAE3D,MAAa,gBAAgB;AAC7B,MAAM,aAAa;AACnB,MAAM,YAAY,KAAK,KACrB,QAAQ,KAAK,EACb,gBACA,eACA,SACD;AAeD,SAAgB,WAAW,YAA4B;AACrD,QAAO,OAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;AAGrE,eAAsB,mBAA0C;CAC9D,MAAM,YAAY,KAAK,KAAK,WAAW,WAAW;AAClD,KAAI;AACF,QAAM,GAAG,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;EAC9C,MAAM,MAAM,MAAM,GAAG,SAAS,WAAW,OAAO;EAChD,MAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,MAAI,MAAM,YAAY,cACpB,QAAO;AAET,SAAO,MAAM,yCAAyC;UAC/C,KAAK;AACZ,MAAK,IAA8B,SAAS,SAC1C,QAAO,KAAK,sDAAsD;;AAGtE,QAAO;EAAE,SAAS;EAAe,WAAW,EAAE;EAAE;;AAGlD,eAAsB,iBAAiB,OAAoC;CACzE,MAAM,YAAY,KAAK,KAAK,WAAW,WAAW;AAClD,OAAM,GAAG,MAAM,WAAW,EAAE,WAAW,MAAM,CAAC;AAC9C,OAAM,GAAG,UAAU,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,EAAE,OAAO"}
@@ -0,0 +1,108 @@
1
+ //#region src/type-generator/serving/converter.ts
2
/**
 * Converts an OpenAPI schema to a TypeScript type string.
 * Recurses through oneOf/enum/array/object shapes; unrecognized or missing
 * `type` values fall back to `unknown`.
 */
function schemaToTypeString(schema, indent = 0) {
  // Indentation prefix for the current nesting depth of emitted object literals.
  const pad = "  ".repeat(indent);
  // oneOf renders as a TS union of its alternatives.
  if (schema.oneOf) return schema.oneOf.map((s) => schemaToTypeString(s, indent)).join(" | ");
  // Enums become a union of JSON-stringified literal values.
  if (schema.enum) return schema.enum.map((v) => JSON.stringify(v)).join(" | ");
  switch (schema.type) {
    case "string": return "string";
    case "integer":
    case "number": return "number";
    case "boolean": return "boolean";
    case "array": {
      if (!schema.items) return "unknown[]";
      const itemType = schemaToTypeString(schema.items, indent);
      // Parenthesize union item types, e.g. `(string | number)[]`; inline
      // object literals are left bare.
      if (itemType.includes(" | ") && !itemType.startsWith("{")) return `(${itemType})[]`;
      return `${itemType}[]`;
    }
    case "object": {
      // Without a property map we cannot do better than a generic record.
      if (!schema.properties) return "Record<string, unknown>";
      const required = new Set(schema.required ?? []);
      // Each property line: optional `?` when not required, ` | null` when
      // nullable, and a `/** @openapi ... */` comment preserving numeric
      // format / nullability info that the TS type alone cannot express.
      return `{\n${Object.entries(schema.properties).map(([key, prop]) => {
        const optional = !required.has(key) ? "?" : "";
        const nullable = prop.nullable ? " | null" : "";
        const typeStr = schemaToTypeString(prop, indent + 1);
        return `${pad}  ${prop.format && (prop.type === "number" || prop.type === "integer") ? `/** @openapi ${prop.format}${prop.nullable ? ", nullable" : ""} */\n${pad}  ` : prop.nullable && prop.type === "integer" ? `/** @openapi integer, nullable */\n${pad}  ` : ""}${key}${optional}: ${typeStr}${nullable};`;
      }).join("\n")}\n${pad}}`;
    }
    default: return "unknown";
  }
}
33
/**
 * Extracts the top-level property keys from the request schema.
 * Strips the `stream` property (plugin-controlled).
 * @param {object} operation - OpenAPI path operation.
 * @returns {string[]} Request body property names, minus `stream`.
 */
function extractRequestKeys(operation) {
  const properties = operation.requestBody?.content?.["application/json"]?.schema?.properties;
  if (!properties) return [];
  const keys = [];
  for (const key of Object.keys(properties)) {
    if (key !== "stream") keys.push(key);
  }
  return keys;
}
42
/**
 * Extracts and converts the request schema from an OpenAPI path operation.
 * Strips the `stream` property from the request type (the plugin controls it).
 * @param {object} operation - OpenAPI path operation.
 * @returns {string} TypeScript type string for the request body.
 */
function convertRequestSchema(operation) {
  const schema = operation.requestBody?.content?.["application/json"]?.schema;
  if (!schema?.properties) return "Record<string, unknown>";
  // Remove `stream` from both the property map and the required list.
  const { stream: _stream, ...properties } = schema.properties;
  const required = (schema.required ?? []).filter((name) => name !== "stream");
  const filtered = {
    ...schema,
    properties,
    required: required.length > 0 ? required : void 0
  };
  return schemaToTypeString(filtered);
}
57
/**
 * Extracts and converts the 200-response schema from an OpenAPI path operation.
 * @param {object} operation - OpenAPI path operation.
 * @returns {string} TypeScript type string, or "unknown" when no schema exists.
 */
function convertResponseSchema(operation) {
  const schema = operation.responses?.["200"]?.content?.["application/json"]?.schema;
  return schema ? schemaToTypeString(schema) : "unknown";
}
65
/**
 * Derives a streaming chunk type from the response schema.
 * Returns null if the response doesn't follow OpenAI-compatible format.
 *
 * OpenAI-compatible heuristic: response has `choices` array where items
 * have a `message` object property.
 * @param {object} operation - OpenAPI path operation.
 * @returns {string | null} TypeScript type string for a stream chunk, or null.
 */
function deriveChunkType(operation) {
  const schema = operation.responses?.["200"]?.content?.["application/json"]?.schema;
  if (!schema?.properties) return null;
  const choicesProp = schema.properties.choices;
  if (!choicesProp || choicesProp.type !== "array" || !choicesProp.items) return null;
  const choiceItemProps = choicesProp.items.properties;
  if (!choiceItemProps?.message) return null;
  // OpenAI-compatible: build the chunk schema by transforming the response —
  // replace `message` with an all-optional `delta`, make `finish_reason`
  // nullable, and drop `usage` (not carried on individual chunks).
  const chunkProperties = {};
  for (const [key, prop] of Object.entries(schema.properties)) {
    if (key === "usage") continue;
    if (key !== "choices") {
      chunkProperties[key] = prop;
      continue;
    }
    const chunkChoiceProps = {};
    for (const [choiceKey, choiceProp] of Object.entries(choiceItemProps)) {
      if (choiceKey === "message") {
        // delta ~ Partial<message>: strip any `required` list so every delta
        // field renders optional. (The original shallow copy kept `required`,
        // which would have made delta fields mandatory — contrary to how
        // streaming deltas omit fields chunk-by-chunk.)
        chunkChoiceProps.delta = { ...choiceItemProps.message, required: void 0 };
      } else if (choiceKey === "finish_reason") {
        chunkChoiceProps[choiceKey] = { ...choiceProp, nullable: true };
      } else {
        chunkChoiceProps[choiceKey] = choiceProp;
      }
    }
    chunkProperties[key] = {
      type: "array",
      items: {
        type: "object",
        properties: chunkChoiceProps
      }
    };
  }
  return schemaToTypeString({
    type: "object",
    properties: chunkProperties
  });
}
105
+
106
+ //#endregion
107
+ export { convertRequestSchema, convertResponseSchema, deriveChunkType, extractRequestKeys };
108
+ //# sourceMappingURL=converter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"converter.js","names":[],"sources":["../../../src/type-generator/serving/converter.ts"],"sourcesContent":["import type { OpenApiOperation, OpenApiSchema } from \"./fetcher\";\n\n/**\n * Converts an OpenAPI schema to a TypeScript type string.\n */\nfunction schemaToTypeString(schema: OpenApiSchema, indent = 0): string {\n const pad = \" \".repeat(indent);\n\n if (schema.oneOf) {\n return schema.oneOf.map((s) => schemaToTypeString(s, indent)).join(\" | \");\n }\n\n if (schema.enum) {\n return schema.enum.map((v) => JSON.stringify(v)).join(\" | \");\n }\n\n switch (schema.type) {\n case \"string\":\n return \"string\";\n case \"integer\":\n case \"number\":\n return \"number\";\n case \"boolean\":\n return \"boolean\";\n case \"array\": {\n if (!schema.items) return \"unknown[]\";\n const itemType = schemaToTypeString(schema.items, indent);\n // Wrap union types in parens for array\n if (itemType.includes(\" | \") && !itemType.startsWith(\"{\")) {\n return `(${itemType})[]`;\n }\n return `${itemType}[]`;\n }\n case \"object\": {\n if (!schema.properties) return \"Record<string, unknown>\";\n const required = new Set(schema.required ?? []);\n const entries = Object.entries(schema.properties).map(([key, prop]) => {\n const optional = !required.has(key) ? \"?\" : \"\";\n const nullable = prop.nullable ? \" | null\" : \"\";\n const typeStr = schemaToTypeString(prop, indent + 1);\n const formatComment =\n prop.format && (prop.type === \"number\" || prop.type === \"integer\")\n ? `/** @openapi ${prop.format}${prop.nullable ? \", nullable\" : \"\"} */\\n${pad} `\n : prop.nullable && prop.type === \"integer\"\n ? 
`/** @openapi integer, nullable */\\n${pad} `\n : \"\";\n return `${pad} ${formatComment}${key}${optional}: ${typeStr}${nullable};`;\n });\n return `{\\n${entries.join(\"\\n\")}\\n${pad}}`;\n }\n default:\n return \"unknown\";\n }\n}\n\n/**\n * Extracts the top-level property keys from the request schema.\n * Strips the `stream` property (plugin-controlled).\n */\nexport function extractRequestKeys(operation: OpenApiOperation): string[] {\n const schema = operation.requestBody?.content?.[\"application/json\"]?.schema;\n if (!schema?.properties) return [];\n return Object.keys(schema.properties).filter((k) => k !== \"stream\");\n}\n\n/**\n * Extracts and converts the request schema from an OpenAPI path operation.\n * Strips the `stream` property from the request type.\n */\nexport function convertRequestSchema(operation: OpenApiOperation): string {\n const schema = operation.requestBody?.content?.[\"application/json\"]?.schema;\n if (!schema || !schema.properties) return \"Record<string, unknown>\";\n\n // Strip `stream` property — the plugin controls this\n const { stream: _stream, ...filteredProps } = schema.properties;\n const filteredRequired = (schema.required ?? []).filter(\n (r) => r !== \"stream\",\n );\n\n const filteredSchema: OpenApiSchema = {\n ...schema,\n properties: filteredProps,\n required: filteredRequired.length > 0 ? 
filteredRequired : undefined,\n };\n\n return schemaToTypeString(filteredSchema);\n}\n\n/**\n * Extracts and converts the response schema from an OpenAPI path operation.\n */\nexport function convertResponseSchema(operation: OpenApiOperation): string {\n const response = operation.responses?.[\"200\"];\n const schema = response?.content?.[\"application/json\"]?.schema;\n if (!schema) return \"unknown\";\n return schemaToTypeString(schema);\n}\n\n/**\n * Derives a streaming chunk type from the response schema.\n * Returns null if the response doesn't follow OpenAI-compatible format.\n *\n * OpenAI-compatible heuristic: response has `choices` array where items\n * have a `message` object property.\n */\nexport function deriveChunkType(operation: OpenApiOperation): string | null {\n const response = operation.responses?.[\"200\"];\n const schema = response?.content?.[\"application/json\"]?.schema;\n if (!schema?.properties) return null;\n\n const choicesProp = schema.properties.choices;\n if (!choicesProp || choicesProp.type !== \"array\" || !choicesProp.items)\n return null;\n\n const choiceItemProps = choicesProp.items.properties;\n if (!choiceItemProps?.message) return null;\n\n // It's OpenAI-compatible. 
Build the chunk type by transforming.\n const messageSchema = choiceItemProps.message;\n\n // Build chunk schema: replace message with delta (Partial), make finish_reason nullable, drop usage\n const chunkProperties: Record<string, OpenApiSchema> = {};\n\n for (const [key, prop] of Object.entries(schema.properties)) {\n if (key === \"usage\") continue; // Drop usage from chunks\n if (key === \"choices\") {\n // Transform choices items\n const chunkChoiceProps: Record<string, OpenApiSchema> = {};\n for (const [ck, cp] of Object.entries(choiceItemProps)) {\n if (ck === \"message\") {\n // Replace message with delta: Partial<message>\n chunkChoiceProps.delta = { ...messageSchema };\n } else if (ck === \"finish_reason\") {\n chunkChoiceProps[ck] = { ...cp, nullable: true };\n } else {\n chunkChoiceProps[ck] = cp;\n }\n }\n chunkProperties[key] = {\n type: \"array\",\n items: {\n type: \"object\",\n properties: chunkChoiceProps,\n },\n };\n } else {\n chunkProperties[key] = prop;\n }\n }\n\n const chunkSchema: OpenApiSchema = {\n type: \"object\",\n properties: chunkProperties,\n };\n\n // Delta properties are already optional (no `required` array in the schema),\n // so schemaToTypeString renders them with `?:` — no Partial<> wrapper needed.\n return 
schemaToTypeString(chunkSchema);\n}\n"],"mappings":";;;;AAKA,SAAS,mBAAmB,QAAuB,SAAS,GAAW;CACrE,MAAM,MAAM,KAAK,OAAO,OAAO;AAE/B,KAAI,OAAO,MACT,QAAO,OAAO,MAAM,KAAK,MAAM,mBAAmB,GAAG,OAAO,CAAC,CAAC,KAAK,MAAM;AAG3E,KAAI,OAAO,KACT,QAAO,OAAO,KAAK,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,MAAM;AAG9D,SAAQ,OAAO,MAAf;EACE,KAAK,SACH,QAAO;EACT,KAAK;EACL,KAAK,SACH,QAAO;EACT,KAAK,UACH,QAAO;EACT,KAAK,SAAS;AACZ,OAAI,CAAC,OAAO,MAAO,QAAO;GAC1B,MAAM,WAAW,mBAAmB,OAAO,OAAO,OAAO;AAEzD,OAAI,SAAS,SAAS,MAAM,IAAI,CAAC,SAAS,WAAW,IAAI,CACvD,QAAO,IAAI,SAAS;AAEtB,UAAO,GAAG,SAAS;;EAErB,KAAK,UAAU;AACb,OAAI,CAAC,OAAO,WAAY,QAAO;GAC/B,MAAM,WAAW,IAAI,IAAI,OAAO,YAAY,EAAE,CAAC;AAa/C,UAAO,MAZS,OAAO,QAAQ,OAAO,WAAW,CAAC,KAAK,CAAC,KAAK,UAAU;IACrE,MAAM,WAAW,CAAC,SAAS,IAAI,IAAI,GAAG,MAAM;IAC5C,MAAM,WAAW,KAAK,WAAW,YAAY;IAC7C,MAAM,UAAU,mBAAmB,MAAM,SAAS,EAAE;AAOpD,WAAO,GAAG,IAAI,IALZ,KAAK,WAAW,KAAK,SAAS,YAAY,KAAK,SAAS,aACpD,gBAAgB,KAAK,SAAS,KAAK,WAAW,eAAe,GAAG,OAAO,IAAI,MAC3E,KAAK,YAAY,KAAK,SAAS,YAC7B,sCAAsC,IAAI,MAC1C,KAC0B,MAAM,SAAS,IAAI,UAAU,SAAS;KACxE,CACmB,KAAK,KAAK,CAAC,IAAI,IAAI;;EAE1C,QACE,QAAO;;;;;;;AAQb,SAAgB,mBAAmB,WAAuC;CACxE,MAAM,SAAS,UAAU,aAAa,UAAU,qBAAqB;AACrE,KAAI,CAAC,QAAQ,WAAY,QAAO,EAAE;AAClC,QAAO,OAAO,KAAK,OAAO,WAAW,CAAC,QAAQ,MAAM,MAAM,SAAS;;;;;;AAOrE,SAAgB,qBAAqB,WAAqC;CACxE,MAAM,SAAS,UAAU,aAAa,UAAU,qBAAqB;AACrE,KAAI,CAAC,UAAU,CAAC,OAAO,WAAY,QAAO;CAG1C,MAAM,EAAE,QAAQ,SAAS,GAAG,kBAAkB,OAAO;CACrD,MAAM,oBAAoB,OAAO,YAAY,EAAE,EAAE,QAC9C,MAAM,MAAM,SACd;AAQD,QAAO,mBAN+B;EACpC,GAAG;EACH,YAAY;EACZ,UAAU,iBAAiB,SAAS,IAAI,mBAAmB;EAC5D,CAEwC;;;;;AAM3C,SAAgB,sBAAsB,WAAqC;CAEzE,MAAM,UADW,UAAU,YAAY,SACd,UAAU,qBAAqB;AACxD,KAAI,CAAC,OAAQ,QAAO;AACpB,QAAO,mBAAmB,OAAO;;;;;;;;;AAUnC,SAAgB,gBAAgB,WAA4C;CAE1E,MAAM,UADW,UAAU,YAAY,SACd,UAAU,qBAAqB;AACxD,KAAI,CAAC,QAAQ,WAAY,QAAO;CAEhC,MAAM,cAAc,OAAO,WAAW;AACtC,KAAI,CAAC,eAAe,YAAY,SAAS,WAAW,CAAC,YAAY,MAC/D,QAAO;CAET,MAAM,kBAAkB,YAAY,MAAM;AAC1C,KAAI,CAAC,iBAAiB,QAAS,QAAO;CAGtC,MAAM,gBAAgB,gBAAgB;CAGtC,MAAM,kBAAiD,EAAE;AAEzD,MAAK,MAAM,CAAC,KAAK,SAAS,OAAO,QAAQ,OAAO,WAAW,EAAE;A
AC3D,MAAI,QAAQ,QAAS;AACrB,MAAI,QAAQ,WAAW;GAErB,MAAM,mBAAkD,EAAE;AAC1D,QAAK,MAAM,CAAC,IAAI,OAAO,OAAO,QAAQ,gBAAgB,CACpD,KAAI,OAAO,UAET,kBAAiB,QAAQ,EAAE,GAAG,eAAe;YACpC,OAAO,gBAChB,kBAAiB,MAAM;IAAE,GAAG;IAAI,UAAU;IAAM;OAEhD,kBAAiB,MAAM;AAG3B,mBAAgB,OAAO;IACrB,MAAM;IACN,OAAO;KACL,MAAM;KACN,YAAY;KACb;IACF;QAED,iBAAgB,OAAO;;AAW3B,QAAO,mBAP4B;EACjC,MAAM;EACN,YAAY;EACb,CAIqC"}