@databricks/appkit 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/AGENTS.md +1179 -0
  2. package/CLAUDE.md +1178 -2
  3. package/NOTICE.md +2 -0
  4. package/bin/setup-claude.js +1 -1
  5. package/dist/_virtual/rolldown_runtime.js +2 -2
  6. package/dist/analytics/analytics.d.ts +33 -8
  7. package/dist/analytics/analytics.d.ts.map +1 -1
  8. package/dist/analytics/analytics.js +51 -24
  9. package/dist/analytics/analytics.js.map +1 -1
  10. package/dist/analytics/defaults.js.map +1 -1
  11. package/dist/analytics/query.js +4 -4
  12. package/dist/analytics/query.js.map +1 -1
  13. package/dist/appkit/package.js +1 -1
  14. package/dist/cache/defaults.js.map +1 -1
  15. package/dist/cache/index.d.ts +1 -0
  16. package/dist/cache/index.d.ts.map +1 -1
  17. package/dist/cache/index.js +1 -2
  18. package/dist/cache/index.js.map +1 -1
  19. package/dist/cache/storage/memory.js.map +1 -1
  20. package/dist/connectors/lakebase/client.js +7 -8
  21. package/dist/connectors/lakebase/client.js.map +1 -1
  22. package/dist/connectors/lakebase/defaults.js.map +1 -1
  23. package/dist/connectors/sql-warehouse/client.js.map +1 -1
  24. package/dist/connectors/sql-warehouse/defaults.js.map +1 -1
  25. package/dist/context/execution-context.js +75 -0
  26. package/dist/context/execution-context.js.map +1 -0
  27. package/dist/context/index.js +27 -0
  28. package/dist/context/index.js.map +1 -0
  29. package/dist/context/service-context.js +149 -0
  30. package/dist/context/service-context.js.map +1 -0
  31. package/dist/context/user-context.js +15 -0
  32. package/dist/context/user-context.js.map +1 -0
  33. package/dist/core/appkit.d.ts +3 -0
  34. package/dist/core/appkit.d.ts.map +1 -1
  35. package/dist/core/appkit.js +7 -0
  36. package/dist/core/appkit.js.map +1 -1
  37. package/dist/index.d.ts +5 -6
  38. package/dist/index.js +3 -10
  39. package/dist/plugin/interceptors/cache.js.map +1 -1
  40. package/dist/plugin/interceptors/retry.js.map +1 -1
  41. package/dist/plugin/interceptors/telemetry.js.map +1 -1
  42. package/dist/plugin/interceptors/timeout.js.map +1 -1
  43. package/dist/plugin/plugin.d.ts +39 -5
  44. package/dist/plugin/plugin.d.ts.map +1 -1
  45. package/dist/plugin/plugin.js +82 -6
  46. package/dist/plugin/plugin.js.map +1 -1
  47. package/dist/plugin/to-plugin.d.ts +4 -0
  48. package/dist/plugin/to-plugin.d.ts.map +1 -1
  49. package/dist/plugin/to-plugin.js +3 -0
  50. package/dist/plugin/to-plugin.js.map +1 -1
  51. package/dist/server/index.d.ts +3 -0
  52. package/dist/server/index.d.ts.map +1 -1
  53. package/dist/server/index.js +3 -4
  54. package/dist/server/index.js.map +1 -1
  55. package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
  56. package/dist/server/utils.js.map +1 -1
  57. package/dist/server/vite-dev-server.js +0 -2
  58. package/dist/server/vite-dev-server.js.map +1 -1
  59. package/dist/shared/src/sql/helpers.js.map +1 -1
  60. package/dist/stream/arrow-stream-processor.js.map +1 -1
  61. package/dist/stream/buffers.js.map +1 -1
  62. package/dist/stream/sse-writer.js.map +1 -1
  63. package/dist/stream/stream-manager.js.map +1 -1
  64. package/dist/stream/stream-registry.js.map +1 -1
  65. package/dist/telemetry/instrumentations.js.map +1 -1
  66. package/dist/type-generator/index.js.map +1 -1
  67. package/dist/type-generator/query-registry.js.map +1 -1
  68. package/dist/type-generator/types.js.map +1 -1
  69. package/dist/type-generator/vite-plugin.js.map +1 -1
  70. package/dist/utils/env-validator.js +1 -5
  71. package/dist/utils/env-validator.js.map +1 -1
  72. package/dist/utils/merge.js +1 -5
  73. package/dist/utils/merge.js.map +1 -1
  74. package/dist/utils/vite-config-merge.js +1 -5
  75. package/dist/utils/vite-config-merge.js.map +1 -1
  76. package/llms.txt +160 -58
  77. package/package.json +5 -2
  78. package/dist/index.js.map +0 -1
  79. package/dist/utils/databricks-client-middleware.d.ts +0 -17
  80. package/dist/utils/databricks-client-middleware.d.ts.map +0 -1
  81. package/dist/utils/databricks-client-middleware.js +0 -117
  82. package/dist/utils/databricks-client-middleware.js.map +0 -1
  83. package/dist/utils/index.js +0 -26
  84. package/dist/utils/index.js.map +0 -1
package/dist/type-generator/vite-plugin.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"vite-plugin.js","names":["root: string","outFile: string","watchFolders: string[]"],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport fs from \"node:fs\";\nimport { generateFromEntryPoint } from \"./index\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n apply() {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return false;\n }\n\n if (!fs.existsSync(path.join(process.cwd(), \"config\", \"queries\"))) {\n return false;\n }\n\n return true;\n },\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = options?.watchFolders ?? [\n path.join(process.cwd(), \"config\", \"queries\"),\n ];\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;;AAqBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAIA;CACJ,IAAIC;CACJ,IAAIC;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,YAAQ,KACN,6DACD;AACD;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EAEN,QAAQ;AAGN,OAAI,EAFgB,QAAQ,IAAI,2BAA2B,KAEzC;AAChB,YAAQ,KACN,6DACD;AACD,WAAO;;AAGT,OAAI,CAAC,GAAG,WAAW,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAAC,CAC/D,QAAO;AAGT,UAAO;;EAGT,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,kBAAe,SAAS,gBAAgB,CACtC,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAC9C;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
+ {"version":3,"file":"vite-plugin.js","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport fs from \"node:fs\";\nimport { generateFromEntryPoint } from \"./index\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n apply() {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return false;\n }\n\n if (!fs.existsSync(path.join(process.cwd(), \"config\", \"queries\"))) {\n return false;\n }\n\n return true;\n },\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = options?.watchFolders ?? [\n path.join(process.cwd(), \"config\", \"queries\"),\n ];\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;;AAqBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAI;CACJ,IAAI;CACJ,IAAI;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,YAAQ,KACN,6DACD;AACD;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EAEN,QAAQ;AAGN,OAAI,EAFgB,QAAQ,IAAI,2BAA2B,KAEzC;AAChB,YAAQ,KACN,6DACD;AACD,WAAO;;AAGT,OAAI,CAAC,GAAG,WAAW,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAAC,CAC/D,QAAO;AAGT,UAAO;;EAGT,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,kBAAe,SAAS,gBAAgB,CACtC,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAC9C;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
package/dist/utils/env-validator.js CHANGED
@@ -1,14 +1,10 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/env-validator.ts
  function validateEnv(envVars) {
  const missingVars = [];
  for (const envVar of envVars) if (!process.env[envVar]) missingVars.push(envVar);
  if (missingVars.length > 0) throw new Error(`Missing required environment variables: ${missingVars.join(", ")}`);
  }
- var init_env_validator = __esmMin((() => {}));
 
  //#endregion
- init_env_validator();
- export { init_env_validator, validateEnv };
+ export { validateEnv };
  //# sourceMappingURL=env-validator.js.map
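`validateEnv` is a small internal guard; the sketch below only illustrates its contract (throw once, listing every missing variable), with the variable names chosen purely for the example.

```ts
import { validateEnv } from "./env-validator.js"; // internal module shown above

// Returns silently when both variables are set; otherwise throws
// "Missing required environment variables: DATABRICKS_WAREHOUSE_ID, ..."
validateEnv(["DATABRICKS_HOST", "DATABRICKS_WAREHOUSE_ID"]);
```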
package/dist/utils/env-validator.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["export function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw new Error(\n `Missing required environment variables: ${missingVars.join(\", \")}`,\n );\n }\n}\n"],"mappings":";;;AAAA,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,IAAI,MACR,2CAA2C,YAAY,KAAK,KAAK,GAClE"}
+ {"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["export function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw new Error(\n `Missing required environment variables: ${missingVars.join(\", \")}`,\n );\n }\n}\n"],"mappings":";AAAA,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,IAAI,MACR,2CAA2C,YAAY,KAAK,KAAK,GAClE"}
package/dist/utils/merge.js CHANGED
@@ -1,5 +1,3 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/merge.ts
  function deepMerge(target, ...sources) {
  if (!sources.length) return target;
@@ -17,9 +15,7 @@ function deepMerge(target, ...sources) {
  function isObject(item) {
  return typeof item === "object" && item !== null && !Array.isArray(item);
  }
- var init_merge = __esmMin((() => {}));
 
  //#endregion
- init_merge();
- export { deepMerge, init_merge };
+ export { deepMerge };
  //# sourceMappingURL=merge.js.map
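`deepMerge` recursively merges plain objects (arrays and primitives are replaced rather than concatenated), which appears to be how AppKit layers user config over defaults. A small illustrative call, with the shapes invented for the example:

```ts
import { deepMerge } from "./merge.js"; // internal module shown above

const defaults: Record<string, unknown> = { cache: { ttlSeconds: 60, enabled: true }, format: "JSON" };
const overrides: Record<string, unknown> = { cache: { ttlSeconds: 300 } };

// Nested plain objects are merged key by key; later sources win on conflicts.
const merged = deepMerge(defaults, overrides);
// -> { cache: { ttlSeconds: 300, enabled: true }, format: "JSON" }
```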
package/dist/utils/merge.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";;;AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}
+ {"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}
package/dist/utils/vite-config-merge.js CHANGED
@@ -1,5 +1,3 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/vite-config-merge.ts
  function mergeConfigDedup(base, override, mergeFn) {
  const merged = mergeFn(base, override);
@@ -14,9 +12,7 @@ function mergeConfigDedup(base, override, mergeFn) {
  }
  return merged;
  }
- var init_vite_config_merge = __esmMin((() => {}));
 
  //#endregion
- init_vite_config_merge();
- export { init_vite_config_merge, mergeConfigDedup };
+ export { mergeConfigDedup };
  //# sourceMappingURL=vite-config-merge.js.map
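`mergeConfigDedup` delegates the actual merge to whatever function you pass (Vite's `mergeConfig` is the natural fit, though that pairing is an assumption here) and then drops duplicate plugins by `name`, keeping the first occurrence. A sketch:

```ts
import { mergeConfig, type UserConfig } from "vite";
import { mergeConfigDedup } from "./vite-config-merge.js"; // internal module shown above

declare const appKitDefaults: UserConfig; // hypothetical config supplied by AppKit
declare const userOverrides: UserConfig;  // hypothetical user-supplied overrides

// If both configs register a plugin with the same name, only the first copy survives.
const merged = mergeConfigDedup(appKitDefaults, userOverrides, mergeConfig);
```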
package/dist/utils/vite-config-merge.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"vite-config-merge.js","names":[],"sources":["../../src/utils/vite-config-merge.ts"],"sourcesContent":["import type { Plugin } from \"vite\";\n\nexport function mergeConfigDedup(\n base: any,\n override: any,\n mergeFn: (a: any, b: any) => any,\n) {\n const merged = mergeFn(base, override);\n if (base.plugins && override.plugins) {\n const seen = new Set<string>();\n merged.plugins = [...base.plugins, ...override.plugins].filter(\n (p: Plugin) => {\n const name = p.name;\n if (seen.has(name)) return false;\n seen.add(name);\n return true;\n },\n );\n }\n return merged;\n}\n"],"mappings":";;;AAEA,SAAgB,iBACd,MACA,UACA,SACA;CACA,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtC,KAAI,KAAK,WAAW,SAAS,SAAS;EACpC,MAAM,uBAAO,IAAI,KAAa;AAC9B,SAAO,UAAU,CAAC,GAAG,KAAK,SAAS,GAAG,SAAS,QAAQ,CAAC,QACrD,MAAc;GACb,MAAM,OAAO,EAAE;AACf,OAAI,KAAK,IAAI,KAAK,CAAE,QAAO;AAC3B,QAAK,IAAI,KAAK;AACd,UAAO;IAEV;;AAEH,QAAO"}
+ {"version":3,"file":"vite-config-merge.js","names":[],"sources":["../../src/utils/vite-config-merge.ts"],"sourcesContent":["import type { Plugin } from \"vite\";\n\nexport function mergeConfigDedup(\n base: any,\n override: any,\n mergeFn: (a: any, b: any) => any,\n) {\n const merged = mergeFn(base, override);\n if (base.plugins && override.plugins) {\n const seen = new Set<string>();\n merged.plugins = [...base.plugins, ...override.plugins].filter(\n (p: Plugin) => {\n const name = p.name;\n if (seen.has(name)) return false;\n seen.add(name);\n return true;\n },\n );\n }\n return merged;\n}\n"],"mappings":";AAEA,SAAgB,iBACd,MACA,UACA,SACA;CACA,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtC,KAAI,KAAK,WAAW,SAAS,SAAS;EACpC,MAAM,uBAAO,IAAI,KAAa;AAC9B,SAAO,UAAU,CAAC,GAAG,KAAK,SAAS,GAAG,SAAS,QAAQ,CAAC,QACrD,MAAc;GACb,MAAM,OAAO,EAAE;AACf,OAAI,KAAK,IAAI,KAAK,CAAE,QAAO;AAC3B,QAAK,IAAI,KAAK;AACd,UAAO;IAEV;;AAEH,QAAO"}
package/llms.txt CHANGED
@@ -75,46 +75,24 @@ Why this layout:
  "dev": "NODE_ENV=development tsx watch server/index.ts",
  "build": "npm run build:server && npm run build:client",
  "build:server": "tsdown --out-dir build server/index.ts",
- "build:client": "cd client && npm run build",
+ "build:client": "tsc -b && vite build --config client/vite.config.ts",
  "start": "node build/index.mjs"
  },
  "dependencies": {
- "@databricks/appkit": "^0.0.2"
+ "@databricks/appkit": "^0.1.2"
+ "@databricks/appkit-ui": "^0.1.2",
+ "react": "^19.2.3",
+ "react-dom": "^19.2.3"
  },
  "devDependencies": {
  "@types/node": "^20.0.0",
+ "@types/react": "^19.0.0",
+ "@types/react-dom": "^19.0.0",
+ "@vitejs/plugin-react": "^5.1.1",
  "tsdown": "^0.15.7",
  "tsx": "^4.19.0",
- "typescript": "~5.6.0"
- }
- }
- ```
-
- ### `client/package.json`
-
- ```json
- {
- "name": "client",
- "private": true,
- "version": "0.0.0",
- "type": "module",
- "scripts": {
- "dev": "vite",
- "build": "vite build",
- "preview": "vite preview"
- },
- "dependencies": {
- "@databricks/appkit-ui": "^0.0.2",
- "react": "^18.0.0",
- "react-dom": "^18.0.0",
- "recharts": "^3.0.0"
- },
- "devDependencies": {
- "@types/react": "^18.0.0",
- "@types/react-dom": "^18.0.0",
- "@vitejs/plugin-react": "^5.0.0",
  "typescript": "~5.6.0",
- "vite": "^6.0.0"
+ "vite": "^7.2.4"
  }
  }
  ```
@@ -208,7 +186,6 @@ await createApp({
  ```bash
  # Install dependencies
  npm install
- cd client && npm install && cd ..
 
  # Development (starts backend + Vite dev server)
  npm run dev
@@ -225,19 +202,14 @@ If you already have a React/Vite app and want to add AppKit:
  ### 1. Install dependencies
 
  ```bash
- npm install @databricks/appkit
- npm install -D tsx tsdown
+ npm install @databricks/appkit @databricks/appkit-ui react react-dom
+ npm install -D tsx tsdown vite @vitejs/plugin-react typescript
 
  # If you don't already have a client/ folder, create one and move your Vite app into it:
  # - move index.html -> client/index.html
  # - move vite.config.ts -> client/vite.config.ts
  # - move src/ -> client/src/
  #
- # Then install client deps:
- cd client
- npm install @databricks/appkit-ui react react-dom recharts
- npm install -D vite @vitejs/plugin-react typescript
- cd ..
  ```
 
  ### 2. Create `server/index.ts` (new file)
@@ -258,7 +230,7 @@ await createApp({
  "dev": "NODE_ENV=development tsx watch server/index.ts",
  "build": "npm run build:server && npm run build:client",
  "build:server": "tsdown --out-dir build server/index.ts",
- "build:client": "cd client && npm run build",
+ "build:client": "tsc -b && vite build --config client/vite.config.ts",
  "start": "node build/index.mjs"
  }
  }
@@ -276,7 +248,7 @@
  import { createApp, server, analytics } from "@databricks/appkit";
 
  await createApp({
- plugins: [server(), analytics({})],
+ plugins: [server(), analytics()],
  });
  ```
 
@@ -312,13 +284,17 @@ These are typically **provided by Databricks Apps runtime** (exact set can vary
 
  For local development, you need to authenticate with Databricks. Options:
 
- **Option 1: Databricks CLI profile (recommended)**
+ **Option 1: Databricks CLI Auth (recommended)**
 
  ```bash
  # Configure once
- databricks configure --profile my-profile
+ databricks auth login --host [host] --profile [profile-name]
 
- # Then run with profile
+ # If you used `DEFAULT` as the profile name then you can just run
+
+ `npm run dev`
+
+ # To run with a specific profile
  DATABRICKS_CONFIG_PROFILE=my-profile npm run dev
  # If your Databricks SDK expects a different variable name, try:
  # DATABRICKS_PROFILE=my-profile npm run dev
@@ -462,25 +438,51 @@ HTTP endpoints exposed (mounted under `/api/analytics`):
  Formats:
 
  - `format: "JSON"` (default) returns JSON rows
- - `format: "ARROW"` returns an Arrow “external links” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
+ - `format: "ARROW"` returns an Arrow “statement_id” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
+
+ ### Execution context and `asUser(req)`
 
- ### Request context (`getRequestContext()`)
+ AppKit manages Databricks authentication via two contexts:
 
- If a plugin sets `requiresDatabricksClient = true`, AppKit adds middleware that provides request context.
+ - **ServiceContext** (singleton): Initialized at app startup with service principal credentials
+ - **ExecutionContext**: Determined at runtime - either service principal or user context
 
- Headers used:
+ **Headers used for user context:**
 
  - `x-forwarded-user`: required in production; identifies the user
- - `x-forwarded-access-token`: optional; enables **user token passthrough** if `DATABRICKS_HOST` is set
+ - `x-forwarded-access-token`: required for user token passthrough
 
- Context fields (real behavior):
+ **Using `asUser(req)` for user-scoped operations:**
 
- - `userId`: derived from `x-forwarded-user` (in development it falls back to `serviceUserId`)
- - `serviceUserId`: service principal/user ID
- - `warehouseId`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID`, or auto-selected in development)
- - `workspaceId`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
- - `userDatabricksClient`: present only when passthrough is available (or in dev it equals service client)
- - `serviceDatabricksClient`: always present
+ The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials:
+
+ ```ts
+ // In a custom plugin route handler
+ router.post("/users/me/data", async (req, res) => {
+ // Execute as the user (uses their Databricks permissions)
+ const result = await this.asUser(req).query("SELECT ...");
+ res.json(result);
+ });
+
+ // Service principal execution (default)
+ router.post("/system/data", async (req, res) => {
+ const result = await this.query("SELECT ...");
+ res.json(result);
+ });
+ ```
+
+ **Context helper functions (exported from `@databricks/appkit`):**
+
+ - `getExecutionContext()`: Returns current context (user or service)
+ - `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise
+ - `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context
+ - `getWarehouseId()`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev)
+ - `getWorkspaceId()`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
+ - `isInUserContext()`: Returns `true` if currently executing in user context
+
+ **Development mode behavior:**
+
+ In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal.
 
  ### Custom plugins (backend)
 
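Since the helper list above is new in this release, here is a hedged sketch of how those exports might be combined inside a custom route handler; the route path, response shape, and comments are invented for illustration, and only the helper names and the behaviors described above are taken from the documentation.

```ts
import express from "express";
import {
  getCurrentUserId,
  getWarehouseId,
  isInUserContext,
} from "@databricks/appkit"; // helper exports listed above

const router = express.Router();

// Hypothetical diagnostic endpoint: reports which context the request runs under.
router.get("/whoami", async (_req, res) => {
  res.json({
    userId: getCurrentUserId(),          // service user ID when not in user context
    userContext: isInUserContext(),      // true only when executing as the requesting user
    warehouseId: await getWarehouseId(), // from DATABRICKS_WAREHOUSE_ID, or auto-selected in dev
  });
});
```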
@@ -493,7 +495,6 @@ import type express from "express";
  class MyPlugin extends Plugin {
  name = "my-plugin";
  envVars = []; // list required env vars here
- requiresDatabricksClient = false; // set true if you need getRequestContext()
 
  injectRoutes(router: express.Router) {
  this.route(router, {
@@ -980,6 +981,108 @@ function LoadingCard() {
  }
  ```
 
+ ## Stylesheet
+
+ In the main css file import the following
+
+ ```css
+ @import "@databricks/appkit-ui/styles.css";
+ ```
+
+ That will provide a default theme for the app using css variables.
+
+ ### Customizing theme (light/dark mode)
+
+ - Full list of variables to customize the theme.
+
+ ```css
+ @import "@databricks/appkit-ui/styles.css";
+
+ :root {
+ --radius: 0.625rem;
+ --background: oklch(1 0 0);
+ --foreground: oklch(0.141 0.005 285.823);
+ --card: oklch(1 0 0);
+ --card-foreground: oklch(0.141 0.005 285.823);
+ --popover: oklch(1 0 0);
+ --popover-foreground: oklch(0.141 0.005 285.823);
+ --primary: oklch(0.21 0.006 285.885);
+ --primary-foreground: oklch(0.985 0 0);
+ --secondary: oklch(0.967 0.001 286.375);
+ --secondary-foreground: oklch(0.21 0.006 285.885);
+ --muted: oklch(0.967 0.001 286.375);
+ --muted-foreground: oklch(0.552 0.016 285.938);
+ --accent: oklch(0.967 0.001 286.375);
+ --accent-foreground: oklch(0.21 0.006 285.885);
+ --destructive: oklch(0.577 0.245 27.325);
+ --destructive-foreground: oklch(0.985 0 0);
+ --success: oklch(0.603 0.135 166.892);
+ --success-foreground: oklch(1 0 0);
+ --warning: oklch(0.795 0.157 78.748);
+ --warning-foreground: oklch(0.199 0.027 238.732);
+ --border: oklch(0.92 0.004 286.32);
+ --input: oklch(0.92 0.004 286.32);
+ --ring: oklch(0.705 0.015 286.067);
+ --chart-1: oklch(0.646 0.222 41.116);
+ --chart-2: oklch(0.6 0.118 184.704);
+ --chart-3: oklch(0.398 0.07 227.392);
+ --chart-4: oklch(0.828 0.189 84.429);
+ --chart-5: oklch(0.769 0.188 70.08);
+ --sidebar: oklch(0.985 0 0);
+ --sidebar-foreground: oklch(0.141 0.005 285.823);
+ --sidebar-primary: oklch(0.21 0.006 285.885);
+ --sidebar-primary-foreground: oklch(0.985 0 0);
+ --sidebar-accent: oklch(0.967 0.001 286.375);
+ --sidebar-accent-foreground: oklch(0.21 0.006 285.885);
+ --sidebar-border: oklch(0.92 0.004 286.32);
+ --sidebar-ring: oklch(0.705 0.015 286.067);
+ }
+
+ @media (prefers-color-scheme: dark) {
+ :root {
+ --background: oklch(0.141 0.005 285.823);
+ --foreground: oklch(0.985 0 0);
+ --card: oklch(0.21 0.006 285.885);
+ --card-foreground: oklch(0.985 0 0);
+ --popover: oklch(0.21 0.006 285.885);
+ --popover-foreground: oklch(0.985 0 0);
+ --primary: oklch(0.92 0.004 286.32);
+ --primary-foreground: oklch(0.21 0.006 285.885);
+ --secondary: oklch(0.274 0.006 286.033);
+ --secondary-foreground: oklch(0.985 0 0);
+ --muted: oklch(0.274 0.006 286.033);
+ --muted-foreground: oklch(0.705 0.015 286.067);
+ --accent: oklch(0.274 0.006 286.033);
+ --accent-foreground: oklch(0.985 0 0);
+ --destructive: oklch(0.704 0.191 22.216);
+ --destructive-foreground: oklch(0.985 0 0);
+ --success: oklch(0.67 0.12 167);
+ --success-foreground: oklch(1 0 0);
+ --warning: oklch(0.83 0.165 85);
+ --warning-foreground: oklch(0.199 0.027 238.732);
+ --border: oklch(1 0 0 / 10%);
+ --input: oklch(1 0 0 / 15%);
+ --ring: oklch(0.552 0.016 285.938);
+ --chart-1: oklch(0.488 0.243 264.376);
+ --chart-2: oklch(0.696 0.17 162.48);
+ --chart-3: oklch(0.769 0.188 70.08);
+ --chart-4: oklch(0.627 0.265 303.9);
+ --chart-5: oklch(0.645 0.246 16.439);
+ --sidebar: oklch(0.21 0.006 285.885);
+ --sidebar-foreground: oklch(0.985 0 0);
+ --sidebar-primary: oklch(0.488 0.243 264.376);
+ --sidebar-primary-foreground: oklch(0.985 0 0);
+ --sidebar-accent: oklch(0.274 0.006 286.033);
+ --sidebar-accent-foreground: oklch(0.985 0 0);
+ --sidebar-border: oklch(1 0 0 / 10%);
+ --sidebar-ring: oklch(0.552 0.016 285.938);
+ }
+ }
+
+ ```
+
+ - If any variable is changed, it must be changed for both light and dark mode.
+
  ## Type generation (QueryRegistry + IntelliSense)
 
  Goal: generate `client/src/appKitTypes.d.ts` so query keys, params, and result rows are type-safe.
@@ -1054,7 +1157,6 @@ env:
  - `tsx` is in devDependencies for dev server
  - `dev` script uses `NODE_ENV=development tsx watch server/index.ts`
  - `client/index.html` exists with `<div id="root"></div>` and script pointing to `client/src/main.tsx`
- - `client/package.json` exists and includes `@databricks/appkit-ui`
 
  - **Backend**
  - `await createApp({ plugins: [...] })` is used (or `void createApp` with intent)
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@databricks/appkit",
  "type": "module",
- "version": "0.1.3",
+ "version": "0.1.5",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "packageManager": "pnpm@10.21.0",
@@ -15,6 +15,7 @@
  "scripts",
  "CLAUDE.md",
  "llms.txt",
+ "AGENTS.md",
  "README.md",
  "DCO",
  "NOTICE.md"
@@ -33,7 +34,7 @@
  "clean:full": "rm -rf dist node_modules tmp",
  "clean": "rm -rf dist tmp",
  "dist": "tsx ../../tools/dist.ts",
- "tarball": "tsx ../../tools/dist.ts && npm pack ./tmp --pack-destination ./tmp",
+ "tarball": "rm -rf tmp && tsx ../../tools/dist.ts && npm pack ./tmp --pack-destination ./tmp",
  "typecheck": "tsc --noEmit",
  "postinstall": "node scripts/postinstall.js"
  },
@@ -53,9 +54,11 @@
  "@opentelemetry/sdk-metrics": "^2.2.0",
  "@opentelemetry/sdk-node": "^0.208.0",
  "@opentelemetry/semantic-conventions": "^1.38.0",
+ "@types/semver": "^7.7.1",
  "dotenv": "^16.6.1",
  "express": "^4.22.0",
  "pg": "^8.16.3",
+ "semver": "^7.7.3",
  "vite": "npm:rolldown-vite@7.1.14",
  "ws": "^8.18.3",
  "zod-to-ts": "^2.0.0"
package/dist/index.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.js","names":[],"sources":["../src/index.ts"],"sourcesContent":["export type {\n BasePluginConfig,\n IAppRouter,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nexport {\n isSQLTypeMarker,\n sql,\n} from \"shared\";\nexport { analytics } from \"./analytics\";\nexport { CacheManager } from \"./cache\";\nexport { createApp } from \"./core\";\nexport { Plugin, toPlugin } from \"./plugin\";\nexport { server } from \"./server\";\nexport type { ITelemetry } from \"./telemetry\";\nexport {\n type Counter,\n type Histogram,\n SeverityNumber,\n type Span,\n SpanStatusCode,\n} from \"./telemetry\";\nexport { appKitTypesPlugin } from \"./type-generator/vite-plugin\";\nexport { getRequestContext } from \"./utils\";\n"],"mappings":";;;;;;;;;;;;;;;;YAwB4C"}
package/dist/utils/databricks-client-middleware.d.ts DELETED
@@ -1,17 +0,0 @@
- import { WorkspaceClient } from "@databricks/sdk-experimental";
- import express from "express";
-
- //#region src/utils/databricks-client-middleware.d.ts
- type RequestContext = {
- userDatabricksClient?: WorkspaceClient;
- serviceDatabricksClient: WorkspaceClient;
- userId: string;
- userName?: string;
- serviceUserId: string;
- warehouseId: Promise<string>;
- workspaceId: Promise<string>;
- };
- declare function getRequestContext(): RequestContext;
- //#endregion
- export { getRequestContext };
- //# sourceMappingURL=databricks-client-middleware.d.ts.map
package/dist/utils/databricks-client-middleware.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"databricks-client-middleware.d.ts","names":[],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":[],"mappings":";;;;KAYY,cAAA;yBACa;EADb,uBAAc,EAEC,eAFD;EAAA,MAAA,EAAA,MAAA;UACD,CAAA,EAAA,MAAA;eACE,EAAA,MAAA;aAIZ,EAAA,OAAA,CAAA,MAAA,CAAA;aACA,EAAA,OAAA,CAAA,MAAA,CAAA;CAAO;iBA4EN,iBAAA,CAAA,GAAqB"}
package/dist/utils/databricks-client-middleware.js DELETED
@@ -1,117 +0,0 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
- import { name, version } from "../appkit/package.js";
- import { WorkspaceClient } from "@databricks/sdk-experimental";
- import { AsyncLocalStorage } from "node:async_hooks";
-
- //#region src/utils/databricks-client-middleware.ts
- function getClientOptions() {
- const isDev = process.env.NODE_ENV === "development";
- return {
- product: name,
- productVersion: version.split(".").slice(0, 3).join("."),
- ...isDev && { userAgentExtra: { mode: "dev" } }
- };
- }
- async function databricksClientMiddleware() {
- const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());
- const warehouseId = getWarehouseId(serviceDatabricksClient);
- const workspaceId = getWorkspaceId(serviceDatabricksClient);
- const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;
- if (!serviceUserId) throw new Error("Service user ID not found");
- return async (req, res, next) => {
- const userToken = req.headers["x-forwarded-access-token"];
- let userDatabricksClient;
- const host = process.env.DATABRICKS_HOST;
- if (userToken && host) userDatabricksClient = new WorkspaceClient({
- token: userToken,
- host,
- authType: "pat"
- }, getClientOptions());
- else if (process.env.NODE_ENV === "development") userDatabricksClient = serviceDatabricksClient;
- let userName = req.headers["x-forwarded-user"];
- if (!userName && process.env.NODE_ENV !== "development") {
- res.status(401).json({ error: "Unauthorized" });
- return;
- } else userName = serviceUserId;
- return asyncLocalStorage.run({
- userDatabricksClient,
- serviceDatabricksClient,
- warehouseId,
- workspaceId,
- userId: userName,
- serviceUserId
- }, async () => {
- return next();
- });
- };
- }
- function getRequestContext() {
- const store = asyncLocalStorage.getStore();
- if (!store) throw new Error("Request context not found");
- return store;
- }
- /**
- * Get the appropriate WorkspaceClient based on whether the request
- * should be executed as the user or as the service principal.
- *
- * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)
- * @throws Error if asUser is true but user token passthrough is not enabled
- */
- function getWorkspaceClient(asUser) {
- const context = getRequestContext();
- if (asUser) {
- if (!context.userDatabricksClient) throw new Error(`User token passthrough is not enabled for this workspace.`);
- return context.userDatabricksClient;
- }
- return context.serviceDatabricksClient;
- }
- async function getWorkspaceId(workspaceClient) {
- if (process.env.DATABRICKS_WORKSPACE_ID) return process.env.DATABRICKS_WORKSPACE_ID;
- const response = await workspaceClient.apiClient.request({
- path: "/api/2.0/preview/scim/v2/Me",
- method: "GET",
- headers: new Headers(),
- raw: false,
- query: {},
- responseHeaders: ["x-databricks-org-id"]
- });
- if (!response["x-databricks-org-id"]) throw new Error("Workspace ID not found");
- return response["x-databricks-org-id"];
- }
- async function getWarehouseId(workspaceClient) {
- if (process.env.DATABRICKS_WAREHOUSE_ID) return process.env.DATABRICKS_WAREHOUSE_ID;
- if (process.env.NODE_ENV === "development") {
- const response = await workspaceClient.apiClient.request({
- path: "/api/2.0/sql/warehouses",
- method: "GET",
- headers: new Headers(),
- raw: false,
- query: { skip_cannot_use: "true" }
- });
- const priorities = {
- RUNNING: 0,
- STOPPED: 1,
- STARTING: 2,
- STOPPING: 3,
- DELETED: 99,
- DELETING: 99
- };
- const warehouses = (response.warehouses || []).sort((a, b) => {
- return priorities[a.state] - priorities[b.state];
- });
- if (response.warehouses.length === 0) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- const firstWarehouse = warehouses[0];
- if (firstWarehouse.state === "DELETED" || firstWarehouse.state === "DELETING" || !firstWarehouse.id) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- return firstWarehouse.id;
- }
- throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- }
- var asyncLocalStorage;
- var init_databricks_client_middleware = __esmMin((() => {
- asyncLocalStorage = new AsyncLocalStorage();
- }));
-
- //#endregion
- init_databricks_client_middleware();
- export { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware };
- //# sourceMappingURL=databricks-client-middleware.js.map
package/dist/utils/databricks-client-middleware.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"databricks-client-middleware.js","names":["productName","productVersion","userDatabricksClient: WorkspaceClient | undefined","priorities: Record<sql.State, number>"],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":["import { AsyncLocalStorage } from \"node:async_hooks\";\nimport {\n type ClientOptions,\n type sql,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport {\n name as productName,\n version as productVersion,\n} from \"../../package.json\";\n\nexport type RequestContext = {\n userDatabricksClient?: WorkspaceClient;\n serviceDatabricksClient: WorkspaceClient;\n userId: string;\n userName?: string;\n serviceUserId: string;\n warehouseId: Promise<string>;\n workspaceId: Promise<string>;\n};\n\nconst asyncLocalStorage = new AsyncLocalStorage<RequestContext>();\n\nfunction getClientOptions(): ClientOptions {\n const isDev = process.env.NODE_ENV === \"development\";\n const normalizedVersion = productVersion\n .split(\".\")\n .slice(0, 3)\n .join(\".\") as ClientOptions[\"productVersion\"];\n\n return {\n product: productName,\n productVersion: normalizedVersion,\n ...(isDev && { userAgentExtra: { mode: \"dev\" } }),\n };\n}\n\nexport async function databricksClientMiddleware(): Promise<express.RequestHandler> {\n const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());\n const warehouseId = getWarehouseId(serviceDatabricksClient);\n const workspaceId = getWorkspaceId(serviceDatabricksClient);\n const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;\n\n if (!serviceUserId) {\n throw new Error(\"Service user ID not found\");\n }\n\n return async (\n req: express.Request,\n res: express.Response,\n next: express.NextFunction,\n ) => {\n const userToken = req.headers[\"x-forwarded-access-token\"] as string;\n let userDatabricksClient: WorkspaceClient | undefined;\n const host = process.env.DATABRICKS_HOST;\n if (userToken && host) {\n userDatabricksClient = new WorkspaceClient(\n {\n token: userToken,\n host,\n authType: \"pat\",\n },\n getClientOptions(),\n );\n } else if (process.env.NODE_ENV === \"development\") {\n // in local development service and no user token are the same\n // TODO: use `databricks apps run-local` to fix this\n userDatabricksClient = serviceDatabricksClient;\n }\n\n let userName = req.headers[\"x-forwarded-user\"] as string;\n if (!userName && process.env.NODE_ENV !== \"development\") {\n res.status(401).json({ error: \"Unauthorized\" });\n return;\n } else {\n userName = serviceUserId;\n }\n\n return asyncLocalStorage.run(\n {\n userDatabricksClient,\n serviceDatabricksClient,\n warehouseId,\n workspaceId,\n userId: userName,\n serviceUserId,\n },\n async () => {\n return next();\n },\n );\n };\n}\n\nexport function getRequestContext(): RequestContext {\n const store = asyncLocalStorage.getStore();\n if (!store) {\n throw new Error(\"Request context not found\");\n }\n return store;\n}\n\n/**\n * Get the appropriate WorkspaceClient based on whether the request\n * should be executed as the user or as the service principal.\n *\n * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)\n * @throws Error if asUser is true but user token passthrough is not enabled\n */\nexport function getWorkspaceClient(asUser: boolean): WorkspaceClient {\n const context = getRequestContext();\n\n if (asUser) {\n if (!context.userDatabricksClient) {\n throw new Error(\n `User token passthrough is not 
enabled for this workspace.`,\n );\n }\n return context.userDatabricksClient;\n }\n\n return context.serviceDatabricksClient;\n}\n\nasync function getWorkspaceId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WORKSPACE_ID) {\n return process.env.DATABRICKS_WORKSPACE_ID;\n }\n\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/preview/scim/v2/Me\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {},\n responseHeaders: [\"x-databricks-org-id\"],\n })) as { \"x-databricks-org-id\": string };\n\n if (!response[\"x-databricks-org-id\"]) {\n throw new Error(\"Workspace ID not found\");\n }\n\n return response[\"x-databricks-org-id\"];\n}\n\nasync function getWarehouseId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WAREHOUSE_ID) {\n return process.env.DATABRICKS_WAREHOUSE_ID;\n }\n\n if (process.env.NODE_ENV === \"development\") {\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/sql/warehouses\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {\n skip_cannot_use: \"true\",\n },\n })) as { warehouses: sql.EndpointInfo[] };\n\n const priorities: Record<sql.State, number> = {\n RUNNING: 0,\n STOPPED: 1,\n STARTING: 2,\n STOPPING: 3,\n DELETED: 99,\n DELETING: 99,\n };\n\n const warehouses = (response.warehouses || []).sort((a, b) => {\n return (\n priorities[a.state as sql.State] - priorities[b.state as sql.State]\n );\n });\n\n if (response.warehouses.length === 0) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n const firstWarehouse = warehouses[0];\n if (\n firstWarehouse.state === \"DELETED\" ||\n firstWarehouse.state === \"DELETING\" ||\n !firstWarehouse.id\n ) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n return firstWarehouse.id;\n }\n\n throw new Error(\n \"Warehouse ID not found. 
Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n}\n\nexport type Request = express.Request;\nexport type Response = express.Response;\n"],"mappings":";;;;;;AAwBA,SAAS,mBAAkC;CACzC,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAMvC,QAAO;EACL,SAASA;EACT,gBAPwBC,QACvB,MAAM,IAAI,CACV,MAAM,GAAG,EAAE,CACX,KAAK,IAAI;EAKV,GAAI,SAAS,EAAE,gBAAgB,EAAE,MAAM,OAAO,EAAE;EACjD;;AAGH,eAAsB,6BAA8D;CAClF,MAAM,0BAA0B,IAAI,gBAAgB,EAAE,EAAE,kBAAkB,CAAC;CAC3E,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,iBAAiB,MAAM,wBAAwB,YAAY,IAAI,EAAE;AAEvE,KAAI,CAAC,cACH,OAAM,IAAI,MAAM,4BAA4B;AAG9C,QAAO,OACL,KACA,KACA,SACG;EACH,MAAM,YAAY,IAAI,QAAQ;EAC9B,IAAIC;EACJ,MAAM,OAAO,QAAQ,IAAI;AACzB,MAAI,aAAa,KACf,wBAAuB,IAAI,gBACzB;GACE,OAAO;GACP;GACA,UAAU;GACX,EACD,kBAAkB,CACnB;WACQ,QAAQ,IAAI,aAAa,cAGlC,wBAAuB;EAGzB,IAAI,WAAW,IAAI,QAAQ;AAC3B,MAAI,CAAC,YAAY,QAAQ,IAAI,aAAa,eAAe;AACvD,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,gBAAgB,CAAC;AAC/C;QAEA,YAAW;AAGb,SAAO,kBAAkB,IACvB;GACE;GACA;GACA;GACA;GACA,QAAQ;GACR;GACD,EACD,YAAY;AACV,UAAO,MAAM;IAEhB;;;AAIL,SAAgB,oBAAoC;CAClD,MAAM,QAAQ,kBAAkB,UAAU;AAC1C,KAAI,CAAC,MACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO;;;;;;;;;AAUT,SAAgB,mBAAmB,QAAkC;CACnE,MAAM,UAAU,mBAAmB;AAEnC,KAAI,QAAQ;AACV,MAAI,CAAC,QAAQ,qBACX,OAAM,IAAI,MACR,4DACD;AAEH,SAAO,QAAQ;;AAGjB,QAAO,QAAQ;;AAGjB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;CAGrB,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;EACxD,MAAM;EACN,QAAQ;EACR,SAAS,IAAI,SAAS;EACtB,KAAK;EACL,OAAO,EAAE;EACT,iBAAiB,CAAC,sBAAsB;EACzC,CAAC;AAEF,KAAI,CAAC,SAAS,uBACZ,OAAM,IAAI,MAAM,yBAAyB;AAG3C,QAAO,SAAS;;AAGlB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;AAGrB,KAAI,QAAQ,IAAI,aAAa,eAAe;EAC1C,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;GACxD,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,OAAO,EACL,iBAAiB,QAClB;GACF,CAAC;EAEF,MAAMC,aAAwC;GAC5C,SAAS;GACT,SAAS;GACT,UAAU;GACV,UAAU;GACV,SAAS;GACT,UAAU;GACX;EAED,MAAM,cAAc,SAAS,cAAc,EAAE,EAAE,MAAM,GAAG,MAAM;AAC5D,UACE,WAAW,EAAE,SAAsB,WAAW,EAAE;IAElD;AAEF,MAAI,SAAS,WAAW,WAAW,EACjC,OAAM,IAAI,MACR,6FACD;EAGH,MAAM,iBAAiB,WAAW;AAClC,MACE,eAAe,UAAU,aACzB,eAAe,UAAU,cACzB,CAAC,eAAe,GAEhB,OAAM,IAAI,MACR,6FACD;AAGH,SAAO,eAAe;;AAGxB,OAAM,IAAI,MACR,6FACD;;;;CArLG,oBAAoB,IAAI,mBAAmC"}
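The deleted middleware above was the home of `getRequestContext()`; its role appears to be taken over by the new `dist/context/*` modules and the helper exports documented in `llms.txt`. A hedged before/after sketch of the corresponding call sites follows; the mapping is inferred from this diff, not an official migration guide.

```ts
// Before (0.1.3): request-scoped context from the deleted middleware
// import { getRequestContext } from "@databricks/appkit";
// const { userId, warehouseId } = getRequestContext(); // warehouseId was a Promise<string>
// const id = await warehouseId;

// After (0.1.5): standalone helpers backed by the new execution/service contexts
import { getCurrentUserId, getWarehouseId } from "@databricks/appkit";

const userId = getCurrentUserId();
const warehouseId = await getWarehouseId();
```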
package/dist/utils/index.js DELETED
@@ -1,26 +0,0 @@
- import { __esmMin, __export } from "../_virtual/rolldown_runtime.js";
- import { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware } from "./databricks-client-middleware.js";
- import { init_env_validator, validateEnv } from "./env-validator.js";
- import { deepMerge, init_merge } from "./merge.js";
- import { init_vite_config_merge, mergeConfigDedup } from "./vite-config-merge.js";
-
- //#region src/utils/index.ts
- var utils_exports = /* @__PURE__ */ __export({
- databricksClientMiddleware: () => databricksClientMiddleware,
- deepMerge: () => deepMerge,
- getRequestContext: () => getRequestContext,
- getWorkspaceClient: () => getWorkspaceClient,
- mergeConfigDedup: () => mergeConfigDedup,
- validateEnv: () => validateEnv
- });
- var init_utils = __esmMin((() => {
- init_databricks_client_middleware();
- init_env_validator();
- init_merge();
- init_vite_config_merge();
- }));
-
- //#endregion
- init_utils();
- export { init_utils, utils_exports };
- //# sourceMappingURL=index.js.map
package/dist/utils/index.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.js","names":[],"sources":["../../src/utils/index.ts"],"sourcesContent":["export * from \"./databricks-client-middleware\";\nexport * from \"./env-validator\";\nexport * from \"./merge\";\nexport * from \"./vite-config-merge\";\n"],"mappings":""}