@databricks/appkit 0.1.4 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/AGENTS.md +37 -12
  2. package/CLAUDE.md +37 -12
  3. package/NOTICE.md +2 -0
  4. package/dist/analytics/analytics.d.ts +33 -8
  5. package/dist/analytics/analytics.d.ts.map +1 -1
  6. package/dist/analytics/analytics.js +51 -24
  7. package/dist/analytics/analytics.js.map +1 -1
  8. package/dist/analytics/defaults.js.map +1 -1
  9. package/dist/analytics/query.js +4 -4
  10. package/dist/analytics/query.js.map +1 -1
  11. package/dist/appkit/package.js +1 -1
  12. package/dist/cache/defaults.js.map +1 -1
  13. package/dist/cache/index.d.ts +1 -0
  14. package/dist/cache/index.d.ts.map +1 -1
  15. package/dist/cache/index.js +1 -2
  16. package/dist/cache/index.js.map +1 -1
  17. package/dist/cache/storage/memory.js.map +1 -1
  18. package/dist/connectors/lakebase/client.js +7 -8
  19. package/dist/connectors/lakebase/client.js.map +1 -1
  20. package/dist/connectors/lakebase/defaults.js.map +1 -1
  21. package/dist/connectors/sql-warehouse/client.js.map +1 -1
  22. package/dist/connectors/sql-warehouse/defaults.js.map +1 -1
  23. package/dist/context/execution-context.js +75 -0
  24. package/dist/context/execution-context.js.map +1 -0
  25. package/dist/context/index.js +27 -0
  26. package/dist/context/index.js.map +1 -0
  27. package/dist/context/service-context.js +149 -0
  28. package/dist/context/service-context.js.map +1 -0
  29. package/dist/context/user-context.js +15 -0
  30. package/dist/context/user-context.js.map +1 -0
  31. package/dist/core/appkit.d.ts +3 -0
  32. package/dist/core/appkit.d.ts.map +1 -1
  33. package/dist/core/appkit.js +7 -0
  34. package/dist/core/appkit.js.map +1 -1
  35. package/dist/index.d.ts +5 -6
  36. package/dist/index.js +3 -10
  37. package/dist/plugin/interceptors/cache.js.map +1 -1
  38. package/dist/plugin/interceptors/retry.js.map +1 -1
  39. package/dist/plugin/interceptors/telemetry.js.map +1 -1
  40. package/dist/plugin/interceptors/timeout.js.map +1 -1
  41. package/dist/plugin/plugin.d.ts +39 -5
  42. package/dist/plugin/plugin.d.ts.map +1 -1
  43. package/dist/plugin/plugin.js +82 -6
  44. package/dist/plugin/plugin.js.map +1 -1
  45. package/dist/plugin/to-plugin.d.ts +4 -0
  46. package/dist/plugin/to-plugin.d.ts.map +1 -1
  47. package/dist/plugin/to-plugin.js +3 -0
  48. package/dist/plugin/to-plugin.js.map +1 -1
  49. package/dist/server/index.d.ts +3 -0
  50. package/dist/server/index.d.ts.map +1 -1
  51. package/dist/server/index.js +3 -4
  52. package/dist/server/index.js.map +1 -1
  53. package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
  54. package/dist/server/utils.js.map +1 -1
  55. package/dist/server/vite-dev-server.js +0 -2
  56. package/dist/server/vite-dev-server.js.map +1 -1
  57. package/dist/shared/src/sql/helpers.js.map +1 -1
  58. package/dist/stream/arrow-stream-processor.js.map +1 -1
  59. package/dist/stream/buffers.js.map +1 -1
  60. package/dist/stream/sse-writer.js.map +1 -1
  61. package/dist/stream/stream-manager.js.map +1 -1
  62. package/dist/stream/stream-registry.js.map +1 -1
  63. package/dist/telemetry/instrumentations.js.map +1 -1
  64. package/dist/type-generator/index.js.map +1 -1
  65. package/dist/type-generator/query-registry.js.map +1 -1
  66. package/dist/type-generator/types.js.map +1 -1
  67. package/dist/type-generator/vite-plugin.js.map +1 -1
  68. package/dist/utils/env-validator.js +1 -5
  69. package/dist/utils/env-validator.js.map +1 -1
  70. package/dist/utils/merge.js +1 -5
  71. package/dist/utils/merge.js.map +1 -1
  72. package/dist/utils/vite-config-merge.js +1 -5
  73. package/dist/utils/vite-config-merge.js.map +1 -1
  74. package/llms.txt +37 -12
  75. package/package.json +3 -1
  76. package/dist/index.js.map +0 -1
  77. package/dist/utils/databricks-client-middleware.d.ts +0 -17
  78. package/dist/utils/databricks-client-middleware.d.ts.map +0 -1
  79. package/dist/utils/databricks-client-middleware.js +0 -117
  80. package/dist/utils/databricks-client-middleware.js.map +0 -1
  81. package/dist/utils/index.js +0 -26
  82. package/dist/utils/index.js.map +0 -1
package/dist/type-generator/vite-plugin.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"vite-plugin.js","names":["root: string","outFile: string","watchFolders: string[]"],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport fs from \"node:fs\";\nimport { generateFromEntryPoint } from \"./index\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n apply() {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return false;\n }\n\n if (!fs.existsSync(path.join(process.cwd(), \"config\", \"queries\"))) {\n return false;\n }\n\n return true;\n },\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = options?.watchFolders ?? [\n path.join(process.cwd(), \"config\", \"queries\"),\n ];\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;;AAqBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAIA;CACJ,IAAIC;CACJ,IAAIC;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,YAAQ,KACN,6DACD;AACD;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EAEN,QAAQ;AAGN,OAAI,EAFgB,QAAQ,IAAI,2BAA2B,KAEzC;AAChB,YAAQ,KACN,6DACD;AACD,WAAO;;AAGT,OAAI,CAAC,GAAG,WAAW,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAAC,CAC/D,QAAO;AAGT,UAAO;;EAGT,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,kBAAe,SAAS,gBAAgB,CACtC,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAC9C;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
+ {"version":3,"file":"vite-plugin.js","names":[],"sources":["../../src/type-generator/vite-plugin.ts"],"sourcesContent":["import path from \"node:path\";\nimport type { Plugin } from \"vite\";\nimport fs from \"node:fs\";\nimport { generateFromEntryPoint } from \"./index\";\n\n/**\n * Options for the AppKit types plugin.\n */\ninterface AppKitTypesPluginOptions {\n /* Path to the output d.ts file (relative to client folder). */\n outFile?: string;\n /** Folders to watch for changes. */\n watchFolders?: string[];\n}\n\n/**\n * Vite plugin to generate types for AppKit queries.\n * Calls generateFromEntryPoint under the hood.\n * @param options - Options to override default values.\n * @returns Vite plugin to generate types for AppKit queries.\n */\nexport function appKitTypesPlugin(options?: AppKitTypesPluginOptions): Plugin {\n let root: string;\n let outFile: string;\n let watchFolders: string[];\n\n async function generate() {\n try {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return;\n }\n\n await generateFromEntryPoint({\n outFile,\n queryFolder: watchFolders[0],\n warehouseId,\n noCache: false,\n });\n } catch (error) {\n // throw in production to fail the build\n if (process.env.NODE_ENV === \"production\") {\n throw error;\n }\n console.error(\"[AppKit] Error generating types:\", error);\n }\n }\n\n return {\n name: \"appkit-types\",\n\n apply() {\n const warehouseId = process.env.DATABRICKS_WAREHOUSE_ID || \"\";\n\n if (!warehouseId) {\n console.warn(\n \"[AppKit] Warehouse ID not found. Skipping type generation.\",\n );\n return false;\n }\n\n if (!fs.existsSync(path.join(process.cwd(), \"config\", \"queries\"))) {\n return false;\n }\n\n return true;\n },\n\n configResolved(config) {\n root = config.root;\n outFile = path.resolve(root, options?.outFile ?? \"src/appKitTypes.d.ts\");\n watchFolders = options?.watchFolders ?? [\n path.join(process.cwd(), \"config\", \"queries\"),\n ];\n },\n\n buildStart() {\n generate();\n },\n\n configureServer(server) {\n server.watcher.add(watchFolders);\n\n server.watcher.on(\"change\", (changedFile) => {\n const isWatchedFile = watchFolders.some((folder) =>\n changedFile.startsWith(folder),\n );\n\n if (isWatchedFile && changedFile.endsWith(\".sql\")) {\n generate();\n }\n });\n },\n };\n}\n"],"mappings":";;;;;;;;;;;AAqBA,SAAgB,kBAAkB,SAA4C;CAC5E,IAAI;CACJ,IAAI;CACJ,IAAI;CAEJ,eAAe,WAAW;AACxB,MAAI;GACF,MAAM,cAAc,QAAQ,IAAI,2BAA2B;AAE3D,OAAI,CAAC,aAAa;AAChB,YAAQ,KACN,6DACD;AACD;;AAGF,SAAM,uBAAuB;IAC3B;IACA,aAAa,aAAa;IAC1B;IACA,SAAS;IACV,CAAC;WACK,OAAO;AAEd,OAAI,QAAQ,IAAI,aAAa,aAC3B,OAAM;AAER,WAAQ,MAAM,oCAAoC,MAAM;;;AAI5D,QAAO;EACL,MAAM;EAEN,QAAQ;AAGN,OAAI,EAFgB,QAAQ,IAAI,2BAA2B,KAEzC;AAChB,YAAQ,KACN,6DACD;AACD,WAAO;;AAGT,OAAI,CAAC,GAAG,WAAW,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAAC,CAC/D,QAAO;AAGT,UAAO;;EAGT,eAAe,QAAQ;AACrB,UAAO,OAAO;AACd,aAAU,KAAK,QAAQ,MAAM,SAAS,WAAW,uBAAuB;AACxE,kBAAe,SAAS,gBAAgB,CACtC,KAAK,KAAK,QAAQ,KAAK,EAAE,UAAU,UAAU,CAC9C;;EAGH,aAAa;AACX,aAAU;;EAGZ,gBAAgB,QAAQ;AACtB,UAAO,QAAQ,IAAI,aAAa;AAEhC,UAAO,QAAQ,GAAG,WAAW,gBAAgB;AAK3C,QAJsB,aAAa,MAAM,WACvC,YAAY,WAAW,OAAO,CAC/B,IAEoB,YAAY,SAAS,OAAO,CAC/C,WAAU;KAEZ;;EAEL"}
package/dist/utils/env-validator.js CHANGED
@@ -1,14 +1,10 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/env-validator.ts
  function validateEnv(envVars) {
  const missingVars = [];
  for (const envVar of envVars) if (!process.env[envVar]) missingVars.push(envVar);
  if (missingVars.length > 0) throw new Error(`Missing required environment variables: ${missingVars.join(", ")}`);
  }
- var init_env_validator = __esmMin((() => {}));
 
  //#endregion
- init_env_validator();
- export { init_env_validator, validateEnv };
+ export { validateEnv };
  //# sourceMappingURL=env-validator.js.map
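`validateEnv` collects every missing variable before failing, so a single error names them all. A small usage sketch; the deep import path is hypothetical (the `dist/utils` barrel that re-exported it is removed in this release), and the variable names are only examples:

```ts
// Hypothetical import path, shown only to make the example self-contained.
import { validateEnv } from "@databricks/appkit/dist/utils/env-validator.js";

// Throws one aggregated error, e.g.
// "Missing required environment variables: DATABRICKS_HOST, DATABRICKS_WAREHOUSE_ID",
// and returns normally when every listed variable is set.
validateEnv(["DATABRICKS_HOST", "DATABRICKS_WAREHOUSE_ID"]);
```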
package/dist/utils/env-validator.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["export function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw new Error(\n `Missing required environment variables: ${missingVars.join(\", \")}`,\n );\n }\n}\n"],"mappings":";;;AAAA,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,IAAI,MACR,2CAA2C,YAAY,KAAK,KAAK,GAClE"}
+ {"version":3,"file":"env-validator.js","names":[],"sources":["../../src/utils/env-validator.ts"],"sourcesContent":["export function validateEnv(envVars: string[]) {\n const missingVars = [];\n\n for (const envVar of envVars) {\n if (!process.env[envVar]) {\n missingVars.push(envVar);\n }\n }\n\n if (missingVars.length > 0) {\n throw new Error(\n `Missing required environment variables: ${missingVars.join(\", \")}`,\n );\n }\n}\n"],"mappings":";AAAA,SAAgB,YAAY,SAAmB;CAC7C,MAAM,cAAc,EAAE;AAEtB,MAAK,MAAM,UAAU,QACnB,KAAI,CAAC,QAAQ,IAAI,QACf,aAAY,KAAK,OAAO;AAI5B,KAAI,YAAY,SAAS,EACvB,OAAM,IAAI,MACR,2CAA2C,YAAY,KAAK,KAAK,GAClE"}
package/dist/utils/merge.js CHANGED
@@ -1,5 +1,3 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/merge.ts
  function deepMerge(target, ...sources) {
  if (!sources.length) return target;
@@ -17,9 +15,7 @@ function deepMerge(target, ...sources) {
  function isObject(item) {
  return typeof item === "object" && item !== null && !Array.isArray(item);
  }
- var init_merge = __esmMin((() => {}));
 
  //#endregion
- init_merge();
- export { deepMerge, init_merge };
+ export { deepMerge };
  //# sourceMappingURL=merge.js.map
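`deepMerge` (full source embedded in the map below) merges plain objects recursively, lets later sources win, skips `undefined` values, and replaces arrays rather than concatenating them. A behavioral sketch; the deep import path is hypothetical and the config shape is invented for illustration:

```ts
// Hypothetical import path; deepMerge ships inside the package's dist/utils output.
import { deepMerge } from "@databricks/appkit/dist/utils/merge.js";

const defaults: Record<string, unknown> = { cache: { ttl: 60, enabled: true }, tags: ["base"] };
const overrides: Record<string, unknown> = { cache: { ttl: 300 }, tags: ["override"] };

// Nested objects merge key by key; arrays are replaced, not concatenated.
const merged = deepMerge(defaults, overrides);
// -> { cache: { ttl: 300, enabled: true }, tags: ["override"] }
```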
package/dist/utils/merge.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";;;AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}
+ {"version":3,"file":"merge.js","names":[],"sources":["../../src/utils/merge.ts"],"sourcesContent":["export function deepMerge<T extends Record<string, unknown>>(\n target: T,\n ...sources: Array<Partial<T> | undefined>\n): T {\n if (!sources.length) return target;\n\n const source = sources.shift();\n if (!source) return deepMerge(target, ...sources);\n\n const result = { ...target };\n\n for (const key in source) {\n const sourceValue = source[key];\n const targetValue = result[key];\n\n if (sourceValue !== undefined) {\n if (isObject(sourceValue) && isObject(targetValue)) {\n result[key] = deepMerge(\n targetValue as Record<string, unknown>,\n sourceValue as Record<string, unknown>,\n ) as T[Extract<keyof T, string>];\n } else {\n result[key] = sourceValue as T[Extract<keyof T, string>];\n }\n }\n }\n\n return sources.length ? deepMerge(result, ...sources) : result;\n}\n\nfunction isObject(item: unknown): item is Record<string, unknown> {\n return typeof item === \"object\" && item !== null && !Array.isArray(item);\n}\n"],"mappings":";AAAA,SAAgB,UACd,QACA,GAAG,SACA;AACH,KAAI,CAAC,QAAQ,OAAQ,QAAO;CAE5B,MAAM,SAAS,QAAQ,OAAO;AAC9B,KAAI,CAAC,OAAQ,QAAO,UAAU,QAAQ,GAAG,QAAQ;CAEjD,MAAM,SAAS,EAAE,GAAG,QAAQ;AAE5B,MAAK,MAAM,OAAO,QAAQ;EACxB,MAAM,cAAc,OAAO;EAC3B,MAAM,cAAc,OAAO;AAE3B,MAAI,gBAAgB,OAClB,KAAI,SAAS,YAAY,IAAI,SAAS,YAAY,CAChD,QAAO,OAAO,UACZ,aACA,YACD;MAED,QAAO,OAAO;;AAKpB,QAAO,QAAQ,SAAS,UAAU,QAAQ,GAAG,QAAQ,GAAG;;AAG1D,SAAS,SAAS,MAAgD;AAChE,QAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,CAAC,MAAM,QAAQ,KAAK"}
package/dist/utils/vite-config-merge.js CHANGED
@@ -1,5 +1,3 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
-
  //#region src/utils/vite-config-merge.ts
  function mergeConfigDedup(base, override, mergeFn) {
  const merged = mergeFn(base, override);
@@ -14,9 +12,7 @@ function mergeConfigDedup(base, override, mergeFn) {
  }
  return merged;
  }
- var init_vite_config_merge = __esmMin((() => {}));
 
  //#endregion
- init_vite_config_merge();
- export { init_vite_config_merge, mergeConfigDedup };
+ export { mergeConfigDedup };
  //# sourceMappingURL=vite-config-merge.js.map
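`mergeConfigDedup` delegates the merge to whatever function it is given and then de-duplicates Vite plugins by name, keeping the first occurrence. A hedged sketch assuming Vite's own `mergeConfig` is passed as the merge function (the deep import path for the helper is illustrative):

```ts
// The helper's import path is illustrative; it ships in the package's dist/utils output.
import { mergeConfig, type UserConfig } from "vite";
import { mergeConfigDedup } from "@databricks/appkit/dist/utils/vite-config-merge.js";

const base: UserConfig = { plugins: [{ name: "appkit-types" }] };
const override: UserConfig = { plugins: [{ name: "appkit-types" }, { name: "extra-plugin" }] };

// mergeConfig concatenates plugin arrays; the wrapper then keeps one entry per plugin name.
const merged = mergeConfigDedup(base, override, mergeConfig);
// merged.plugins -> [{ name: "appkit-types" }, { name: "extra-plugin" }]
```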
package/dist/utils/vite-config-merge.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"vite-config-merge.js","names":[],"sources":["../../src/utils/vite-config-merge.ts"],"sourcesContent":["import type { Plugin } from \"vite\";\n\nexport function mergeConfigDedup(\n base: any,\n override: any,\n mergeFn: (a: any, b: any) => any,\n) {\n const merged = mergeFn(base, override);\n if (base.plugins && override.plugins) {\n const seen = new Set<string>();\n merged.plugins = [...base.plugins, ...override.plugins].filter(\n (p: Plugin) => {\n const name = p.name;\n if (seen.has(name)) return false;\n seen.add(name);\n return true;\n },\n );\n }\n return merged;\n}\n"],"mappings":";;;AAEA,SAAgB,iBACd,MACA,UACA,SACA;CACA,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtC,KAAI,KAAK,WAAW,SAAS,SAAS;EACpC,MAAM,uBAAO,IAAI,KAAa;AAC9B,SAAO,UAAU,CAAC,GAAG,KAAK,SAAS,GAAG,SAAS,QAAQ,CAAC,QACrD,MAAc;GACb,MAAM,OAAO,EAAE;AACf,OAAI,KAAK,IAAI,KAAK,CAAE,QAAO;AAC3B,QAAK,IAAI,KAAK;AACd,UAAO;IAEV;;AAEH,QAAO"}
+ {"version":3,"file":"vite-config-merge.js","names":[],"sources":["../../src/utils/vite-config-merge.ts"],"sourcesContent":["import type { Plugin } from \"vite\";\n\nexport function mergeConfigDedup(\n base: any,\n override: any,\n mergeFn: (a: any, b: any) => any,\n) {\n const merged = mergeFn(base, override);\n if (base.plugins && override.plugins) {\n const seen = new Set<string>();\n merged.plugins = [...base.plugins, ...override.plugins].filter(\n (p: Plugin) => {\n const name = p.name;\n if (seen.has(name)) return false;\n seen.add(name);\n return true;\n },\n );\n }\n return merged;\n}\n"],"mappings":";AAEA,SAAgB,iBACd,MACA,UACA,SACA;CACA,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtC,KAAI,KAAK,WAAW,SAAS,SAAS;EACpC,MAAM,uBAAO,IAAI,KAAa;AAC9B,SAAO,UAAU,CAAC,GAAG,KAAK,SAAS,GAAG,SAAS,QAAQ,CAAC,QACrD,MAAc;GACb,MAAM,OAAO,EAAE;AACf,OAAI,KAAK,IAAI,KAAK,CAAE,QAAO;AAC3B,QAAK,IAAI,KAAK;AACd,UAAO;IAEV;;AAEH,QAAO"}
package/llms.txt CHANGED
@@ -440,23 +440,49 @@ Formats:
  - `format: "JSON"` (default) returns JSON rows
  - `format: "ARROW"` returns an Arrow “statement_id” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
 
- ### Request context (`getRequestContext()`)
+ ### Execution context and `asUser(req)`
 
- If a plugin sets `requiresDatabricksClient = true`, AppKit adds middleware that provides request context.
+ AppKit manages Databricks authentication via two contexts:
 
- Headers used:
+ - **ServiceContext** (singleton): Initialized at app startup with service principal credentials
+ - **ExecutionContext**: Determined at runtime - either service principal or user context
+
+ **Headers used for user context:**
 
  - `x-forwarded-user`: required in production; identifies the user
- - `x-forwarded-access-token`: optional; enables **user token passthrough** if `DATABRICKS_HOST` is set
+ - `x-forwarded-access-token`: required for user token passthrough
+
+ **Using `asUser(req)` for user-scoped operations:**
+
+ The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials:
+
+ ```ts
+ // In a custom plugin route handler
+ router.post("/users/me/data", async (req, res) => {
+ // Execute as the user (uses their Databricks permissions)
+ const result = await this.asUser(req).query("SELECT ...");
+ res.json(result);
+ });
+
+ // Service principal execution (default)
+ router.post("/system/data", async (req, res) => {
+ const result = await this.query("SELECT ...");
+ res.json(result);
+ });
+ ```
+
+ **Context helper functions (exported from `@databricks/appkit`):**
+
+ - `getExecutionContext()`: Returns current context (user or service)
+ - `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise
+ - `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context
+ - `getWarehouseId()`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev)
+ - `getWorkspaceId()`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
+ - `isInUserContext()`: Returns `true` if currently executing in user context
 
- Context fields (real behavior):
+ **Development mode behavior:**
 
- - `userId`: derived from `x-forwarded-user` (in development it falls back to `serviceUserId`)
- - `serviceUserId`: service principal/user ID
- - `warehouseId`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID`, or auto-selected in development)
- - `workspaceId`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
- - `userDatabricksClient`: present only when passthrough is available (or in dev it equals service client)
- - `serviceDatabricksClient`: always present
+ In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal.
 
  ### Custom plugins (backend)
 
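The helper list added above gives names and, for two of them, return types; everything else about their signatures is unstated. A hedged sketch of combining those helpers inside a request handler (whether `getCurrentUserId()` is synchronous is an assumption here):

```ts
// Illustrative only: names come from the llms.txt excerpt above; exact signatures are assumed.
import {
  getCurrentUserId,
  getWarehouseId,
  getWorkspaceId,
  isInUserContext,
} from "@databricks/appkit";

async function describeExecution() {
  const userId = getCurrentUserId();          // user ID in user context, service user ID otherwise
  const warehouseId = await getWarehouseId(); // documented as Promise<string>
  const workspaceId = await getWorkspaceId(); // documented as Promise<string>
  const userScoped = isInUserContext();       // true only when running under user credentials

  return { userId, warehouseId, workspaceId, userScoped };
}
```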
@@ -469,7 +495,6 @@ import type express from "express";
  class MyPlugin extends Plugin {
  name = "my-plugin";
  envVars = []; // list required env vars here
- requiresDatabricksClient = false; // set true if you need getRequestContext()
 
  injectRoutes(router: express.Router) {
  this.route(router, {
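The hunk above removes the `requiresDatabricksClient` flag from the documented plugin skeleton. Combining that skeleton with the `asUser(req)` pattern shown earlier, a plugin might now look roughly like this (route paths, handler bodies, and the SQL text are illustrative, and any base-class members beyond those documented are assumptions):

```ts
// Illustrative sketch assembled from the llms.txt excerpts above; not an official template.
import { Plugin } from "@databricks/appkit";
import type express from "express";

class MyPlugin extends Plugin {
  name = "my-plugin";
  envVars = []; // list required env vars here

  injectRoutes(router: express.Router) {
    // Default: executes with the service principal's credentials.
    router.post("/system/data", async (_req, res) => {
      const result = await this.query("SELECT ...");
      res.json(result);
    });

    // User-scoped: executes with the requesting user's Databricks permissions.
    router.post("/users/me/data", async (req, res) => {
      const result = await this.asUser(req).query("SELECT ...");
      res.json(result);
    });
  }
}
```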
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@databricks/appkit",
  "type": "module",
- "version": "0.1.4",
+ "version": "0.1.5",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "packageManager": "pnpm@10.21.0",
@@ -54,9 +54,11 @@
  "@opentelemetry/sdk-metrics": "^2.2.0",
  "@opentelemetry/sdk-node": "^0.208.0",
  "@opentelemetry/semantic-conventions": "^1.38.0",
+ "@types/semver": "^7.7.1",
  "dotenv": "^16.6.1",
  "express": "^4.22.0",
  "pg": "^8.16.3",
+ "semver": "^7.7.3",
  "vite": "npm:rolldown-vite@7.1.14",
  "ws": "^8.18.3",
  "zod-to-ts": "^2.0.0"
package/dist/index.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.js","names":[],"sources":["../src/index.ts"],"sourcesContent":["export type {\n BasePluginConfig,\n IAppRouter,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nexport {\n isSQLTypeMarker,\n sql,\n} from \"shared\";\nexport { analytics } from \"./analytics\";\nexport { CacheManager } from \"./cache\";\nexport { createApp } from \"./core\";\nexport { Plugin, toPlugin } from \"./plugin\";\nexport { server } from \"./server\";\nexport type { ITelemetry } from \"./telemetry\";\nexport {\n type Counter,\n type Histogram,\n SeverityNumber,\n type Span,\n SpanStatusCode,\n} from \"./telemetry\";\nexport { appKitTypesPlugin } from \"./type-generator/vite-plugin\";\nexport { getRequestContext } from \"./utils\";\n"],"mappings":";;;;;;;;;;;;;;;;YAwB4C"}
package/dist/utils/databricks-client-middleware.d.ts DELETED
@@ -1,17 +0,0 @@
- import { WorkspaceClient } from "@databricks/sdk-experimental";
- import express from "express";
-
- //#region src/utils/databricks-client-middleware.d.ts
- type RequestContext = {
- userDatabricksClient?: WorkspaceClient;
- serviceDatabricksClient: WorkspaceClient;
- userId: string;
- userName?: string;
- serviceUserId: string;
- warehouseId: Promise<string>;
- workspaceId: Promise<string>;
- };
- declare function getRequestContext(): RequestContext;
- //#endregion
- export { getRequestContext };
- //# sourceMappingURL=databricks-client-middleware.d.ts.map
package/dist/utils/databricks-client-middleware.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"databricks-client-middleware.d.ts","names":[],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":[],"mappings":";;;;KAYY,cAAA;yBACa;EADb,uBAAc,EAEC,eAFD;EAAA,MAAA,EAAA,MAAA;UACD,CAAA,EAAA,MAAA;eACE,EAAA,MAAA;aAIZ,EAAA,OAAA,CAAA,MAAA,CAAA;aACA,EAAA,OAAA,CAAA,MAAA,CAAA;CAAO;iBA4EN,iBAAA,CAAA,GAAqB"}
package/dist/utils/databricks-client-middleware.js DELETED
@@ -1,117 +0,0 @@
- import { __esmMin } from "../_virtual/rolldown_runtime.js";
- import { name, version } from "../appkit/package.js";
- import { WorkspaceClient } from "@databricks/sdk-experimental";
- import { AsyncLocalStorage } from "node:async_hooks";
-
- //#region src/utils/databricks-client-middleware.ts
- function getClientOptions() {
- const isDev = process.env.NODE_ENV === "development";
- return {
- product: name,
- productVersion: version.split(".").slice(0, 3).join("."),
- ...isDev && { userAgentExtra: { mode: "dev" } }
- };
- }
- async function databricksClientMiddleware() {
- const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());
- const warehouseId = getWarehouseId(serviceDatabricksClient);
- const workspaceId = getWorkspaceId(serviceDatabricksClient);
- const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;
- if (!serviceUserId) throw new Error("Service user ID not found");
- return async (req, res, next) => {
- const userToken = req.headers["x-forwarded-access-token"];
- let userDatabricksClient;
- const host = process.env.DATABRICKS_HOST;
- if (userToken && host) userDatabricksClient = new WorkspaceClient({
- token: userToken,
- host,
- authType: "pat"
- }, getClientOptions());
- else if (process.env.NODE_ENV === "development") userDatabricksClient = serviceDatabricksClient;
- let userName = req.headers["x-forwarded-user"];
- if (!userName && process.env.NODE_ENV !== "development") {
- res.status(401).json({ error: "Unauthorized" });
- return;
- } else userName = serviceUserId;
- return asyncLocalStorage.run({
- userDatabricksClient,
- serviceDatabricksClient,
- warehouseId,
- workspaceId,
- userId: userName,
- serviceUserId
- }, async () => {
- return next();
- });
- };
- }
- function getRequestContext() {
- const store = asyncLocalStorage.getStore();
- if (!store) throw new Error("Request context not found");
- return store;
- }
- /**
- * Get the appropriate WorkspaceClient based on whether the request
- * should be executed as the user or as the service principal.
- *
- * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)
- * @throws Error if asUser is true but user token passthrough is not enabled
- */
- function getWorkspaceClient(asUser) {
- const context = getRequestContext();
- if (asUser) {
- if (!context.userDatabricksClient) throw new Error(`User token passthrough is not enabled for this workspace.`);
- return context.userDatabricksClient;
- }
- return context.serviceDatabricksClient;
- }
- async function getWorkspaceId(workspaceClient) {
- if (process.env.DATABRICKS_WORKSPACE_ID) return process.env.DATABRICKS_WORKSPACE_ID;
- const response = await workspaceClient.apiClient.request({
- path: "/api/2.0/preview/scim/v2/Me",
- method: "GET",
- headers: new Headers(),
- raw: false,
- query: {},
- responseHeaders: ["x-databricks-org-id"]
- });
- if (!response["x-databricks-org-id"]) throw new Error("Workspace ID not found");
- return response["x-databricks-org-id"];
- }
- async function getWarehouseId(workspaceClient) {
- if (process.env.DATABRICKS_WAREHOUSE_ID) return process.env.DATABRICKS_WAREHOUSE_ID;
- if (process.env.NODE_ENV === "development") {
- const response = await workspaceClient.apiClient.request({
- path: "/api/2.0/sql/warehouses",
- method: "GET",
- headers: new Headers(),
- raw: false,
- query: { skip_cannot_use: "true" }
- });
- const priorities = {
- RUNNING: 0,
- STOPPED: 1,
- STARTING: 2,
- STOPPING: 3,
- DELETED: 99,
- DELETING: 99
- };
- const warehouses = (response.warehouses || []).sort((a, b) => {
- return priorities[a.state] - priorities[b.state];
- });
- if (response.warehouses.length === 0) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- const firstWarehouse = warehouses[0];
- if (firstWarehouse.state === "DELETED" || firstWarehouse.state === "DELETING" || !firstWarehouse.id) throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- return firstWarehouse.id;
- }
- throw new Error("Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.");
- }
- var asyncLocalStorage;
- var init_databricks_client_middleware = __esmMin((() => {
- asyncLocalStorage = new AsyncLocalStorage();
- }));
-
- //#endregion
- init_databricks_client_middleware();
- export { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware };
- //# sourceMappingURL=databricks-client-middleware.js.map
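For reference, the removed dev-mode `getWarehouseId` ranks warehouses RUNNING, then STOPPED, STARTING, STOPPING, and rejects DELETED/DELETING. A self-contained sketch of just that selection logic (local names only; this is not the new context module's API):

```ts
// Standalone illustration of the priority ranking used by the removed helper.
type WarehouseState =
  | "RUNNING" | "STOPPED" | "STARTING" | "STOPPING" | "DELETED" | "DELETING";

const priorities: Record<WarehouseState, number> = {
  RUNNING: 0,
  STOPPED: 1,
  STARTING: 2,
  STOPPING: 3,
  DELETED: 99,
  DELETING: 99,
};

function pickWarehouseId(warehouses: Array<{ id?: string; state: WarehouseState }>): string {
  // Prefer running warehouses, then stopped, then ones in transition.
  const sorted = [...warehouses].sort((a, b) => priorities[a.state] - priorities[b.state]);
  const first = sorted[0];
  if (!first?.id || first.state === "DELETED" || first.state === "DELETING") {
    throw new Error(
      "Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.",
    );
  }
  return first.id;
}

// pickWarehouseId([{ id: "w2", state: "STOPPED" }, { id: "w1", state: "RUNNING" }]) === "w1"
```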
package/dist/utils/databricks-client-middleware.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"databricks-client-middleware.js","names":["productName","productVersion","userDatabricksClient: WorkspaceClient | undefined","priorities: Record<sql.State, number>"],"sources":["../../src/utils/databricks-client-middleware.ts"],"sourcesContent":["import { AsyncLocalStorage } from \"node:async_hooks\";\nimport {\n type ClientOptions,\n type sql,\n WorkspaceClient,\n} from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport {\n name as productName,\n version as productVersion,\n} from \"../../package.json\";\n\nexport type RequestContext = {\n userDatabricksClient?: WorkspaceClient;\n serviceDatabricksClient: WorkspaceClient;\n userId: string;\n userName?: string;\n serviceUserId: string;\n warehouseId: Promise<string>;\n workspaceId: Promise<string>;\n};\n\nconst asyncLocalStorage = new AsyncLocalStorage<RequestContext>();\n\nfunction getClientOptions(): ClientOptions {\n const isDev = process.env.NODE_ENV === \"development\";\n const normalizedVersion = productVersion\n .split(\".\")\n .slice(0, 3)\n .join(\".\") as ClientOptions[\"productVersion\"];\n\n return {\n product: productName,\n productVersion: normalizedVersion,\n ...(isDev && { userAgentExtra: { mode: \"dev\" } }),\n };\n}\n\nexport async function databricksClientMiddleware(): Promise<express.RequestHandler> {\n const serviceDatabricksClient = new WorkspaceClient({}, getClientOptions());\n const warehouseId = getWarehouseId(serviceDatabricksClient);\n const workspaceId = getWorkspaceId(serviceDatabricksClient);\n const serviceUserId = (await serviceDatabricksClient.currentUser.me()).id;\n\n if (!serviceUserId) {\n throw new Error(\"Service user ID not found\");\n }\n\n return async (\n req: express.Request,\n res: express.Response,\n next: express.NextFunction,\n ) => {\n const userToken = req.headers[\"x-forwarded-access-token\"] as string;\n let userDatabricksClient: WorkspaceClient | undefined;\n const host = process.env.DATABRICKS_HOST;\n if (userToken && host) {\n userDatabricksClient = new WorkspaceClient(\n {\n token: userToken,\n host,\n authType: \"pat\",\n },\n getClientOptions(),\n );\n } else if (process.env.NODE_ENV === \"development\") {\n // in local development service and no user token are the same\n // TODO: use `databricks apps run-local` to fix this\n userDatabricksClient = serviceDatabricksClient;\n }\n\n let userName = req.headers[\"x-forwarded-user\"] as string;\n if (!userName && process.env.NODE_ENV !== \"development\") {\n res.status(401).json({ error: \"Unauthorized\" });\n return;\n } else {\n userName = serviceUserId;\n }\n\n return asyncLocalStorage.run(\n {\n userDatabricksClient,\n serviceDatabricksClient,\n warehouseId,\n workspaceId,\n userId: userName,\n serviceUserId,\n },\n async () => {\n return next();\n },\n );\n };\n}\n\nexport function getRequestContext(): RequestContext {\n const store = asyncLocalStorage.getStore();\n if (!store) {\n throw new Error(\"Request context not found\");\n }\n return store;\n}\n\n/**\n * Get the appropriate WorkspaceClient based on whether the request\n * should be executed as the user or as the service principal.\n *\n * @param asUser - If true, returns user's WorkspaceClient (requires token passthrough)\n * @throws Error if asUser is true but user token passthrough is not enabled\n */\nexport function getWorkspaceClient(asUser: boolean): WorkspaceClient {\n const context = getRequestContext();\n\n if (asUser) {\n if (!context.userDatabricksClient) {\n throw new Error(\n `User token passthrough is not 
enabled for this workspace.`,\n );\n }\n return context.userDatabricksClient;\n }\n\n return context.serviceDatabricksClient;\n}\n\nasync function getWorkspaceId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WORKSPACE_ID) {\n return process.env.DATABRICKS_WORKSPACE_ID;\n }\n\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/preview/scim/v2/Me\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {},\n responseHeaders: [\"x-databricks-org-id\"],\n })) as { \"x-databricks-org-id\": string };\n\n if (!response[\"x-databricks-org-id\"]) {\n throw new Error(\"Workspace ID not found\");\n }\n\n return response[\"x-databricks-org-id\"];\n}\n\nasync function getWarehouseId(\n workspaceClient: WorkspaceClient,\n): Promise<string> {\n if (process.env.DATABRICKS_WAREHOUSE_ID) {\n return process.env.DATABRICKS_WAREHOUSE_ID;\n }\n\n if (process.env.NODE_ENV === \"development\") {\n const response = (await workspaceClient.apiClient.request({\n path: \"/api/2.0/sql/warehouses\",\n method: \"GET\",\n headers: new Headers(),\n raw: false,\n query: {\n skip_cannot_use: \"true\",\n },\n })) as { warehouses: sql.EndpointInfo[] };\n\n const priorities: Record<sql.State, number> = {\n RUNNING: 0,\n STOPPED: 1,\n STARTING: 2,\n STOPPING: 3,\n DELETED: 99,\n DELETING: 99,\n };\n\n const warehouses = (response.warehouses || []).sort((a, b) => {\n return (\n priorities[a.state as sql.State] - priorities[b.state as sql.State]\n );\n });\n\n if (response.warehouses.length === 0) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n const firstWarehouse = warehouses[0];\n if (\n firstWarehouse.state === \"DELETED\" ||\n firstWarehouse.state === \"DELETING\" ||\n !firstWarehouse.id\n ) {\n throw new Error(\n \"Warehouse ID not found. Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n }\n\n return firstWarehouse.id;\n }\n\n throw new Error(\n \"Warehouse ID not found. 
Please configure the DATABRICKS_WAREHOUSE_ID environment variable.\",\n );\n}\n\nexport type Request = express.Request;\nexport type Response = express.Response;\n"],"mappings":";;;;;;AAwBA,SAAS,mBAAkC;CACzC,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAMvC,QAAO;EACL,SAASA;EACT,gBAPwBC,QACvB,MAAM,IAAI,CACV,MAAM,GAAG,EAAE,CACX,KAAK,IAAI;EAKV,GAAI,SAAS,EAAE,gBAAgB,EAAE,MAAM,OAAO,EAAE;EACjD;;AAGH,eAAsB,6BAA8D;CAClF,MAAM,0BAA0B,IAAI,gBAAgB,EAAE,EAAE,kBAAkB,CAAC;CAC3E,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,cAAc,eAAe,wBAAwB;CAC3D,MAAM,iBAAiB,MAAM,wBAAwB,YAAY,IAAI,EAAE;AAEvE,KAAI,CAAC,cACH,OAAM,IAAI,MAAM,4BAA4B;AAG9C,QAAO,OACL,KACA,KACA,SACG;EACH,MAAM,YAAY,IAAI,QAAQ;EAC9B,IAAIC;EACJ,MAAM,OAAO,QAAQ,IAAI;AACzB,MAAI,aAAa,KACf,wBAAuB,IAAI,gBACzB;GACE,OAAO;GACP;GACA,UAAU;GACX,EACD,kBAAkB,CACnB;WACQ,QAAQ,IAAI,aAAa,cAGlC,wBAAuB;EAGzB,IAAI,WAAW,IAAI,QAAQ;AAC3B,MAAI,CAAC,YAAY,QAAQ,IAAI,aAAa,eAAe;AACvD,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,gBAAgB,CAAC;AAC/C;QAEA,YAAW;AAGb,SAAO,kBAAkB,IACvB;GACE;GACA;GACA;GACA;GACA,QAAQ;GACR;GACD,EACD,YAAY;AACV,UAAO,MAAM;IAEhB;;;AAIL,SAAgB,oBAAoC;CAClD,MAAM,QAAQ,kBAAkB,UAAU;AAC1C,KAAI,CAAC,MACH,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO;;;;;;;;;AAUT,SAAgB,mBAAmB,QAAkC;CACnE,MAAM,UAAU,mBAAmB;AAEnC,KAAI,QAAQ;AACV,MAAI,CAAC,QAAQ,qBACX,OAAM,IAAI,MACR,4DACD;AAEH,SAAO,QAAQ;;AAGjB,QAAO,QAAQ;;AAGjB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;CAGrB,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;EACxD,MAAM;EACN,QAAQ;EACR,SAAS,IAAI,SAAS;EACtB,KAAK;EACL,OAAO,EAAE;EACT,iBAAiB,CAAC,sBAAsB;EACzC,CAAC;AAEF,KAAI,CAAC,SAAS,uBACZ,OAAM,IAAI,MAAM,yBAAyB;AAG3C,QAAO,SAAS;;AAGlB,eAAe,eACb,iBACiB;AACjB,KAAI,QAAQ,IAAI,wBACd,QAAO,QAAQ,IAAI;AAGrB,KAAI,QAAQ,IAAI,aAAa,eAAe;EAC1C,MAAM,WAAY,MAAM,gBAAgB,UAAU,QAAQ;GACxD,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,OAAO,EACL,iBAAiB,QAClB;GACF,CAAC;EAEF,MAAMC,aAAwC;GAC5C,SAAS;GACT,SAAS;GACT,UAAU;GACV,UAAU;GACV,SAAS;GACT,UAAU;GACX;EAED,MAAM,cAAc,SAAS,cAAc,EAAE,EAAE,MAAM,GAAG,MAAM;AAC5D,UACE,WAAW,EAAE,SAAsB,WAAW,EAAE;IAElD;AAEF,MAAI,SAAS,WAAW,WAAW,EACjC,OAAM,IAAI,MACR,6FACD;EAGH,MAAM,iBAAiB,WAAW;AAClC,MACE,eAAe,UAAU,aACzB,eAAe,UAAU,cACzB,CAAC,eAAe,GAEhB,OAAM,IAAI,MACR,6FACD;AAGH,SAAO,eAAe;;AAGxB,OAAM,IAAI,MACR,6FACD;;;;CArLG,oBAAoB,IAAI,mBAAmC"}
package/dist/utils/index.js DELETED
@@ -1,26 +0,0 @@
- import { __esmMin, __exportAll } from "../_virtual/rolldown_runtime.js";
- import { databricksClientMiddleware, getRequestContext, getWorkspaceClient, init_databricks_client_middleware } from "./databricks-client-middleware.js";
- import { init_env_validator, validateEnv } from "./env-validator.js";
- import { deepMerge, init_merge } from "./merge.js";
- import { init_vite_config_merge, mergeConfigDedup } from "./vite-config-merge.js";
-
- //#region src/utils/index.ts
- var utils_exports = /* @__PURE__ */ __exportAll({
- databricksClientMiddleware: () => databricksClientMiddleware,
- deepMerge: () => deepMerge,
- getRequestContext: () => getRequestContext,
- getWorkspaceClient: () => getWorkspaceClient,
- mergeConfigDedup: () => mergeConfigDedup,
- validateEnv: () => validateEnv
- });
- var init_utils = __esmMin((() => {
- init_databricks_client_middleware();
- init_env_validator();
- init_merge();
- init_vite_config_merge();
- }));
-
- //#endregion
- init_utils();
- export { init_utils, utils_exports };
- //# sourceMappingURL=index.js.map
package/dist/utils/index.js.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.js","names":[],"sources":["../../src/utils/index.ts"],"sourcesContent":["export * from \"./databricks-client-middleware\";\nexport * from \"./env-validator\";\nexport * from \"./merge\";\nexport * from \"./vite-config-merge\";\n"],"mappings":""}